New processing code in Python

This commit is contained in:
Eduardo Cueto-Mendoza 2025-01-31 16:43:18 +00:00
parent 129dfe70e0
commit 4bf7d58b29
Signed by: TastyPancakes
GPG Key ID: 941DF56C7242C3F1
8 changed files with 703 additions and 0 deletions

3
.gitignore vendored Normal file
View File

@ -0,0 +1,3 @@
__pycache__
*_DATA
*_proc

116
accuracy_data.py Normal file
View File

@ -0,0 +1,116 @@
from glob import glob
import numpy as np
from aux_functions import load_pickle, save_pickle
EXP_NO = 2  # experiment number (1, 2 or 3); selects the EXPERIMENT_*_DATA dir
MODEL_SIZE = 1  # NOTE(review): unused in this script -- kept for compatibility
CRITERIA = ""  # NOTE(review): unused; the criterion is hard-coded to 'Efficiency' below

# Short key -> on-disk path component.
data_type = {"cifar": "CIFAR", "mnist": "MNIST"}
experiment_criterias = {
    "Early": "/Early_Stopping",
    "Efficiency": "/Efficiency_Stopping",
    "100": "/100_epoch",
    "Accuracy": "/Accuracy_Bound",
    "Energy": "/Energy_Bound",
}
model_type = {"bayes": "bayes_exp_*.pkl", "freq": "freq_exp_data_*.pkl"}
EXPERIMENT_PATH = f"./EXPERIMENT_{EXP_NO}_DATA"

# models_exp[dataset][model][size] -> concatenated per-run data array.
# Sizes 1-7 are the seven model sizes used in the experiments.
models_exp = {
    ds: {m: {s: None for s in range(1, 8)} for m in ("bayes", "freq")}
    for ds in ("cifar", "mnist")
}

# One pass per (dataset, model) combination replaces the original four
# copy-pasted glob/load sections.
for ds in ("cifar", "mnist"):
    for m in ("bayes", "freq"):
        pattern = (
            f"{EXPERIMENT_PATH}"
            f"{experiment_criterias['Efficiency']}"
            f"/{data_type[ds]}"
            f"/{model_type[m]}"
        )
        # sorted() makes the size-index -> file pairing deterministic;
        # bare glob() order is filesystem-dependent.
        for size, fpath in zip(range(1, 8), sorted(glob(pattern))):
            models_exp[ds][m][size] = np.concatenate(load_pickle(fpath), axis=0)

file_name = f"Experiment_{EXP_NO}_accuracy_data.pkl"
save_pickle(file_name, models_exp)

17
aux_functions.py Normal file
View File

@ -0,0 +1,17 @@
import pickle
def load_pickle(fpath):
    """Load every pickled record appended to *fpath*.

    The file may contain several consecutive ``pickle.dump`` records;
    all of them are read in order until EOF.

    Returns a list with one entry per pickled record.
    """
    gpu_data = []
    with open(fpath, "rb") as openfile:
        while True:
            try:
                # Bug fix: the original rebound gpu_data on every load,
                # silently discarding all but the last record -- the
                # while/EOFError loop only makes sense if records accumulate.
                gpu_data.append(pickle.load(openfile))
            except EOFError:
                break
    return gpu_data
def save_pickle(fpath, data):
    """Serialize *data* to the file *fpath* using pickle."""
    with open(fpath, "wb") as sink:
        pickle.dump(data, sink)

116
energy_data.py Normal file
View File

@ -0,0 +1,116 @@
from glob import glob
import numpy as np
from aux_functions import load_pickle, save_pickle
# NOTE(review): this script is a near-verbatim copy of accuracy_data.py,
# including the same glob patterns -- verify the patterns are correct for
# energy (rather than accuracy) data.
EXP_NO = 2  # experiment number (1, 2 or 3); selects the EXPERIMENT_*_DATA dir
MODEL_SIZE = 1  # NOTE(review): unused in this script -- kept for compatibility
CRITERIA = ""  # NOTE(review): unused; the criterion is hard-coded to 'Efficiency' below

# Short key -> on-disk path component.
data_type = {"cifar": "CIFAR", "mnist": "MNIST"}
experiment_criterias = {
    "Early": "/Early_Stopping",
    "Efficiency": "/Efficiency_Stopping",
    "100": "/100_epoch",
    "Accuracy": "/Accuracy_Bound",
    "Energy": "/Energy_Bound",
}
model_type = {"bayes": "bayes_exp_*.pkl", "freq": "freq_exp_data_*.pkl"}
EXPERIMENT_PATH = f"./EXPERIMENT_{EXP_NO}_DATA"

# models_exp[dataset][model][size] -> concatenated per-run data array.
# Sizes 1-7 are the seven model sizes used in the experiments.
models_exp = {
    ds: {m: {s: None for s in range(1, 8)} for m in ("bayes", "freq")}
    for ds in ("cifar", "mnist")
}

# One pass per (dataset, model) combination replaces the original four
# copy-pasted glob/load sections.
for ds in ("cifar", "mnist"):
    for m in ("bayes", "freq"):
        pattern = (
            f"{EXPERIMENT_PATH}"
            f"{experiment_criterias['Efficiency']}"
            f"/{data_type[ds]}"
            f"/{model_type[m]}"
        )
        # sorted() makes the size-index -> file pairing deterministic;
        # bare glob() order is filesystem-dependent.
        for size, fpath in zip(range(1, 8), sorted(glob(pattern))):
            models_exp[ds][m][size] = np.concatenate(load_pickle(fpath), axis=0)

# Bug fix: the original saved to "..._accuracy_data.pkl", clobbering the
# output of accuracy_data.py.
file_name = f"Experiment_{EXP_NO}_energy_data.pkl"
save_pickle(file_name, models_exp)

94
find_epochs.py Normal file
View File

@ -0,0 +1,94 @@
from glob import glob
# Experiment number: can be 1, 2 or 3.
exp_no = 2
# Model size: can be 1 to 7.
model_size = 1
crit = "Early"

# Stopping-criterion key -> directory name component.
experiment_criterias = {
    "Early": "/Early_Stopping",
    "Efficiency": "/Efficiency_Stopping",
    "100": "/100_epoch",
    "Accuracy": "/Accuracy_Bound",
    "Energy": "/Energy_Bound",
}
# Path components for Bayesian CNN checkpoints
# (<prefix><mid><size>_epoch_<n>_noise_<noise>.pt).
bcnn_path = {
    "prefix_path_mnist": "/checkpoints/MNIST/bayesian",
    "mid_path_mnist": "/model_lenet_lrt_softplus_",
    "prefix_path_cifar": "/checkpoints/CIFAR10/bayesian",
    "mid_path_cifar": "/model_lenet_lrt_softplus_",
    "end_path": "_epoch_",
    "noise_path": "_noise_",
    "suffix_path": ".pt",
}
# Path components for frequentist LeNet checkpoints (same layout).
lenet_path = {
    "prefix_path_mnist": "/checkpoints/MNIST/frequentist",
    "mid_path_mnist": "/model_lenet_",
    "prefix_path_cifar": "/checkpoints/CIFAR10/frequentist",
    "mid_path_cifar": "/model_lenet_",
    "end_path": "_epoch_",
    "noise_path": "_noise_",
    "suffix_path": ".pt",
}

# (model_type, data_type) -> (path dict, prefix key, mid key).
# This lookup replaces the original four copy-pasted branches.
_CHECKPOINT_PATHS = {
    ("bcnn", "cifar"): (bcnn_path, "prefix_path_cifar", "mid_path_cifar"),
    ("bcnn", "mnist"): (bcnn_path, "prefix_path_mnist", "mid_path_mnist"),
    ("lenet", "cifar"): (lenet_path, "prefix_path_cifar", "mid_path_cifar"),
    ("lenet", "mnist"): (lenet_path, "prefix_path_mnist", "mid_path_mnist"),
}


def get_experiment_epochs(
    experiment_number=2, size=7, model_type="bcnn", data_type="cifar", stopping_criteria="Efficiency"
):
    """Count saved checkpoint files (one per trained epoch) per model size.

    Args:
        experiment_number: experiment whose EXPERIMENT_<n>_DATA dir to scan.
        size: number of model sizes to count (sizes are 1-based).
        model_type: "bcnn" (Bayesian) or "lenet" (frequentist).
        data_type: "cifar" or "mnist".
        stopping_criteria: key into experiment_criterias.

    Returns:
        Dict mapping model size (1..7) to the number of matching ``.pt``
        files; sizes beyond *size* remain None.  Returns None for an
        unknown (model_type, data_type) pair, matching the original
        implementation's implicit fall-through.
    """
    models_epochs = {s: None for s in range(1, 8)}
    experiment_root = f"./EXPERIMENT_{experiment_number}_DATA"
    key = (model_type, data_type)
    if key not in _CHECKPOINT_PATHS:
        return None
    paths, prefix_key, mid_key = _CHECKPOINT_PATHS[key]
    for s in range(size):
        pattern = (
            f"{experiment_root}{experiment_criterias[stopping_criteria]}"
            f"{paths[prefix_key]}{paths[mid_key]}"
            f"{s + 1}*.pt"
        )
        models_epochs[s + 1] = len(glob(pattern))
    return models_epochs


# Example checkpoint path:
# ./EXPERIMENT_2_DATA/Efficiency_Stopping/checkpoints/MNIST/frequentist/model_lenet_7*.pt

87
functions.py Normal file
View File

@ -0,0 +1,87 @@
import pickle
import torch
import torch.linalg as alg
def square_matrix(tensor):
    """Zero-pad *tensor* so that its trailing two dimensions are square.

    0-d tensors are returned unchanged; a 1-d tensor of length n becomes
    (n, n) with the data in the last row; a non-square 2-d tensor is padded
    with zeros to (n, n) where n = max(rows, cols).  For >2-d input each
    trailing 2-d sub-matrix is padded independently (a 4-d layout, e.g.
    conv weights, is assumed, mirroring the original's two nested loops).

    Bug fixes vs. the original:
    - rows > cols previously concatenated a (rows-cols, rows) pad with a
      (rows, cols) tensor along dim 0, which raises a shape mismatch;
      zero *columns* are now prepended instead.
    - the >2-d branch previously assigned padded (larger) sub-matrices into
      a clone of the original shape, which raises whenever padding occurs;
      an output of the padded size is now allocated.
    """
    tensor_size = tensor.size()
    if len(tensor_size) == 0:
        return tensor
    if len(tensor_size) == 1:
        n = tensor_size[0]
        pad = torch.zeros([n, n - 1])
        return torch.cat((pad.T, tensor.reshape(1, n)))
    if len(tensor_size) == 2:
        rows, cols = tensor_size
        if rows > cols:
            # Prepend zero columns to widen the matrix to (rows, rows).
            pad = torch.zeros([rows, rows - cols])
            return torch.cat((pad, tensor), dim=1)
        if rows < cols:
            # Prepend zero rows to heighten the matrix to (cols, cols).
            pad = torch.zeros([cols, cols - rows])
            return torch.cat((pad.T, tensor))
        return tensor
    # > 2 dims: pad each trailing 2-d sub-matrix via recursion into an
    # output tensor of the padded (square) size.
    n = max(tensor_size[-2], tensor_size[-1])
    result = torch.zeros(list(tensor_size[:-2]) + [n, n])
    for i, x in enumerate(tensor):
        for j, t in enumerate(x):
            result[i][j] = square_matrix(t)
    return result
def _matrix_entropy(matrix):
    """Entropy -sum(|e| * log|e|) over the eigenvalues e of one square matrix."""
    eigenvalues = alg.eigvals(matrix)
    magnitudes = torch.abs(eigenvalues)
    # log of a zero magnitude is -inf; map non-finite terms to 0 so they
    # contribute nothing to the sum.
    logs = torch.nan_to_num(torch.log(magnitudes).real, nan=0.0, posinf=0.0, neginf=0.0)
    return -1 * torch.sum(magnitudes * logs)


def neumann_entropy(tensor):
    """Von Neumann-style entropy of *tensor*.

    0-d input is returned unchanged; 1-d input has entropy 0; a 2-d
    (square) matrix yields its eigenvalue entropy; for >2-d input the
    entropies of all trailing 2-d sub-matrices are summed (a 4-d layout
    is assumed, mirroring the original's two nested loops).

    Bug fix: the original returned from inside the innermost loop, so for
    >2-d input only the *first* sub-matrix's entropy was ever computed.
    """
    tensor_size = tensor.size()
    if len(tensor_size) == 0:
        return tensor
    if len(tensor_size) == 1:
        return 0
    if len(tensor_size) == 2:
        return _matrix_entropy(tensor)
    total = 0
    for x in tensor:
        for t in x:
            total = total + _matrix_entropy(t)
    return total
def load_pickle(fpath):
    """Read and return the (single) pickled object stored in *fpath*."""
    with open(fpath, "rb") as handle:
        return pickle.load(handle)
def save_pickle(pickle_name, data_dump):
    """Serialize *data_dump* to the file *pickle_name* using pickle."""
    with open(pickle_name, "wb") as sink:
        pickle.dump(data_dump, sink)
def chunks(lst, n):
    """Yield successive n-sized chunks from lst; the last may be shorter."""
    start = 0
    while start < len(lst):
        yield lst[start : start + n]
        start += n
def split(lst, n):
    """Lazily split lst into n contiguous parts whose lengths differ by <= 1.

    The first ``len(lst) % n`` parts get the extra element.
    """
    base, extra = divmod(len(lst), n)

    def _part(i):
        start = i * base + min(i, extra)
        length = base + (1 if i < extra else 0)
        return lst[start : start + length]

    return (_part(i) for i in range(n))

261
proc_saved_dnns.py Normal file
View File

@ -0,0 +1,261 @@
import pickle
import torch
from find_epochs import get_experiment_epochs
# import warnings
# import numpy as np
# Checkpoint filename example: model_lenet_1_epoch_4.pt
# Experiment number: can be 1, 2 or 3.
exp_no = 2
# Model size: can be 1 to 7.
model_size = 1
EXPERIMENT_2_PATH = "./EXPERIMENT_2_DATA"
EXPERIMENT_3_PATH = "./EXPERIMENT_3_DATA"
# Stopping criterion used when building checkpoint paths below.
crit = "Efficiency"
# Stopping-criterion key -> directory name component.
experiment_criterias = {
    "Early": "/Early_Stopping",
    "Efficiency": "/Efficiency_Stopping",
    "100": "/100_epoch",
    "Accuracy": "/Accuracy_Bound",
    "Energy": "/Energy_Bound",
}
# Path components for Bayesian CNN checkpoint files, assembled as
# <prefix><mid><size>_epoch_<n>_noise_<noise>.pt below.
bcnn_path = {
    "prefix_path_mnist": "/checkpoints/MNIST/bayesian",
    "mid_path_mnist": "/model_lenet_lrt_softplus_",
    "prefix_path_cifar": "/checkpoints/CIFAR10/bayesian",
    "mid_path_cifar": "/model_lenet_lrt_softplus_",
    "end_path": "_epoch_",
    "noise_path": "_noise_",
    "suffix_path": ".pt",
}
# Path components for frequentist LeNet checkpoint files (same layout).
lenet_path = {
    "prefix_path_mnist": "/checkpoints/MNIST/frequentist",
    "mid_path_mnist": "/model_lenet_",
    "prefix_path_cifar": "/checkpoints/CIFAR10/frequentist",
    "mid_path_cifar": "/model_lenet_",
    "end_path": "_epoch_",
    "noise_path": "_noise_",
    "suffix_path": ".pt",
}
# Experiment 2 only uses these two stopping criteria.
experiment_2_criterias = {
    "Early": "/Early_Stopping",
    "Efficiency": "/Efficiency_Stopping",
}
# Noise level embedded in checkpoint filenames; None means the files
# literally end in "_noise_None.pt".
model_noise = None
datasets = ["MNIST", "CIFAR10"]
# State-dict keys of the Bayesian LeNet (mean/rho per weight and bias).
bcnn_layers = [
    "conv1.W_mu",
    "conv1.W_rho",
    "conv1.bias_mu",
    "conv1.bias_rho",
    "conv2.W_mu",
    "conv2.W_rho",
    "conv2.bias_mu",
    "conv2.bias_rho",
    "fc1.W_mu",
    "fc1.W_rho",
    "fc1.bias_mu",
    "fc1.bias_rho",
    "fc2.W_mu",
    "fc2.W_rho",
    "fc2.bias_mu",
    "fc2.bias_rho",
    "fc3.W_mu",
    "fc3.W_rho",
    "fc3.bias_mu",
    "fc3.bias_rho",
]
# State-dict keys of the frequentist LeNet.
lenet_layers = [
    "conv1.weight",
    "conv1.bias",
    "conv2.weight",
    "conv2.bias",
    "fc1.weight",
    "fc1.bias",
    "fc2.weight",
    "fc2.bias",
    "fc3.weight",
    "fc3.bias",
]
# Empty per-layer templates (NOTE(review): these appear unused in this file;
# possibly consumed by downstream scripts -- confirm before removing).
bcnn_base = {
    "conv1.W_mu": None,
    "conv1.W_rho": None,
    "conv1.bias_mu": None,
    "conv1.bias_rho": None,
    "conv2.W_mu": None,
    "conv2.W_rho": None,
    "conv2.bias_mu": None,
    "conv2.bias_rho": None,
    "fc1.W_mu": None,
    "fc1.W_rho": None,
    "fc1.bias_mu": None,
    "fc1.bias_rho": None,
    "fc2.W_mu": None,
    "fc2.W_rho": None,
    "fc2.bias_mu": None,
    "fc2.bias_rho": None,
    "fc3.W_mu": None,
    "fc3.W_rho": None,
    "fc3.bias_mu": None,
    "fc3.bias_rho": None,
}
lenet_base = {
    "conv1.weight": None,
    "conv1.bias": None,
    "conv2.weight": None,
    "conv2.bias": None,
    "fc1.weight": None,
    "fc1.bias": None,
    "fc2.weight": None,
    "fc2.bias": None,
    "fc3.weight": None,
    "fc3.bias": None,
}
# Accumulators filled by __main__: {model_size: {epoch_index: state_dict}}.
bcnn_cifar_size_data = {}
bcnn_mnist_size_data = {}
lenet_cifar_size_data = {}
lenet_mnist_size_data = {}
# CUDA settings
device = torch.device("cuda:0" if torch.cuda.is_available() else "cpu")


def load_model(save_path, gpu):
    """Load a checkpoint (state dict) from *save_path*.

    Args:
        save_path: path to a ``.pt`` file saved with ``torch.save``.
        gpu: target device, "cuda:0" or "cpu" (a torch.device is accepted
            and stringified).

    Returns:
        The loaded object (``weights_only=True`` restricts unpickling to
        tensors/containers for safety).

    Raises:
        ValueError: for any other device string.  The original silently
        returned None in that case, hiding the misconfiguration.
    """
    gpu = str(gpu)
    if gpu == "cuda:0":
        return torch.load(save_path, weights_only=True)
    if gpu == "cpu":
        return torch.load(save_path, map_location=torch.device("cpu"), weights_only=True)
    raise ValueError(f"unsupported device for load_model: {gpu!r}")
def _load_epoch_models(model_type, data_type, model_size, epoch_offset):
    """Load every saved checkpoint for one (model, dataset, size) combination.

    Args:
        model_type: "bcnn" or "lenet" (selects bcnn_path / lenet_path).
        data_type: "cifar" or "mnist" (selects the *_cifar / *_mnist keys).
        model_size: model size 1-7.
        epoch_offset: 0 for Bayesian checkpoints (epoch numbering starts at
            0 in their filenames), 1 for frequentist ones -- this mirrors
            the {model_epoch} vs {model_epoch+1} difference in the original
            four copy-pasted loops.

    Returns:
        {epoch_index: loaded checkpoint}, epoch_index counting from 0.
    """
    paths = bcnn_path if model_type == "bcnn" else lenet_path
    prefix = paths[f"prefix_path_{data_type}"]
    mid = paths[f"mid_path_{data_type}"]
    n_epochs = get_experiment_epochs(
        experiment_number=2, size=7, model_type=model_type, data_type=data_type
    )[model_size]
    loaded = []
    for model_epoch in range(n_epochs):
        loaded.append(
            load_model(
                f"{EXPERIMENT_2_PATH}{experiment_2_criterias['Efficiency']}"
                f"{prefix}{mid}"
                f"{model_size}{paths['end_path']}{model_epoch + epoch_offset}{paths['noise_path']}"
                f"{model_noise}{paths['suffix_path']}",
                "cpu",
            )
        )
    return dict(enumerate(loaded))


if __name__ == "__main__":
    # Collect every epoch checkpoint for all seven model sizes, per
    # model family and dataset.
    for model_size in range(1, 8):
        bcnn_cifar_size_data[model_size] = _load_epoch_models("bcnn", "cifar", model_size, 0)
        lenet_cifar_size_data[model_size] = _load_epoch_models("lenet", "cifar", model_size, 1)
        bcnn_mnist_size_data[model_size] = _load_epoch_models("bcnn", "mnist", model_size, 0)
        lenet_mnist_size_data[model_size] = _load_epoch_models("lenet", "mnist", model_size, 1)
    # Persist the collected checkpoints, one pickle per (model, dataset).
    with open("bayes_data_cifar.pkl", "wb") as f:
        pickle.dump(bcnn_cifar_size_data, f)
    with open("lenet_data_cifar.pkl", "wb") as f:
        pickle.dump(lenet_cifar_size_data, f)
    with open("bayes_data_mnist.pkl", "wb") as f:
        pickle.dump(bcnn_mnist_size_data, f)
    with open("lenet_data_mnist.pkl", "wb") as f:
        pickle.dump(lenet_mnist_size_data, f)

9
pyproject.toml Normal file
View File

@ -0,0 +1,9 @@
[tool.pylint.'FORMAT']
max-line-length = 110
[tool.pylint.'MESSAGES CONTROL']
disable = ["missing-module-docstring", "missing-function-docstring", "import-error"]
[tool.black]
line-length = 110