Modified energy summing function
parent 1e37d1f1ae
commit 899e313cd9
@@ -61,12 +61,16 @@ def get_sample_of_gpu():
     #return temp

 def total_watt_consumed():
-    with open(pickle_name, 'rb') as f:
-        x = pickle.load(f)
-    x = np.array(x)
-    x = x[:,0]
-    y = [int(re.findall("\d+",xi)[0]) for xi in x]
-    return sum(y)
+    with (open(pickle_name, "rb")) as file:
+        while True:
+            try:
+                x = pickle.load(file)
+            except EOFError:
+                break
+    x = np.array(x)
+    x = x[:,0]
+    y = [float(re.findall("\d+.\d+",xi)[0]) for xi in x]
+    return sum(y)

 if __name__ == '__main__':
     dataDump = []
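A note on the rewritten reader above: pickle.load raises EOFError once the stream is exhausted, which is what the new while True / try loop relies on, and the regex now captures a decimal value summed as floats rather than ints. As written, each successful load rebinds x, so if the file ever holds more than one dump only the last one reaches the summing step. Below is a minimal accumulating variant, only a sketch under that assumption (the path parameter and function name are hypothetical; the committed code uses the module-level pickle_name):

import pickle
import re

import numpy as np

def total_watt_consumed_all(path):
    # Collect every object pickled into the file, not just the last one.
    records = []
    with open(path, "rb") as f:
        while True:
            try:
                records.append(pickle.load(f))
            except EOFError:
                break
    # For each dump, take the first column and pull the first decimal number
    # per entry, mirroring the float(re.findall(...)) step in the commit.
    total = 0.0
    for rec in records:
        col = np.array(rec)[:, 0]
        total += sum(float(re.findall(r"\d+\.\d+", xi)[0]) for xi in col)
    return total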
@@ -7,9 +7,9 @@ all_args = argparse.ArgumentParser()

 def makeArguments(arguments: ArgumentParser) -> dict:
     all_args.add_argument("-b", "--Bayesian", action="store", dest="b",
-                          type=int, choices=range(1,7), help="Bayesian model of size x")
+                          type=int, choices=range(1,8), help="Bayesian model of size x")
     all_args.add_argument("-f", "--Frequentist", action="store", dest="f",
-                          type=int, choices=range(1,7), help="Frequentist model of size x")
+                          type=int, choices=range(1,8), help="Frequentist model of size x")
     all_args.add_argument("-E", "--EarlyStopping", action="store_true",
                           help="Early Stopping criteria")
     all_args.add_argument("-e", "--EnergyBound", action="store_true",
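For reference, Python's range excludes its upper bound, so widening choices from range(1,7) to range(1,8) lets both flags accept model sizes 1 through 7 instead of 1 through 6. A standalone sketch (a hypothetical parser, not the project's own) illustrating the accepted values:

import argparse

parser = argparse.ArgumentParser()
# choices=range(1, 8) admits the integers 1..7; 8 itself is rejected.
parser.add_argument("-b", type=int, choices=range(1, 8))
print(parser.parse_args(["-b", "7"]))   # Namespace(b=7)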
@@ -3,7 +3,7 @@
 import os
 n_epochs = 100
 sens = 1e-9
-energy_thrs = 100000
+energy_thrs = 10000
 acc_thrs = 0.99
 lr = 0.001
 num_workers = 4
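The lowered energy_thrs is presumably the bound consulted by the energyBound check used in the training loops further down, compared against the running total from total_watt_consumed(). That helper's definition is not part of this commit, so the following is only an assumed sketch of the relationship:

import amd_sample_draw
import config_bayesian as cfg

def energyBound(threshold=cfg.energy_thrs):
    # Assumed behaviour: signal "stop" (return 1) once the summed wattage
    # samples recorded so far exceed the configured threshold.
    consumed = amd_sample_draw.total_watt_consumed()
    print("Energy used so far: {:.2f} (bound {})".format(consumed, threshold))
    return 1 if consumed >= threshold else 0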
@@ -1,5 +1,4 @@
 #!/bin/env bash

 #powerstat -z 0.5 1000000 > $1
-powerstat -D > $1
-
+sudo powerstat -D -z 0.5 10000000 > $1
@@ -8,11 +8,11 @@ import pickle
 import metrics
 import argparse
 import numpy as np
-import amd_sample_draw
 import config_bayesian as cfg
 from datetime import datetime
 from torch.nn import functional as F
 from torch.optim import Adam, lr_scheduler
+import amd_sample_draw
 from models.BayesianModels.BayesianLeNet import BBBLeNet
 from models.BayesianModels.BayesianAlexNet import BBBAlexNet
 from models.BayesianModels.Bayesian3Conv3FC import BBB3Conv3FC
@@ -124,8 +124,8 @@ def run(dataset, net_type):
     optimizer = Adam(net.parameters(), lr=lr_start)
     lr_sched = lr_scheduler.ReduceLROnPlateau(optimizer, patience=6, verbose=True)
     #valid_loss_max = np.Inf
-    if stp == 2:
-        early_stop = []
+    #if stp == 2:
+    early_stop = []
     train_data = []
     for epoch in range(n_epochs):  # loop over the dataset multiple times

@@ -139,25 +139,26 @@ def run(dataset, net_type):
             epoch, train_loss, train_acc, valid_loss, valid_acc, train_kl))

         if stp == 2:
-            print('Using early stopping')
-            if earlyStopping(early_stop,train_acc,cfg.sens) == None:
+            #print('Using early stopping')
+            if earlyStopping(early_stop,train_acc,epoch,cfg.sens) == 1:
                 break
         elif stp == 3:
-            print('Using energy bound')
-            if energyBound(cfg.energy_thrs) == None:
+            #print('Using energy bound')
+            if energyBound(cfg.energy_thrs) == 1:
                 break
         elif stp == 4:
-            print('Using accuracy bound')
-            if accuracyBound(cfg.acc_thrs) == None:
+            #print('Using accuracy bound')
+            if accuracyBound(cfg.acc_thrs) == 1:
                 break
         else:
             print('Training for {} epochs'.format(cfg.n_epochs))

         if sav == 1:
             # save model when finished
-            if epoch == n_epochs:
+            if epoch == cfg.n_epochs-1:
                 torch.save(net.state_dict(), ckpt_name)


+
     with open("bayes_exp_data_"+str(cfg.wide)+".pkl", 'wb') as f:
         pickle.dump(train_data, f)
@@ -90,8 +90,8 @@ def run(dataset, net_type):
     optimizer = Adam(net.parameters(), lr=lr)
     lr_sched = lr_scheduler.ReduceLROnPlateau(optimizer, patience=6, verbose=True)
     #valid_loss_min = np.Inf
-    if stp == 2:
-        early_stop = []
+    #if stp == 2:
+    early_stop = []
     train_data = []
     for epoch in range(1, n_epochs+1):

@@ -107,14 +107,17 @@ def run(dataset, net_type):
             epoch, train_loss, train_acc, valid_loss, valid_acc))

         if stp == 2:
-            print('Using early stopping')
-            earlyStopping(early_stop,train_acc,cfg.sens)
+            #print('Using early stopping')
+            if earlyStopping(early_stop,valid_acc,epoch,cfg.sens) == 1:
+                break
         elif stp == 3:
-            print('Using energy bound')
-            energyBound(cfg.energy_thrs)
+            #print('Using energy bound')
+            if energyBound(cfg.energy_thrs) == 1:
+                break
         elif stp == 4:
-            print('Using accuracy bound')
-            accuracyBound(cfg.acc_thrs)
+            #print('Using accuracy bound')
+            if accuracyBound(train_acc,0.70) == 1:
+                break
         else:
             print('Training for {} epochs'.format(cfg.n_epochs))

@@ -133,7 +136,7 @@ if __name__ == '__main__':
     print("Initial Time =", current_time)
     parser = argparse.ArgumentParser(description = "PyTorch Frequentist Model Training")
     parser.add_argument('--net_type', default='lenet', type=str, help='model')
-    parser.add_argument('--dataset', default='MNIST', type=str, help='dataset = [MNIST/CIFAR10/CIFAR100]')
+    parser.add_argument('--dataset', default='CIFAR10', type=str, help='dataset = [MNIST/CIFAR10/CIFAR100]')
     args = parser.parse_args()
     run(args.dataset, args.net_type)
     now = datetime.now()
@@ -1,17 +1,17 @@
 import pickle

 gpu_data = []
-with (open("bayes_wattdata_1.pkl", "rb")) as openfile:
+with (open("freq_wattdata_1.0.pkl", "rb")) as openfile:
     while True:
         try:
-            gpu_data.append(pickle.load(openfile))
+            gpu_data = pickle.load(openfile)
         except EOFError:
             break

-exp_data = []
-with (open("bayes_exp_data_1.pkl", "rb")) as openfile:
-    while True:
-        try:
-            exp_data.append(pickle.load(openfile))
-        except EOFError:
-            break
+#exp_data = []
+#with (open("bayes_exp_data_6.pkl", "rb")) as openfile:
+#    while True:
+#        try:
+#            exp_data = pickle.load(openfile)
+#        except EOFError:
+#            break