# evaluate_energy.py
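"""
Evaluate stored non-EiV and EiV networks on the energy efficiency test set
and report the RMSE and the (MC dropout) predictive log-density.
"""
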
import os

import numpy as np
from torch.utils.data import DataLoader

from EIVArchitectures import Networks
from EIVData.energy_efficiency import load_data
from EIVTrainingRoutines import train_and_store

print('Non-EiV')
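# hyperparameters of the stored non-EiV networks, taken from the training script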
from train_noneiv_energy import p, init_std_y_list, seed_list, unscaled_reg, hidden_layers

train_data, test_data = load_data()
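# use a single batch that covers the whole test set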
test_dataloader = DataLoader(test_data, batch_size=max(len(test_data), 800))

seed = seed_list[0]
init_std_y = init_std_y_list[0]
saved_file = os.path.join('saved_networks',
            f'noneiv_energy'\
                    f'_init_std_y_{init_std_y:.3f}_ureg_{unscaled_reg:.1f}'\
                    f'_p_{p:.2f}_seed_{seed}.pkl')

input_dim = train_data[0][0].numel()
output_dim = train_data[0][1].numel()
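# rebuild the network with the stored hyperparameters and load the trained weights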
net = Networks.FNNBer(p=p, init_std_y=init_std_y,
        h=[input_dim, *hidden_layers, output_dim])
train_and_store.open_stored_training(saved_file=saved_file,
        net=net)


# RMSE
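# MC dropout prediction: keep dropout active via net.train(), average over
# several stochastic forward passes and rescale the residuals with the label
# standard deviations before computing the RMSE; the previous train/eval
# state is restored afterwards.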
x,y = next(iter(test_dataloader))
training_state = net.training
net.train()
out, sigmas = net.predict(x, number_of_draws=100, take_average_of_prediction=True)
if len(y.shape) <=1:
    y = y.view((-1,1))
assert y.shape == out.shape
res = y-out
scale = train_data.dataset.std_labels
scaled_res = res * scale.view((1,-1))
scaled_res = scaled_res.detach().cpu().numpy().flatten()
rmse = np.sqrt(np.mean(scaled_res**2))
if training_state:
    net.train()
else:
    net.eval()
print(f'RMSE {rmse:.3f}')


# NLL
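# predictive log-density under MC dropout; the label standard deviations are
# passed via scale_labels and the previous train/eval state is restored afterwards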
x,y = next(iter(test_dataloader))
training_state = net.training
net.train()
logdens = net.predictive_logdensity(x, y, number_of_draws=100,
        decouple_dimensions=True,
        scale_labels=train_data.dataset.std_labels.view((-1,))).mean()
if training_state:
    net.train()
else:
    net.eval()
print(f'Dropout predictive {logdens:.3f}')

print('EiV')
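# hyperparameters of the stored EiV networks, taken from the training script
# (these shadow the non-EiV values imported above)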
from train_eiv_energy import p, init_std_y_list, seed_list, unscaled_reg, hidden_layers, fixed_std_x

train_data, test_data = load_data()
test_dataloader = DataLoader(test_data, batch_size=max(len(test_data), 800))

seed = seed_list[0]
init_std_y = init_std_y_list[0]
saved_file = os.path.join('saved_networks',
            f'eiv_energy'\
                    f'_init_std_y_{init_std_y:.3f}_ureg_{unscaled_reg:.1f}'\
                    f'_p_{p:.2f}_seed_{seed}.pkl')

input_dim = train_data[0][0].numel()
output_dim = train_data[0][1].numel()
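# rebuild the EiV network (including the fixed input noise std) and load the
# trained weights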
net = Networks.FNNEIV(p=p, init_std_y=init_std_y,
        h=[input_dim, *hidden_layers, output_dim], fixed_std_x=fixed_std_x)
train_and_store.open_stored_training(saved_file=saved_file,
        net=net)


# RMSE
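# MC dropout prediction for the EiV model: besides dropout (net.train()) the
# EiV noise sampling is switched on via net.noise_on(); both states are
# restored after the residuals have been rescaled and the RMSE computed.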
x,y = next(iter(test_dataloader))
training_state = net.training
noise_state = net.noise_is_on
net.train()
net.noise_on()
out = net.predict(x, number_of_draws=500, take_average_of_prediction=True)[0]
if len(y.shape) <=1:
    y = y.view((-1,1))
assert y.shape == out.shape
res = y-out
scale = train_data.dataset.std_labels
scaled_res = res * scale.view((1,-1))
scaled_res = scaled_res.detach().cpu().numpy().flatten()
rmse = np.sqrt(np.mean(scaled_res**2)) 
if training_state:
    net.train()
else:
    net.eval()
if noise_state:
    net.noise_on()
else:
    net.noise_off()
print(f'RMSE {rmse:.3f}')


# NLL
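# predictive log-density for the EiV model; label scaling and train/eval
# state handling mirror the non-EiV case above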
x,y = next(iter(test_dataloader))
training_state = net.training
net.train()
logdens = net.predictive_logdensity(x, y, number_of_draws=100,
        decouple_dimensions=True,
        scale_labels=train_data.dataset.std_labels.view((-1,))).mean()
if training_state:
    net.train()
else:
    net.eval()
print(f'Dropout predictive {logdens:.3f}')