Commit 5660f28b authored by Jörg Martin

Updated plot_diagonal_uncertainties

parent 407970a7
Branch: dev
#!/bin/bash
# Create the diagonal-uncertainty plot for every data set
for data in {yacht,naval,linear,protein,concrete,kin8nm,wine,quadratic,power,sine,energy,california,cubic}; do
    echo "Plotting diagonal uncertainties for $data"
    python plot_diagonal_uncertainties.py --data "$data"
    echo "done!"
done
@@ -12,6 +12,7 @@ import torch
 import torch.backends.cudnn
 from torch.utils.data import DataLoader
 from tqdm import tqdm
+import matplotlib
 import matplotlib.pyplot as plt
 from EIVArchitectures import Networks
@@ -21,6 +22,12 @@ from EIVGeneral.coverage_metrics import epistemic_coverage, normalized_std,\
 from EIVData.repeated_sampling import repeated_sampling
 from EIVGeneral.linear_evaluation import linear_pred_unc, linear_coverage, compute_par_est_var
+font = {'family' : 'DejaVu Sans',
+        'weight' : 'normal',
+        'size'   : 20}
+matplotlib.rc('font', **font)
 # read in data via --data option
 parser = argparse.ArgumentParser()
 parser.add_argument("--data", help="Loads data", default='yacht')
@@ -36,31 +43,30 @@ with open(os.path.join('configurations',f'noneiv_{data}.json'),'r') as conf_file
     noneiv_conf_dict = json.load(conf_file)
-# assuming normalized data was used
-try:
-    assert eiv_conf_dict['normalize']
-    assert noneiv_conf_dict['normalize']
-except KeyError:
-    pass
+normalize = True
 long_dataname = eiv_conf_dict["long_dataname"]
 short_dataname = eiv_conf_dict["short_dataname"]
 print(f"Evaluating {long_dataname}")
 scale_outputs = False
 load_data = importlib.import_module(f'EIVData.{long_dataname}').load_data
-train_data, _ = load_data()
-input_dim = train_data[0][0].numel()
-output_dim = train_data[0][1].numel()
+try:
+    min_x, max_x = importlib.import_module(f'EIVData.{long_dataname}').input_range
+except AttributeError:
+    max_x = torch.ones((1, input_dim))
+    min_x = -max_x
 try:
     sigma_y = importlib.import_module(f'EIVData.{long_dataname}').y_noise_strength
     design_matrix = importlib.import_module(f'EIVData.{long_dataname}').design_matrix
 except AttributeError:
     sigma_y = None
+train_data, test_data = load_data(normalize=normalize)
+input_dim = train_data[0][0].numel()
+output_dim = train_data[0][1].numel()
 # do computations on cpu
 device = torch.device('cpu')
@@ -136,13 +142,12 @@ def collect_predictions(x, seed=0,
-def create_diagonal(train, number_of_steps=100):
+def create_diagonal(train, min_x, max_x, number_of_steps=100):
     input_shape = train[0][0].shape
     assert len(input_shape) == 1
     input_dim = input_shape[0]
-    ones = 0.75 * torch.ones((1, input_dim))
     t = torch.linspace(start=0, end=1, steps=number_of_steps)[...,None]
-    return (1-t) * ones - t * ones
+    return (1-t) * min_x + t * max_x
@@ -154,7 +159,8 @@ eiv_uncertainties = 0
 number_of_seeds = len(seed_list)
 number_of_steps = 100
 for seed in tqdm(seed_list):
-    x_diagonal = create_diagonal(train=train_data, number_of_steps=number_of_steps)
+    x_diagonal = create_diagonal(train=train_data, min_x=min_x, max_x=max_x,
+            number_of_steps=number_of_steps)
     results = collect_predictions(x_diagonal, seed=seed)
     noneiv_uncertainties += 1/number_of_seeds * results['noneiv']['uncertainties'].mean(dim=-1)
     eiv_uncertainties += 1/number_of_seeds * results['eiv']['uncertainties'].mean(dim=-1)
@@ -167,4 +173,5 @@ plt.fill_between(plot_x, noneiv_uncertainties, color='b', alpha=0.5)
 plt.fill_between(plot_x, eiv_uncertainties, color='r', alpha=0.5)
 plt.xlabel(r'$\lambda$')
 plt.ylabel(r'$u$')
+plt.tight_layout()
 plt.savefig(f'results/figures/diagonal_uncertainties_{data}.pdf')
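
For orientation, the central change is that create_diagonal now sweeps the main diagonal of the data set's declared input range, from min_x to max_x, instead of a fixed segment between -0.75 and +0.75 in every coordinate; the interpolation parameter t becomes the lambda axis of the saved plot, and the unit cube is only the fallback when a data set module exposes no input_range. The following is a minimal, self-contained sketch of that construction only; the helper name diagonal_points, the example input_dim and the omission of the train argument are illustrative assumptions, not the script's exact interface.

import torch

def diagonal_points(min_x, max_x, number_of_steps=100):
    # t runs from 0 to 1 and corresponds to the lambda shown on the x axis
    t = torch.linspace(start=0, end=1, steps=number_of_steps)[..., None]
    # convex combination: row i equals (1 - t_i) * min_x + t_i * max_x
    return (1 - t) * min_x + t * max_x

# example with the fallback range used above: the cube [-1, 1]^input_dim
input_dim = 3                                # hypothetical input dimension
max_x = torch.ones((1, input_dim))
min_x = -max_x
x_diagonal = diagonal_points(min_x, max_x)   # shape (100, 3)
lam = torch.linspace(0, 1, steps=100)        # abscissa for plotting

Passing min_x and max_x into create_diagonal ties the plotted diagonal to each (normalized) data set's input range rather than to a hard-coded sub-interval, which is presumably why the loading code above now also reads input_range per data set.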