Commit 9b4b8071
authored 3 years ago by Jörg Martin

coverage_metrices added

parent ddee5e7e
Changes: 2 changed files with 117 additions and 2 deletions

EIVPackage/EIVArchitectures/Networks.py    +36 −2
EIVPackage/EIVGeneral/coverage_metrices.py +81 −0
EIVPackage/EIVArchitectures/Networks.py  +36 −2
...
@@ -249,12 +249,13 @@ class FNNEIV(nn.Module):
                number_of_parameter_chunks=number_of_parameter_chunks,
                remove_graph=remove_graph,
                take_average_of_prediction=False)
        # Add "repetition" dimension to y and sigmas
        y = y[:,None,...]
        sigmas = sigmas[:,None,...]
        if len(y.shape) <= 2:
            # add an output axis if necessary
            y = y[...,None]
        if len(sigmas.shape) <= 2:
            sigmas = sigmas[...,None]
        # squeeze last dimensions into one
        y = y.view((*y.shape[:2], -1))
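The only functional change in this hunk is that the output-axis handling for `y` and `sigmas` is now split into two separate checks. A minimal shape walkthrough of the reshaping steps (the tensor below is illustrative only, not part of the commit):

import torch

y = torch.randn(8)                # a batch of 8 scalar observations
y = y[:, None, ...]               # add "repetition" dimension -> (8, 1)
if len(y.shape) <= 2:
    # add an output axis if necessary
    y = y[..., None]              # -> (8, 1, 1)
y = y.view((*y.shape[:2], -1))    # squeeze trailing axes into one -> (8, 1, 1)
print(y.shape)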
...
@@ -285,6 +286,39 @@ class FNNEIV(nn.Module):
        else:
            return predictive_log_density_values

    def predict_mean_and_unc(self, x, number_of_draws=[100,5],
            number_of_parameter_chunks=None, remove_graph=True):
        """
        Take the mean and standard deviation over `number_of_draws` forward
        passes and return them together with the predicted sigmas.
        **Note**: This method neither touches the input noise nor Dropout.
        The corresponding setting is left to the user!
        :param x: A torch.tensor, the input
        :param number_of_draws: An integer or a list. An integer
        `number_of_draws` will be converted internally to
        `[number_of_draws, 1]`. Numbers of draws to obtain from x via
        parameter sampling (first element) and noise input sampling (second
        element).
        :param number_of_parameter_chunks: An integer or None (default). If
        None, the second element of `number_of_draws` will be taken (and
        will thus be identical to 1 if `number_of_draws` is an integer, see
        above). Samples in the parameter space will be divided into
        `number_of_parameter_chunks` chunks when collected. Can be used to
        reduce the memory usage.
        :param remove_graph: If True (default) the output will be detached
        to save memory.
        :return: mean, std, sigmas
        """
        out, sigmas = self.predict(x=x,
                number_of_draws=number_of_draws,
                number_of_parameter_chunks=number_of_parameter_chunks,
                remove_graph=remove_graph,
                take_average_of_prediction=False)
        mean = torch.mean(out, dim=1)
        std = torch.std(out, dim=1)
        return mean, std, sigmas


class FNNBer(nn.Module):
    """
...
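A minimal usage sketch for the new method, assuming `net` is an already trained FNNEIV instance and `x_test` a matching input batch (both names, and the input width of 10, are illustrative and not part of this commit):

import torch

x_test = torch.randn(32, 10)          # hypothetical test batch
mean, std, sigmas = net.predict_mean_and_unc(
        x_test,
        number_of_draws=[100, 5],     # 100 parameter draws, 5 input-noise draws
        number_of_parameter_chunks=None,
        remove_graph=True)
# mean/std summarize the sampled forward passes; sigmas are the predicted
# aleatoric standard deviations returned by self.predict.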
EIVPackage/EIVGeneral/coverage_metrices.py  0 → 100644  +81 −0
"""
Computes the coverage of observations by predictions and uncertainty and
similar quantities. This module contains three functions
- epistemic_coverage: Computes the coverage of observations by predictions and
their epistemic uncertainty and the averaged theoretical ground truth. If
`normalized` is set to True, residuals are normalized and a fixed interval
length is considered.
- normalized_std: Computes the standard deviation of the normalized residuals.
"""
import
numpy
as
np
import
scipy.stats
import
torch

def logical_and_along_dimension(x, dim=-1, keepdim=False):
    """
    For a boolean tensor `x` perform a logical AND along the axis `dim`.
    If `keepdim` is True the axis `dim` will be kept (defaults to False).
    """
    return torch.prod(x.to(torch.bool), dim=dim, keepdim=keepdim).to(torch.bool)

def multivariate_interval_length(dim, q=0.95):
    """
    Returns the half side length of a multivariate cube, symmetrically
    centered around 0, such that its measure under a standard normal
    distribution equals `q`.
    :param dim: A non-negative integer, the dimension.
    :param q: Float, should be between 0 and 1. Defaults to 0.95.
    """
    # use the independence of the components to reduce to a univariate quantile
    univariate_quantile = 0.5 * (1 + q**(1/dim))
    return scipy.stats.norm.ppf(univariate_quantile)
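The reduction used above follows from the independence of the components: the cube [-a, a]^dim has standard normal measure (2*Phi(a) - 1)^dim, so requiring this to equal q gives Phi(a) = (1 + q^(1/dim))/2. A quick numeric sanity check (illustrative only, not part of the commit):

import scipy.stats

dim, q = 3, 0.95
a = multivariate_interval_length(dim, q=q)
per_axis = 2 * scipy.stats.norm.cdf(a) - 1   # P(|Z| <= a) for one component
print(per_axis**dim)                          # should be ~0.95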

def epistemic_coverage(mean_unc, y, q=0.95, normalize_errors=False):
    """
    Computes the coverage of the observations `y` by the predictions `mean`
    and their epistemic uncertainty `epis_unc`, together with the averaged
    theoretical ground truth. If `normalize_errors` is True, the residuals
    are normalized by the total uncertainty and a fixed interval length is
    used, in which case the theoretical coverage is `q` itself.
    :param mean_unc: A triple of torch.tensors (mean, epis_unc, aleat_unc).
    :param y: A torch.tensor, the observations.
    :param q: Float, the target coverage level. Defaults to 0.95.
    :param normalize_errors: Boolean, defaults to False.
    :return: numerical_coverage, theoretical_coverage
    """
    mean, epis_unc, aleat_unc = mean_unc
    assert epis_unc.shape == aleat_unc.shape
    assert mean.shape == epis_unc.shape
    # Add feature dimension to y if missing
    if len(y.shape) <= 1:
        y = y.view((-1, 1))
    assert y.shape == mean.shape
    # fix interval based on epis_unc
    interval_length = multivariate_interval_length(dim=y.shape[1], q=q) \
            * epis_unc
    total_unc = torch.sqrt(epis_unc**2 + aleat_unc**2)
    # numerical computation
    errors = mean - y
    if normalize_errors:
        assert errors.shape == total_unc.shape
        errors /= total_unc
    check_if_in_interval = logical_and_along_dimension(
            torch.abs(errors) <= interval_length, dim=1)
    numerical_coverage = torch.mean(
            check_if_in_interval.to(torch.float32)
            ).cpu().detach().item()
    # theoretical computation
    if not normalize_errors:
        cdf_args = (interval_length/total_unc).detach().cpu().numpy()
        cdf_values = scipy.stats.norm.cdf(cdf_args)
        prob_values = 2*cdf_values - 1
        assert len(cdf_values.shape) == 2
        theoretical_coverage = np.mean(np.prod(prob_values, axis=1)).item()
    else:
        theoretical_coverage = q
    return numerical_coverage, theoretical_coverage
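An illustrative call with synthetic tensors (all names are hypothetical, and the import path is only assumed from the file location): drawing the errors at the total-uncertainty scale makes the numerical and theoretical coverage roughly agree.

import torch
from EIVGeneral.coverage_metrices import epistemic_coverage  # assumed path

n, d = 10000, 2
y = torch.randn(n, d)
epis_unc = 0.1 * torch.ones(n, d)
aleat_unc = 0.05 * torch.ones(n, d)
total_unc = torch.sqrt(epis_unc**2 + aleat_unc**2)
mean = y + total_unc * torch.randn(n, d)   # errors at the total-unc scale
num_cov, theo_cov = epistemic_coverage((mean, epis_unc, aleat_unc), y, q=0.95)
print(num_cov, theo_cov)                   # the two values should be close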

def normalized_std(mean_unc, y):
    """
    Computes the standard deviation of the residuals (mean - y), normalized
    by the total uncertainty, averaged over the feature dimension.
    :param mean_unc: A triple of torch.tensors (mean, epis_unc, aleat_unc).
    :param y: A torch.tensor, the observations.
    :return: A float, the averaged standard deviation.
    """
    mean, epis_unc, aleat_unc = mean_unc
    assert epis_unc.shape == aleat_unc.shape
    assert mean.shape == epis_unc.shape
    # Add feature dimension to y if missing
    if len(y.shape) <= 1:
        y = y.view((-1, 1))
    assert y.shape == mean.shape
    total_unc = torch.sqrt(epis_unc**2 + aleat_unc**2)
    # numerical computation
    errors = mean - y
    assert errors.shape == total_unc.shape
    errors /= total_unc
    return torch.mean(torch.std(errors, dim=0)).item()
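The triple returned by the new `predict_mean_and_unc` in Networks.py maps naturally onto the `mean_unc` argument consumed here. An end-to-end sketch, again with hypothetical names `net`, `x_test`, `y_test` (depending on the shape `self.predict` gives `sigmas`, an averaging step over the draw dimension may be needed first):

mean, std, sigmas = net.predict_mean_and_unc(x_test, number_of_draws=[100, 5])
# std plays the role of the epistemic, sigmas of the aleatoric uncertainty
num_cov, theo_cov = epistemic_coverage((mean, std, sigmas), y_test, q=0.95)
res_std = normalized_std((mean, std, sigmas), y_test)
print(num_cov, theo_cov, res_std)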
Jörg Martin @JoergMartin mentioned in issue #2 (closed) · 3 years ago