From 7d59b862c18e2966a580b8ad4d61327ecf41242e Mon Sep 17 00:00:00 2001
From: Jim Martens
Date: Wed, 6 Mar 2019 15:06:18 +0100
Subject: [PATCH] Added paper from Kendall and Gal

Signed-off-by: Jim Martens
---
 ma.bib | 16 +++++++++++++++-
 1 file changed, 15 insertions(+), 1 deletion(-)

diff --git a/ma.bib b/ma.bib
index 038cafb..3deef8d 100644
--- a/ma.bib
+++ b/ma.bib
@@ -131,7 +131,6 @@ and 17.0% which is considerably better than the previous state-of-the-art. The n
   volume = {30},
   publisher = {Curran Associates, Inc.},
   pages = {6402--6413},
-  url = {http://papers.nips.cc/paper/7219-simple-and-scalable-predictive-uncertainty-estimation-using-deep-ensembles.pdf},
   abstract = {Deep neural networks (NNs) are powerful black box predictors that have recently achieved impressive performance on a wide spectrum of tasks. Quantifying predictive uncertainty in NNs is a challenging and yet unsolved problem. Bayesian NNs, which learn a distribution over weights, are currently the state-of-the-art for estimating predictive uncertainty; however these require significant modifications to the training procedure and are computationally expensive compared to standard (non-Bayesian) NNs. We propose an alternative to Bayesian NNs that is simple to implement, readily parallelizable, requires very little hyperparameter tuning, and yields high quality predictive uncertainty estimates. Through a series of experiments on classification and regression benchmarks, we demonstrate that our method produces well-calibrated uncertainty estimates which are as good or better than approximate Bayesian NNs. To assess robustness to dataset shift, we evaluate the predictive uncertainty on test examples from known and unknown distributions, and show that our method is able to express higher uncertainty on out-of-distribution examples. We demonstrate the scalability of our method by evaluating predictive uncertainty estimates on ImageNet.},
   file = {:/home/jim/Documents/Studium/MA/Literatur/08_simple-and-scalable-predictive-uncertainty-estimation-using-deep-ensembles_lakshminarayanan.pdf:PDF},
   owner = {jim},
@@ -609,4 +608,19 @@ to construct explicit models for non-normal classes. Application includes infere
   timestamp = {2019.02.20},
 }
 
+@InCollection{Kendall2017,
+  author = {Kendall, Alex and Gal, Yarin},
+  title = {What Uncertainties Do We Need in Bayesian Deep Learning for Computer Vision?},
+  booktitle = {Advances in Neural Information Processing Systems},
+  year = {2017},
+  editor = {I. Guyon and U. V. Luxburg and S. Bengio and H. Wallach and R. Fergus and S. Vishwanathan and R. Garnett},
+  volume = {30},
+  publisher = {Curran Associates, Inc.},
+  pages = {5574--5584},
+  abstract = {There are two major types of uncertainty one can model. Aleatoric uncertainty captures noise inherent in the observations. On the other hand, epistemic uncertainty accounts for uncertainty in the model - uncertainty which can be explained away given enough data. Traditionally it has been difficult to model epistemic uncertainty in computer vision, but with new Bayesian deep learning tools this is now possible. We study the benefits of modeling epistemic vs. aleatoric uncertainty in Bayesian deep learning models for vision tasks. For this we present a Bayesian deep learning framework combining input-dependent aleatoric uncertainty together with epistemic uncertainty. We study models under the framework with per-pixel semantic segmentation and depth regression tasks. Further, our explicit uncertainty formulation leads to new loss functions for these tasks, which can be interpreted as learned attenuation. This makes the loss more robust to noisy data, also giving new state-of-the-art results on segmentation and depth regression benchmarks.},
+  file = {:/home/jim/Documents/Studium/MA/Literatur/39_uncertainties-bayesian-deep-learning.pdf:PDF},
+  owner = {jim},
+  timestamp = {2019.03.06},
+}
+
 @Comment{jabref-meta: databaseType:biblatex;}
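
For reference, the deep-ensembles method in the Lakshminarayanan et al. abstract above pools the predictions of several independently trained networks. A minimal sketch of that pooling for regression, assuming each member outputs a Gaussian mean and variance (the paper treats the ensemble as a uniform mixture and moment-matches it); names and numbers below are illustrative, not the paper's code:

import numpy as np

def ensemble_predict(means, variances):
    # Treat the M member predictions as a uniform Gaussian mixture and
    # moment-match it: pooled mean is the average of the member means,
    # pooled variance is E[sigma_i^2 + mu_i^2] - mu^2.
    mu = np.mean(means, axis=0)
    var = np.mean(variances + means ** 2, axis=0) - mu ** 2
    return mu, var

# Illustrative numbers: five members predicting one scalar. Disagreement
# between member means inflates the pooled variance above the average
# member variance (~0.048 -> ~0.068).
member_means = np.array([0.9, 1.1, 1.0, 0.8, 1.2])
member_vars = np.array([0.05, 0.04, 0.06, 0.05, 0.04])
print(ensemble_predict(member_means, member_vars))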
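
Likewise, the "learned attenuation" in the Kendall2017 abstract refers to a heteroscedastic regression loss in which the network predicts a log-variance s = log(sigma^2) alongside each output. A minimal sketch under that assumption, with illustrative names rather than the authors' code:

import numpy as np

def attenuated_mse(y_true, y_pred, log_var):
    # Per-output predicted log-variance s = log(sigma^2). The exp(-s)
    # factor down-weights the squared error on inputs the network flags
    # as noisy; the +s/2 term stops it from claiming high noise everywhere.
    return np.mean(0.5 * np.exp(-log_var) * (y_true - y_pred) ** 2
                   + 0.5 * log_var)

# Illustrative numbers: the same residual costs less when the network
# also predicts a larger log-variance for that input.
print(attenuated_mse(np.array([1.0]), np.array([3.0]), np.array([0.0])))  # 2.0
print(attenuated_mse(np.array([1.0]), np.array([3.0]), np.array([2.0])))  # ~1.27

Predicting s instead of sigma^2 directly keeps the loss numerically stable, since exp(-s) is always positive and no variance constraint is needed.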