N. Colombo and V. Vovk. (2020). arXiv:2005.07037. Comment: 8 pages, 2 figures, 4 tables.
Abstract
Efficiency criteria for conformal prediction, such as observed
fuzziness (i.e., the sum of p-values associated with false labels), are
commonly used to evaluate the performance of given conformal predictors.
Here, we investigate whether it is possible to exploit efficiency criteria to
learn classifiers, both conformal predictors and point classifiers, by
using such criteria as training objective functions. The proposed idea is
implemented for the problem of binary classification of hand-written digits. By
choosing a 1-dimensional model class (with one real-valued free parameter), we
can solve the optimization problems through an (approximate) exhaustive search
over (a discrete version of) the parameter space. Our empirical results suggest
that conformal predictors trained by minimizing their observed fuzziness
perform better than conformal predictors trained in the traditional way by
minimizing the prediction error of the corresponding point classifier.
They also have a reasonable performance in terms of their prediction error on
the test set.
%0 Generic
%1 colombo2020training
%A Colombo, Nicolo
%A Vovk, Vladimir
%D 2020
%K conformal learning uncertainty
%T Training conformal predictors
%U http://arxiv.org/abs/2005.07037
%X Efficiency criteria for conformal prediction, such as observed
fuzziness (i.e., the sum of p-values associated with false labels), are
commonly used to evaluate the performance of given conformal predictors.
Here, we investigate whether it is possible to exploit efficiency criteria to
learn classifiers, both conformal predictors and point classifiers, by
using such criteria as training objective functions. The proposed idea is
implemented for the problem of binary classification of hand-written digits. By
choosing a 1-dimensional model class (with one real-valued free parameter), we
can solve the optimization problems through an (approximate) exhaustive search
over (a discrete version of) the parameter space. Our empirical results suggest
that conformal predictors trained by minimizing their observed fuzziness
perform better than conformal predictors trained in the traditional way by
minimizing the prediction error of the corresponding point classifier.
They also have a reasonable performance in terms of their prediction error on
the test set.
% arXiv preprint; identifier carried in eprint/archiveprefix (not in note).
% BibSonomy bookkeeping fields (added-at, biburl, inter/intrahash, timestamp)
% are preserved verbatim; unknown fields are ignored by BibTeX styles.
@misc{colombo2020training,
  abstract      = {Efficiency criteria for conformal prediction, such as \emph{observed
fuzziness} (i.e., the sum of p-values associated with false labels), are
commonly used to \emph{evaluate} the performance of given conformal predictors.
Here, we investigate whether it is possible to exploit efficiency criteria to
\emph{learn} classifiers, both conformal predictors and point classifiers, by
using such criteria as training objective functions. The proposed idea is
implemented for the problem of binary classification of hand-written digits. By
choosing a 1-dimensional model class (with one real-valued free parameter), we
can solve the optimization problems through an (approximate) exhaustive search
over (a discrete version of) the parameter space. Our empirical results suggest
that conformal predictors trained by minimizing their observed fuzziness
perform better than conformal predictors trained in the traditional way by
minimizing the \emph{prediction error} of the corresponding point classifier.
They also have a reasonable performance in terms of their prediction error on
the test set.},
  added-at      = {2020-05-15T14:30:35.000+0200},
  archiveprefix = {arXiv},
  author        = {Colombo, Nicolo and Vovk, Vladimir},
  biburl        = {https://www.bibsonomy.org/bibtex/2d2295aa77a9724f22ebfdbf11f9f9724/kirk86},
  description   = {[2005.07037] Training conformal predictors},
  eprint        = {2005.07037},
  interhash     = {4cab97fcd742aad2aac0ab97131739db},
  intrahash    = {d2295aa77a9724f22ebfdbf11f9f9724},
  keywords      = {conformal learning uncertainty},
  note          = {8 pages, 2 figures, 4 tables},
  timestamp     = {2020-05-15T14:30:35.000+0200},
  title         = {Training conformal predictors},
  url           = {http://arxiv.org/abs/2005.07037},
  year          = {2020}
}