When providing probabilistic forecasts for uncertain future events, it is common to strive for calibrated forecasts, that is, the predictive distribution should be compatible with the observed outcomes. Often, there are several competing forecasters of different skill. We extend common notions of calibration where each forecaster is analyzed individually, to stronger notions of cross-calibration where each forecaster is analyzed with respect to the other forecasters. In particular, cross-calibration distinguishes forecasters with respect to increasing information sets. We provide diagnostic tools and statistical tests to assess cross-calibration. The methods are illustrated in simulation examples and applied to probabilistic forecasts for inflation rates by the Bank of England. Computer code and supplementary material (Strähl and Ziegel, 2017a,b) are available online.
%0 Journal Article
%1 Strahl2017CrossCalibration
%A Strähl, Christof
%A Ziegel, Johanna
%D 2017
%I Institute of Mathematical Statistics
%J Electronic Journal of Statistics
%K metaverification probabilistic verification
%N 1
%P 608--639
%R 10.1214/17-ejs1244
%T Cross-calibration of probabilistic forecasts
%U https://doi.org/10.1214/17-ejs1244
%V 11
%X When providing probabilistic forecasts for uncertain future events, it is common to strive for calibrated forecasts, that is, the predictive distribution should be compatible with the observed outcomes. Often, there are several competing forecasters of different skill. We extend common notions of calibration where each forecaster is analyzed individually, to stronger notions of cross-calibration where each forecaster is analyzed with respect to the other forecasters. In particular, cross-calibration distinguishes forecasters with respect to increasing information sets. We provide diagnostic tools and statistical tests to assess cross-calibration. The methods are illustrated in simulation examples and applied to probabilistic forecasts for inflation rates by the Bank of England. Computer code and supplementary material (Strähl and Ziegel, 2017a,b) are available online.
@article{Strahl2017CrossCalibration,
  abstract  = {When providing probabilistic forecasts for uncertain future events, it is common to strive for calibrated forecasts, that is, the predictive distribution should be compatible with the observed outcomes. Often, there are several competing forecasters of different skill. We extend common notions of calibration where each forecaster is analyzed individually, to stronger notions of cross-calibration where each forecaster is analyzed with respect to the other forecasters. In particular, cross-calibration distinguishes forecasters with respect to increasing information sets. We provide diagnostic tools and statistical tests to assess cross-calibration. The methods are illustrated in simulation examples and applied to probabilistic forecasts for inflation rates by the Bank of England. Computer code and supplementary material (Str{\"a}hl and Ziegel, 2017a,b) are available online.},
  added-at  = {2020-11-06T13:13:30.000+0100},
  author    = {Str{\"a}hl, Christof and Ziegel, Johanna},
  biburl    = {https://www.bibsonomy.org/bibtex/25347649beab8549c383d3996cf127c3d/verifier},
  doi       = {10.1214/17-ejs1244},
  interhash = {eacb3eb5c73a4918079c745ae42fd24f},
  intrahash = {5347649beab8549c383d3996cf127c3d},
  journal   = {Electronic Journal of Statistics},
  keywords  = {metaverification probabilistic verification},
  number    = {1},
  pages     = {608--639},
  publisher = {Institute of Mathematical Statistics},
  timestamp = {2020-11-06T13:13:30.000+0100},
  title     = {Cross-calibration of probabilistic forecasts},
  url       = {https://doi.org/10.1214/17-ejs1244},
  volume    = {11},
  year      = {2017},
}