We introduce a framework for inference in general state-space hidden Markov
models (HMMs) under likelihood misspecification. In particular, we leverage the
loss-theoretic perspective of generalized Bayesian inference (GBI) to define
generalized filtering recursions in HMMs, that can tackle the problem of
inference under model misspecification. In doing so, we arrive at principled
procedures for robust inference against observation contamination through the
$\beta$-divergence. Operationalizing the proposed framework is made possible
via sequential Monte Carlo methods (SMC). The standard particle methods, and
their associated convergence results, are readily generalized to the new
setting. We demonstrate our approach to object tracking and Gaussian process
regression problems, and observe improved performance over standard filtering
algorithms.
Description
[2002.09998] Generalized Bayesian Filtering via Sequential Monte Carlo
%0 Journal Article
%1 boustati2020generalized
%A Boustati, Ayman
%A Akyildiz, Ömer Deniz
%A Damoulas, Theodoros
%A Johansen, Adam
%D 2020
%K bayesian mcmc sampling
%T Generalized Bayesian Filtering via Sequential Monte Carlo
%U http://arxiv.org/abs/2002.09998
%X We introduce a framework for inference in general state-space hidden Markov
models (HMMs) under likelihood misspecification. In particular, we leverage the
loss-theoretic perspective of generalized Bayesian inference (GBI) to define
generalized filtering recursions in HMMs, that can tackle the problem of
inference under model misspecification. In doing so, we arrive at principled
procedures for robust inference against observation contamination through the
$\beta$-divergence. Operationalizing the proposed framework is made possible
via sequential Monte Carlo methods (SMC). The standard particle methods, and
their associated convergence results, are readily generalized to the new
setting. We demonstrate our approach to object tracking and Gaussian process
regression problems, and observe improved performance over standard filtering
algorithms.
@article{boustati2020generalized,
  abstract      = {We introduce a framework for inference in general state-space hidden Markov
models (HMMs) under likelihood misspecification. In particular, we leverage the
loss-theoretic perspective of generalized Bayesian inference (GBI) to define
generalized filtering recursions in HMMs, that can tackle the problem of
inference under model misspecification. In doing so, we arrive at principled
procedures for robust inference against observation contamination through the
$\beta$-divergence. Operationalizing the proposed framework is made possible
via sequential Monte Carlo methods (SMC). The standard particle methods, and
their associated convergence results, are readily generalized to the new
setting. We demonstrate our approach to object tracking and Gaussian process
regression problems, and observe improved performance over standard filtering
algorithms.},
  added-at      = {2020-02-26T14:17:00.000+0100},
  archiveprefix = {arXiv},
  author        = {Boustati, Ayman and Akyildiz, Ömer Deniz and Damoulas, Theodoros and Johansen, Adam},
  biburl        = {https://www.bibsonomy.org/bibtex/2af931fa6b85f90c3e56749cdaa0c01ab/kirk86},
  description   = {[2002.09998] Generalized Bayesian Filtering via Sequential Monte Carlo},
  eprint        = {2002.09998},
  interhash     = {fc9fe51a9304b70a3de949df79964b96},
  intrahash     = {af931fa6b85f90c3e56749cdaa0c01ab},
  keywords      = {bayesian mcmc sampling},
  note          = {cite arxiv:2002.09998},
  timestamp     = {2020-02-26T14:17:00.000+0100},
  title         = {Generalized {Bayesian} Filtering via Sequential {Monte Carlo}},
  url           = {https://arxiv.org/abs/2002.09998},
  year          = {2020}
}