The Nonlinear autoregressive exogenous (NARX) model, which predicts the
current value of a time series based upon its previous values as well as the
current and past values of multiple driving (exogenous) series, has been
studied for decades. Despite the fact that various NARX models have been
developed, few of them can capture the long-term temporal dependencies
appropriately and select the relevant driving series to make predictions. In
this paper, we propose a dual-stage attention-based recurrent neural network
(DA-RNN) to address these two issues. In the first stage, we introduce an input
attention mechanism to adaptively extract relevant driving series (a.k.a.,
input features) at each time step by referring to the previous encoder hidden
state. In the second stage, we use a temporal attention mechanism to select
relevant encoder hidden states across all time steps. With this dual-stage
attention scheme, our model can not only make predictions effectively, but can
also be easily interpreted. Thorough empirical studies based upon the SML 2010
dataset and the NASDAQ 100 Stock dataset demonstrate that the DA-RNN can
outperform state-of-the-art methods for time series prediction.
Description
[1704.02971v4] A Dual-Stage Attention-Based Recurrent Neural Network for Time Series Prediction
%0 Generic
%1 qin2017dualstage
%A Qin, Yao
%A Song, Dongjin
%A Chen, Haifeng
%A Cheng, Wei
%A Jiang, Guofei
%A Cottrell, Garrison
%D 2017
%K DeepLearning RNN TimeSeriesPrediction
%T A Dual-Stage Attention-Based Recurrent Neural Network for Time Series
Prediction
%U http://arxiv.org/abs/1704.02971
%X The Nonlinear autoregressive exogenous (NARX) model, which predicts the
current value of a time series based upon its previous values as well as the
current and past values of multiple driving (exogenous) series, has been
studied for decades. Despite the fact that various NARX models have been
developed, few of them can capture the long-term temporal dependencies
appropriately and select the relevant driving series to make predictions. In
this paper, we propose a dual-stage attention-based recurrent neural network
(DA-RNN) to address these two issues. In the first stage, we introduce an input
attention mechanism to adaptively extract relevant driving series (a.k.a.,
input features) at each time step by referring to the previous encoder hidden
state. In the second stage, we use a temporal attention mechanism to select
relevant encoder hidden states across all time steps. With this dual-stage
attention scheme, our model can not only make predictions effectively, but can
also be easily interpreted. Thorough empirical studies based upon the SML 2010
dataset and the NASDAQ 100 Stock dataset demonstrate that the DA-RNN can
outperform state-of-the-art methods for time series prediction.
@misc{qin2017dualstage,
  abstract      = {The Nonlinear autoregressive exogenous (NARX) model, which predicts the
current value of a time series based upon its previous values as well as the
current and past values of multiple driving (exogenous) series, has been
studied for decades. Despite the fact that various NARX models have been
developed, few of them can capture the long-term temporal dependencies
appropriately and select the relevant driving series to make predictions. In
this paper, we propose a dual-stage attention-based recurrent neural network
(DA-RNN) to address these two issues. In the first stage, we introduce an input
attention mechanism to adaptively extract relevant driving series (a.k.a.,
input features) at each time step by referring to the previous encoder hidden
state. In the second stage, we use a temporal attention mechanism to select
relevant encoder hidden states across all time steps. With this dual-stage
attention scheme, our model can not only make predictions effectively, but can
also be easily interpreted. Thorough empirical studies based upon the SML 2010
dataset and the NASDAQ 100 Stock dataset demonstrate that the DA-RNN can
outperform state-of-the-art methods for time series prediction.},
  added-at      = {2020-10-15T10:35:49.000+0200},
  author        = {Qin, Yao and Song, Dongjin and Chen, Haifeng and Cheng, Wei and Jiang, Guofei and Cottrell, Garrison},
  biburl        = {https://www.bibsonomy.org/bibtex/2412aa2d47b2fef90a76abe152b9af8fe/annakrause},
  description   = {[1704.02971v4] A Dual-Stage Attention-Based Recurrent Neural Network for Time Series Prediction},
  eprint        = {1704.02971},
  archiveprefix = {arXiv},
  primaryclass  = {cs.LG},
  interhash     = {63e0bf41e2210a3130be0c7adac3f87b},
  intrahash     = {412aa2d47b2fef90a76abe152b9af8fe},
  keywords      = {DeepLearning RNN TimeSeriesPrediction},
  note          = {Accepted at the International Joint Conference on Artificial Intelligence (IJCAI), 2017},
  timestamp     = {2020-10-15T10:35:49.000+0200},
  title         = {A Dual-Stage Attention-Based Recurrent Neural Network for Time Series Prediction},
  url           = {http://arxiv.org/abs/1704.02971},
  year          = {2017},
}