In a conversation or a dialogue process, attention and intention play
intrinsic roles. This paper proposes a neural network based approach that
models the attention and intention processes. It essentially consists of three
recurrent networks. The encoder network is a word-level model representing
source side sentences. The intention network is a recurrent network that models
the dynamics of the intention process. The decoder network is a recurrent
network that produces responses to the input from the source side. It is a language
model that is dependent on the intention and has an attention mechanism to
attend to particular source side words, when predicting a symbol in the
response. The model is trained end-to-end without labeling data. Experiments
show that this model generates natural responses to user inputs.
Description
Attention with Intention for a Neural Network Conversation Model
%0 Generic
%1 yao2015attention
%A Yao, Kaisheng
%A Zweig, Geoffrey
%A Peng, Baolin
%D 2015
%K attention-rnn dialogue nn rnn
%T Attention with Intention for a Neural Network Conversation Model
%U http://arxiv.org/abs/1510.08565
%X In a conversation or a dialogue process, attention and intention play
intrinsic roles. This paper proposes a neural network based approach that
models the attention and intention processes. It essentially consists of three
recurrent networks. The encoder network is a word-level model representing
source side sentences. The intention network is a recurrent network that models
the dynamics of the intention process. The decoder network is a recurrent
network that produces responses to the input from the source side. It is a language
model that is dependent on the intention and has an attention mechanism to
attend to particular source side words, when predicting a symbol in the
response. The model is trained end-to-end without labeling data. Experiments
show that this model generates natural responses to user inputs.
@misc{yao2015attention,
  abstract      = {In a conversation or a dialogue process, attention and intention play
intrinsic roles. This paper proposes a neural network based approach that
models the attention and intention processes. It essentially consists of three
recurrent networks. The encoder network is a word-level model representing
source side sentences. The intention network is a recurrent network that models
the dynamics of the intention process. The decoder network is a recurrent
network produces responses to the input from the source side. It is a language
model that is dependent on the intention and has an attention mechanism to
attend to particular source side words, when predicting a symbol in the
response. The model is trained end-to-end without labeling data. Experiments
show that this model generates natural responses to user inputs.},
  added-at      = {2016-12-03T03:37:16.000+0100},
  author        = {Yao, Kaisheng and Zweig, Geoffrey and Peng, Baolin},
  biburl        = {https://www.bibsonomy.org/bibtex/228c64559d10071b10a18a18be277e6f5/jkan},
  description   = {Attention with Intention for a Neural Network Conversation Model},
  eprint        = {1510.08565},
  archiveprefix = {arXiv},
  interhash     = {3e11a159a2b0bbbdf8829d26bdff428b},
  intrahash     = {28c64559d10071b10a18a18be277e6f5},
  keywords      = {attention-rnn dialogue nn rnn},
  note          = {cite arxiv:1510.08565},
  timestamp     = {2016-12-03T03:37:16.000+0100},
  title         = {Attention with Intention for a Neural Network Conversation Model},
  url           = {http://arxiv.org/abs/1510.08565},
  year          = {2015},
}