We present deep communicating agents in an encoder-decoder architecture to
address the challenges of representing a long document for abstractive
summarization. With deep communicating agents, the task of encoding a long text
is divided across multiple collaborating agents, each in charge of a subsection
of the input text. These encoders are connected to a single decoder, trained
end-to-end using reinforcement learning to generate a focused and coherent
summary. Empirical results demonstrate that multiple communicating encoders
lead to a higher quality summary compared to several strong baselines,
including those based on a single encoder or multiple non-communicating
encoders.
Description
[1803.10357] Deep Communicating Agents for Abstractive Summarization
%0 Generic
%1 celikyilmaz2018communicating
%A Celikyilmaz, Asli
%A Bosselut, Antoine
%A He, Xiaodong
%A Choi, Yejin
%D 2018
%K deepgeneration naacl2018 neuralnet rnn
%T Deep Communicating Agents for Abstractive Summarization
%U http://arxiv.org/abs/1803.10357
%X We present deep communicating agents in an encoder-decoder architecture to
address the challenges of representing a long document for abstractive
summarization. With deep communicating agents, the task of encoding a long text
is divided across multiple collaborating agents, each in charge of a subsection
of the input text. These encoders are connected to a single decoder, trained
end-to-end using reinforcement learning to generate a focused and coherent
summary. Empirical results demonstrate that multiple communicating encoders
lead to a higher quality summary compared to several strong baselines,
including those based on a single encoder or multiple non-communicating
encoders.
@misc{celikyilmaz2018communicating,
  abstract      = {We present deep communicating agents in an encoder-decoder architecture to
address the challenges of representing a long document for abstractive
summarization. With deep communicating agents, the task of encoding a long text
is divided across multiple collaborating agents, each in charge of a subsection
of the input text. These encoders are connected to a single decoder, trained
end-to-end using reinforcement learning to generate a focused and coherent
summary. Empirical results demonstrate that multiple communicating encoders
lead to a higher quality summary compared to several strong baselines,
including those based on a single encoder or multiple non-communicating
encoders.},
  added-at      = {2018-06-01T18:23:07.000+0200},
  archiveprefix = {arXiv},
  author        = {Celikyilmaz, Asli and Bosselut, Antoine and He, Xiaodong and Choi, Yejin},
  biburl        = {https://www.bibsonomy.org/bibtex/2b2d13cc39d62b6219570b55be363fdb0/albinzehe},
  description   = {[1803.10357] Deep Communicating Agents for Abstractive Summarization},
  eprint        = {1803.10357},
  interhash     = {724354a0dcbc7146d1787ddc1b87ea07},
  intrahash     = {b2d13cc39d62b6219570b55be363fdb0},
  keywords      = {deepgeneration naacl2018 neuralnet rnn},
  note          = {Accepted for publication at NAACL 2018},
  timestamp     = {2018-06-01T18:23:07.000+0200},
  title         = {Deep Communicating Agents for Abstractive Summarization},
  url           = {http://arxiv.org/abs/1803.10357},
  year          = {2018}
}