Generating texts which express complex ideas spanning multiple sentences
requires a structured representation of their content (document plan), but
these representations are prohibitively expensive to manually produce. In this
work, we address the problem of generating coherent multi-sentence texts from
the output of an information extraction system, and in particular a knowledge
graph. Graphical knowledge representations are ubiquitous in computing, but
pose a significant challenge for text generation techniques due to their
non-hierarchical nature, collapsing of long-distance dependencies, and
structural variety. We introduce a novel graph transforming encoder which can
leverage the relational structure of such knowledge graphs without imposing
linearization or hierarchical constraints. Incorporated into an encoder-decoder
setup, we provide an end-to-end trainable system for graph-to-text generation
that we apply to the domain of scientific text. Automatic and human evaluations
show that our technique produces more informative texts which exhibit better
document structure than competitive encoder-decoder methods.
Description
Text Generation from Knowledge Graphs with Graph Transformers
%0 Generic
%1 koncelkedziorski2019generation
%A Koncel-Kedziorski, Rik
%A Bekal, Dhanush
%A Luan, Yi
%A Lapata, Mirella
%A Hajishirzi, Hannaneh
%D 2019
%K gnn graphnetworks haifa tamar textgeneration
%T Text Generation from Knowledge Graphs with Graph Transformers
%U http://arxiv.org/abs/1904.02342
%X Generating texts which express complex ideas spanning multiple sentences
requires a structured representation of their content (document plan), but
these representations are prohibitively expensive to manually produce. In this
work, we address the problem of generating coherent multi-sentence texts from
the output of an information extraction system, and in particular a knowledge
graph. Graphical knowledge representations are ubiquitous in computing, but
pose a significant challenge for text generation techniques due to their
non-hierarchical nature, collapsing of long-distance dependencies, and
structural variety. We introduce a novel graph transforming encoder which can
leverage the relational structure of such knowledge graphs without imposing
linearization or hierarchical constraints. Incorporated into an encoder-decoder
setup, we provide an end-to-end trainable system for graph-to-text generation
that we apply to the domain of scientific text. Automatic and human evaluations
show that our technique produces more informative texts which exhibit better
document structure than competitive encoder-decoder methods.
@misc{koncelkedziorski2019generation,
  abstract      = {Generating texts which express complex ideas spanning multiple sentences
requires a structured representation of their content (document plan), but
these representations are prohibitively expensive to manually produce. In this
work, we address the problem of generating coherent multi-sentence texts from
the output of an information extraction system, and in particular a knowledge
graph. Graphical knowledge representations are ubiquitous in computing, but
pose a significant challenge for text generation techniques due to their
non-hierarchical nature, collapsing of long-distance dependencies, and
structural variety. We introduce a novel graph transforming encoder which can
leverage the relational structure of such knowledge graphs without imposing
linearization or hierarchical constraints. Incorporated into an encoder-decoder
setup, we provide an end-to-end trainable system for graph-to-text generation
that we apply to the domain of scientific text. Automatic and human evaluations
show that our technique produces more informative texts which exhibit better
document structure than competitive encoder-decoder methods.},
  added-at      = {2020-01-23T10:53:58.000+0100},
  author        = {Koncel-Kedziorski, Rik and Bekal, Dhanush and Luan, Yi and Lapata, Mirella and Hajishirzi, Hannaneh},
  biburl        = {https://www.bibsonomy.org/bibtex/2930da295eabe34c9f7e7e6d9c4d2aee7/albinzehe},
  description   = {Text Generation from Knowledge Graphs with Graph Transformers},
  eprint        = {1904.02342},
  archiveprefix = {arXiv},
  interhash     = {56793527572719210ea0be668a423e53},
  intrahash     = {930da295eabe34c9f7e7e6d9c4d2aee7},
  keywords      = {gnn graphnetworks haifa tamar textgeneration},
  note          = {Accepted as a long paper at {NAACL} 2019},
  timestamp     = {2020-01-23T10:53:58.000+0100},
  title         = {Text Generation from Knowledge Graphs with Graph Transformers},
  url           = {http://arxiv.org/abs/1904.02342},
  year          = {2019},
}