Knowledge graphs are important resources for many artificial intelligence
tasks but often suffer from incompleteness. In this work, we propose to use
pre-trained language models for knowledge graph completion. We treat triples in
knowledge graphs as textual sequences and propose a novel framework named
Knowledge Graph Bidirectional Encoder Representations from Transformer
(KG-BERT) to model these triples. Our method takes the entity and relation
descriptions of a triple as input and computes the scoring function of the
triple with the KG-BERT language model. Experimental results on multiple
benchmark knowledge graphs show that our method achieves state-of-the-art
performance on triple classification, link prediction, and relation prediction
tasks.
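
The scoring idea can be sketched with off-the-shelf tools. Below is a minimal,
hypothetical sketch using the HuggingFace transformers library: it packs the
textual descriptions of head, relation, and tail into one BERT input and reads
a plausibility score from a sequence-classification head. The model name, the
example triple, and the single-segment packing are assumptions for
illustration (the paper assigns distinct segment embeddings to the parts and
fine-tunes on labeled positive and corrupted triples); this is not the
authors' released implementation.

```python
# Minimal sketch of a KG-BERT-style triple scorer.
# Assumes the HuggingFace `transformers` and `torch` packages; the model
# choice and example descriptions are illustrative, not from the paper.
import torch
from transformers import BertTokenizer, BertForSequenceClassification

tokenizer = BertTokenizer.from_pretrained("bert-base-uncased")
# Two labels for triple classification: 0 = implausible, 1 = plausible.
model = BertForSequenceClassification.from_pretrained(
    "bert-base-uncased", num_labels=2
)
model.eval()

def score_triple(head_desc: str, rel_desc: str, tail_desc: str) -> float:
    """Score a (head, relation, tail) triple by packing its textual
    descriptions into one sequence, roughly
        [CLS] head [SEP] relation [SEP] tail [SEP]
    and reading a plausibility probability off the classifier head."""
    text = f"{head_desc} [SEP] {rel_desc} [SEP] {tail_desc}"
    inputs = tokenizer(text, return_tensors="pt",
                       truncation=True, max_length=128)
    with torch.no_grad():
        logits = model(**inputs).logits
    # Probability assigned to the "plausible" label.
    return torch.softmax(logits, dim=-1)[0, 1].item()

# Without fine-tuning, the score is uninformative; training the
# classifier on labeled triples is what makes this useful.
print(score_triple(
    "Steve Jobs, co-founder of Apple",
    "founded",
    "Apple Inc., an American technology company",
))
```

For link prediction, the same scorer would be applied to every candidate head
or tail entity and the candidates ranked by score; relation prediction swaps
the classification head for one label per relation type.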
@misc{yao2019kgbert,
author = {Yao, Liang and Mao, Chengsheng and Luo, Yuan},
keywords = {bert completion graph kgbert knowledge lsx:reading-group},
archiveprefix = {arXiv},
eprint = {1909.03193},
title = {KG-BERT: BERT for Knowledge Graph Completion},
url = {http://arxiv.org/abs/1909.03193},
year = 2019
}