Performing link prediction using knowledge graph embedding models has become
a popular approach for knowledge graph completion. Such models employ a
transformation function that maps nodes via edges into a vector space in order
to measure the likelihood of the links. While mapping the individual nodes, the
structure of subgraphs is also transformed. Most of the embedding models
designed in Euclidean geometry usually support a single transformation type -
often translation or rotation, which is suitable for learning on graphs with
small differences in neighboring subgraphs. However, multi-relational knowledge
graphs often include multiple sub-graph structures in a neighborhood (e.g.
combinations of path and loop structures), which current embedding models do
not capture well. To tackle this problem, we propose a novel KGE model (5*E) in
projective geometry, which supports multiple simultaneous transformations -
specifically inversion, reflection, translation, rotation, and homothety. The
model has several favorable theoretical properties and subsumes the existing
approaches. It outperforms them on the most widely used link prediction
benchmarks.
Description
[2006.04986] 5* Knowledge Graph Embeddings with Projective Transformations
%0 Generic
%1 nayyeri2020knowledge
%A Nayyeri, Mojtaba
%A Vahdati, Sahar
%A Aykul, Can
%A Lehmann, Jens
%D 2020
%K deeplearning embedding geometry graph network neural transformation
%T 5* Knowledge Graph Embeddings with Projective Transformations
%U http://arxiv.org/abs/2006.04986
%X Performing link prediction using knowledge graph embedding models has become
a popular approach for knowledge graph completion. Such models employ a
transformation function that maps nodes via edges into a vector space in order
to measure the likelihood of the links. While mapping the individual nodes, the
structure of subgraphs is also transformed. Most of the embedding models
designed in Euclidean geometry usually support a single transformation type -
often translation or rotation, which is suitable for learning on graphs with
small differences in neighboring subgraphs. However, multi-relational knowledge
graphs often include multiple sub-graph structures in a neighborhood (e.g.
combinations of path and loop structures), which current embedding models do
not capture well. To tackle this problem, we propose a novel KGE model (5*E) in
projective geometry, which supports multiple simultaneous transformations -
specifically inversion, reflection, translation, rotation, and homothety. The
model has several favorable theoretical properties and subsumes the existing
approaches. It outperforms them on the most widely used link prediction
benchmarks.
@misc{nayyeri2020knowledge,
  abstract      = {Performing link prediction using knowledge graph embedding models has become
a popular approach for knowledge graph completion. Such models employ a
transformation function that maps nodes via edges into a vector space in order
to measure the likelihood of the links. While mapping the individual nodes, the
structure of subgraphs is also transformed. Most of the embedding models
designed in Euclidean geometry usually support a single transformation type -
often translation or rotation, which is suitable for learning on graphs with
small differences in neighboring subgraphs. However, multi-relational knowledge
graphs often include multiple sub-graph structures in a neighborhood (e.g.
combinations of path and loop structures), which current embedding models do
not capture well. To tackle this problem, we propose a novel KGE model (5*E) in
projective geometry, which supports multiple simultaneous transformations -
specifically inversion, reflection, translation, rotation, and homothety. The
model has several favorable theoretical properties and subsumes the existing
approaches. It outperforms them on the most widely used link prediction
benchmarks.},
  added-at      = {2021-06-30T15:00:56.000+0200},
  archiveprefix = {arXiv},
  author        = {Nayyeri, Mojtaba and Vahdati, Sahar and Aykul, Can and Lehmann, Jens},
  biburl        = {https://www.bibsonomy.org/bibtex/215b4d71fe528c2e2a71887040544098f/jaeschke},
  description   = {[2006.04986] 5* Knowledge Graph Embeddings with Projective Transformations},
  eprint        = {2006.04986},
  interhash     = {c8118624bec865d77838a8e7c68dae30},
  intrahash     = {15b4d71fe528c2e2a71887040544098f},
  keywords      = {deeplearning embedding geometry graph network neural transformation},
  note          = {Accepted at {AAAI} 2021},
  timestamp     = {2021-06-30T15:00:56.000+0200},
  title         = {5* Knowledge Graph Embeddings with Projective Transformations},
  url           = {http://arxiv.org/abs/2006.04986},
  year          = {2020},
}