Measuring entity relatedness is a fundamental task for many natural language processing and information retrieval applications. Prior work often studies entity relatedness in static settings and an unsupervised manner. However, entities in the real world are often involved in many different relationships, and consequently entity relations are very dynamic over time. In this work, we propose a neural network-based approach for dynamic entity relatedness, leveraging the collective attention as supervision. Our model is capable of learning rich and different entity representations in a joint framework. Through extensive experiments on large-scale datasets, we demonstrate that our method achieves better results than competitive baselines.
%0 Conference Paper
%1 nguyen2018neural
%A Nguyen, Tu Ngoc
%A Tran, Tuan
%A Nejdl, Wolfgang
%B Proceedings of the SIGNLL Conference on Computational Natural Language Learning (CoNLL 2018)
%D 2018
%K alexandria l3s myown sysrelevantforl3s
%R 10.18653/v1/K18-1004
%T A Trio Neural Model for Dynamic Entity Relatedness Ranking
%U https://arxiv.org/abs/1808.08316
%X Measuring entity relatedness is a fundamental task for many natural language processing and information retrieval applications. Prior work often studies entity relatedness in static settings and an unsupervised manner. However, entities in the real world are often involved in many different relationships, and consequently entity relations are very dynamic over time. In this work, we propose a neural network-based approach for dynamic entity relatedness, leveraging the collective attention as supervision. Our model is capable of learning rich and different entity representations in a joint framework. Through extensive experiments on large-scale datasets, we demonstrate that our method achieves better results than competitive baselines.
% CoNLL 2018 paper; DOI resolves to the ACL Anthology record (K18-1004).
% Fixes vs. the raw BibSonomy export: typo-repaired abstract ("network-based",
% "in the real world"), arXiv id moved into proper eprint fields (url kept as a
% convenience alias), publisher given in full instead of the sponsor-field
% abbreviation `organization = {ACL}`, acronyms in booktitle brace-protected,
% and fields aligned with consistently braced values.
@inproceedings{nguyen2018neural,
  abstract      = {Measuring entity relatedness is a fundamental task for many natural language processing and information retrieval applications. Prior work often studies entity relatedness in static settings and an unsupervised manner. However, entities in the real world are often involved in many different relationships, and consequently entity relations are very dynamic over time. In this work, we propose a neural network-based approach for dynamic entity relatedness, leveraging the collective attention as supervision. Our model is capable of learning rich and different entity representations in a joint framework. Through extensive experiments on large-scale datasets, we demonstrate that our method achieves better results than competitive baselines.},
  added-at      = {2018-07-27T14:49:25.000+0200},
  author        = {Nguyen, Tu Ngoc and Tran, Tuan and Nejdl, Wolfgang},
  biburl        = {https://www.bibsonomy.org/bibtex/2688aa8a4e7214a400b72868ce58efcc6/tumeteor},
  booktitle     = {Proceedings of the {SIGNLL} Conference on Computational Natural Language Learning ({CoNLL} 2018)},
  doi           = {10.18653/v1/K18-1004},
  eprint        = {1808.08316},
  archiveprefix = {arXiv},
  interhash     = {1ac6d5b8a7cc0b5ab7e8e575095917cd},
  intrahash     = {688aa8a4e7214a400b72868ce58efcc6},
  keywords      = {alexandria l3s myown sysrelevantforl3s},
  publisher     = {Association for Computational Linguistics},
  timestamp     = {2018-10-26T00:00:23.000+0200},
  title         = {A Trio Neural Model for Dynamic Entity Relatedness Ranking},
  url           = {https://arxiv.org/abs/1808.08316},
  year          = {2018}
}