While one of the first steps in many NLP systems is selecting what
pre-trained word embeddings to use, we argue that such a step is better left
for neural networks to figure out by themselves. To that end, we introduce
dynamic meta-embeddings, a simple yet effective method for the supervised
learning of embedding ensembles, which leads to state-of-the-art performance
within the same model class on a variety of tasks. We subsequently show how the
technique can be used to shed new light on the usage of word embeddings in NLP
systems.
Description
[1804.07983] Dynamic Meta-Embeddings for Improved Sentence Representations
%0 Generic
%1 kiela2018dynamic
%A Kiela, Douwe
%A Wang, Changhan
%A Cho, Kyunghyun
%D 2018
%K combination embedding representation sentence
%T Dynamic Meta-Embeddings for Improved Sentence Representations
%U http://arxiv.org/abs/1804.07983
%X While one of the first steps in many NLP systems is selecting what
pre-trained word embeddings to use, we argue that such a step is better left
for neural networks to figure out by themselves. To that end, we introduce
dynamic meta-embeddings, a simple yet effective method for the supervised
learning of embedding ensembles, which leads to state-of-the-art performance
within the same model class on a variety of tasks. We subsequently show how the
technique can be used to shed new light on the usage of word embeddings in NLP
systems.
@comment{Kiela et al., EMNLP 2018 (arXiv:1804.07983). BibSonomy bookkeeping
fields (added-at, biburl, interhash, intrahash, timestamp) retained for
round-tripping; standard styles ignore unknown fields.}
@misc{kiela2018dynamic,
  abstract      = {While one of the first steps in many NLP systems is selecting what
pre-trained word embeddings to use, we argue that such a step is better left
for neural networks to figure out by themselves. To that end, we introduce
dynamic meta-embeddings, a simple yet effective method for the supervised
learning of embedding ensembles, which leads to state-of-the-art performance
within the same model class on a variety of tasks. We subsequently show how the
technique can be used to shed new light on the usage of word embeddings in NLP
systems.},
  added-at      = {2018-10-22T08:59:55.000+0200},
  author        = {Kiela, Douwe and Wang, Changhan and Cho, Kyunghyun},
  biburl        = {https://www.bibsonomy.org/bibtex/252c2774517048ca1c0f9c361cd6bf7ac/thoni},
  description   = {[1804.07983] Dynamic Meta-Embeddings for Improved Sentence Representations},
  eprint        = {1804.07983},
  archiveprefix = {arXiv},
  primaryclass  = {cs.CL},
  interhash     = {e03c595e8a91d66a8515346292df95a2},
  intrahash     = {52c2774517048ca1c0f9c361cd6bf7ac},
  keywords      = {combination embedding representation sentence},
  note          = {EMNLP 2018},
  timestamp     = {2018-10-22T08:59:55.000+0200},
  title         = {Dynamic Meta-Embeddings for Improved Sentence Representations},
  url           = {http://arxiv.org/abs/1804.07983},
  year          = {2018},
}