We present StarSpace, a general-purpose neural embedding model that can solve
a wide variety of problems: labeling tasks such as text classification, ranking
tasks such as information retrieval/web search, collaborative filtering-based
or content-based recommendation, embedding of multi-relational graphs, and
learning word, sentence or document level embeddings. In each case the model
works by embedding those entities comprised of discrete features and comparing
them against each other -- learning similarities dependent on the task.
Empirical results on a number of tasks show that StarSpace is highly
competitive with existing methods, whilst also being generally applicable to
new cases where those methods are not.
%0 Generic
%1 wu2017starspace
%A Wu, Ledell
%A Fisch, Adam
%A Chopra, Sumit
%A Adams, Keith
%A Bordes, Antoine
%A Weston, Jason
%D 2017
%K deep_learning embedding
%T StarSpace: Embed All The Things!
%U http://arxiv.org/abs/1709.03856
%X We present StarSpace, a general-purpose neural embedding model that can solve
a wide variety of problems: labeling tasks such as text classification, ranking
tasks such as information retrieval/web search, collaborative filtering-based
or content-based recommendation, embedding of multi-relational graphs, and
learning word, sentence or document level embeddings. In each case the model
works by embedding those entities comprised of discrete features and comparing
them against each other -- learning similarities dependent on the task.
Empirical results on a number of tasks show that StarSpace is highly
competitive with existing methods, whilst also being generally applicable to
new cases where those methods are not.
@misc{wu2017starspace,
  abstract      = {We present StarSpace, a general-purpose neural embedding model that can solve
a wide variety of problems: labeling tasks such as text classification, ranking
tasks such as information retrieval/web search, collaborative filtering-based
or content-based recommendation, embedding of multi-relational graphs, and
learning word, sentence or document level embeddings. In each case the model
works by embedding those entities comprised of discrete features and comparing
them against each other -- learning similarities dependent on the task.
Empirical results on a number of tasks show that StarSpace is highly
competitive with existing methods, whilst also being generally applicable to
new cases where those methods are not.},
  added-at      = {2017-09-16T15:07:22.000+0200},
  author        = {Wu, Ledell and Fisch, Adam and Chopra, Sumit and Adams, Keith and Bordes, Antoine and Weston, Jason},
  biburl        = {https://www.bibsonomy.org/bibtex/267aa5262d32b09a79e591e516b55d329/crack521},
  description   = {StarSpace: Embed All The Things!},
  eprint        = {1709.03856},
  archiveprefix = {arXiv},
  primaryclass  = {cs.CL},
  interhash     = {e83438f3dcdc19a3db05153e58a5c6b6},
  intrahash     = {67aa5262d32b09a79e591e516b55d329},
  keywords      = {deep_learning embedding},
  timestamp     = {2017-09-16T15:07:22.000+0200},
  title         = {{StarSpace}: Embed All The Things!},
  url           = {http://arxiv.org/abs/1709.03856},
  year          = {2017},
}