Many Collaborative Filtering (CF) algorithms are item-based in the sense that
they analyze item-item relations in order to produce item similarities.
Recently, several works in the field of Natural Language Processing suggested
to learn a latent representation of words using neural embedding algorithms.
Among them, the Skip-gram with Negative Sampling (SGNS), also known as
Word2Vec, was shown to provide state-of-the-art results on various linguistics
tasks. In this paper, we show that item-based CF can be cast in the same
framework of neural word embedding. Inspired by SGNS, we describe a method we
name Item2Vec for item-based CF that produces embedding for items in a latent
space. The method is capable of inferring item-to-item relations even when user
information is not available. We present experimental results on large scale
datasets that demonstrate the effectiveness of the Item2Vec method and show it
is competitive with SVD.
Description
Item2Vec: Neural Item Embedding for Collaborative Filtering
%0 Generic
%1 barkan2016item2vec
%A Barkan, Oren
%A Koenigstein, Noam
%D 2016
%K collaborative embedding filtering item2vec
%T Item2Vec: Neural Item Embedding for Collaborative Filtering
%U http://arxiv.org/abs/1603.04259
%X Many Collaborative Filtering (CF) algorithms are item-based in the sense that
they analyze item-item relations in order to produce item similarities.
Recently, several works in the field of Natural Language Processing suggested
to learn a latent representation of words using neural embedding algorithms.
Among them, the Skip-gram with Negative Sampling (SGNS), also known as
Word2Vec, was shown to provide state-of-the-art results on various linguistics
tasks. In this paper, we show that item-based CF can be cast in the same
framework of neural word embedding. Inspired by SGNS, we describe a method we
name Item2Vec for item-based CF that produces embedding for items in a latent
space. The method is capable of inferring item-to-item relations even when user
information is not available. We present experimental results on large scale
datasets that demonstrate the effectiveness of the Item2Vec method and show it
is competitive with SVD.
@comment{arXiv preprint (1603.04259). "Item2Vec" is braced in the title so
  sentence-casing .bst styles keep its capitalisation; the arXiv identifier is
  exposed via the standard eprint/archiveprefix fields so biblatex styles can
  render it, while the original BibSonomy fields (biburl, interhash, intrahash,
  note, timestamps) are kept intact for round-tripping.}
@misc{barkan2016item2vec,
  abstract      = {Many Collaborative Filtering (CF) algorithms are item-based in the sense that
they analyze item-item relations in order to produce item similarities.
Recently, several works in the field of Natural Language Processing suggested
to learn a latent representation of words using neural embedding algorithms.
Among them, the Skip-gram with Negative Sampling (SGNS), also known as
Word2Vec, was shown to provide state-of-the-art results on various linguistics
tasks. In this paper, we show that item-based CF can be cast in the same
framework of neural word embedding. Inspired by SGNS, we describe a method we
name Item2Vec for item-based CF that produces embedding for items in a latent
space. The method is capable of inferring item-to-item relations even when user
information is not available. We present experimental results on large scale
datasets that demonstrate the effectiveness of the Item2Vec method and show it
is competitive with SVD.},
  added-at      = {2016-07-12T17:28:32.000+0200},
  archiveprefix = {arXiv},
  author        = {Barkan, Oren and Koenigstein, Noam},
  biburl        = {https://www.bibsonomy.org/bibtex/209fc26edc98def6dae607580b4c71b86/thoni},
  description   = {Item2Vec: Neural Item Embedding for Collaborative Filtering},
  eprint        = {1603.04259},
  interhash     = {14c994323f4ab2583aa840da718720e0},
  intrahash     = {09fc26edc98def6dae607580b4c71b86},
  keywords      = {collaborative embedding filtering item2vec},
  note          = {cite arxiv:1603.04259},
  timestamp     = {2016-11-02T06:50:19.000+0100},
  title         = {{Item2Vec}: Neural Item Embedding for Collaborative Filtering},
  url           = {http://arxiv.org/abs/1603.04259},
  year          = {2016}
}