Introduces the Transformer model, which SASRec adapts for the task of sequential recommendation.
Ссылки
Закладки
комментарий будет удален
Пожалуйста, войдите в систему, чтобы принять участие в дискуссии (добавить собственные рецензию, или комментарий)
Цитировать эту публикацию
%0 Conference Paper
%1 vaswani2017attention
%A Vaswani, Ashish
%A Shazeer, Noam
%A Parmar, Niki
%A Uszkoreit, Jakob
%A Jones, Llion
%A Gomez, Aidan N
%A Kaiser, Łukasz
%A Polosukhin, Illia
%B Advances in neural information processing systems
%D 2017
%K
%P 5998--6008
%T Attention is all you need
%U http://arxiv.org/abs/1706.03762
@inproceedings{vaswani2017attention,
  added-at  = {2020-07-13T17:23:43.000+0200},
  author    = {Vaswani, Ashish and Shazeer, Noam and Parmar, Niki and Uszkoreit, Jakob and Jones, Llion and Gomez, Aidan N. and Kaiser, {\L}ukasz and Polosukhin, Illia},
  biburl    = {https://www.bibsonomy.org/bibtex/2a50f62f71cf1fb5a269be40e6dec54ce/s363405},
  booktitle = {Advances in Neural Information Processing Systems},
  interhash = {c9bf08cbcb15680c807e12a01dd8c929},
  intrahash = {a50f62f71cf1fb5a269be40e6dec54ce},
  pages     = {5998--6008},
  timestamp = {2020-07-13T17:23:43.000+0200},
  title     = {Attention Is All You Need},
  url       = {http://arxiv.org/abs/1706.03762},
  year      = {2017},
}