In the past few years we have seen the meteoric appearance of dozens of foundation models of the Transformer family, all of which have memorable and sometimes funny, but not self-explanatory, names. The goal of this paper is to offer a somewhat comprehensive but simple catalog and classification of the most popular Transformer models. The paper also includes an introduction to the most important aspects and innovations in Transformer models. Our catalog will include models that are trained using self-supervised learning (e.g., BERT or GPT3) as well as those that are further trained using a human-in-the-loop (e.g. the InstructGPT model used by ChatGPT).
arXiv.org Snapshot:/Users/pascal/Zotero/storage/VWHD8XPM/2302.html:text/html;Full Text PDF:/Users/pascal/Zotero/storage/RI7Q72IX/Amatriain - 2023 - Transformer models an introduction and catalog.pdf:application/pdf
%0 Generic
%1 amatriain_transformer_2023
%A Amatriain, Xavier
%D 2023
%I arXiv
%K Computer Science - Computation and Language, ecomodelling
%T Transformer models: an introduction and catalog
%U http://arxiv.org/abs/2302.07730
%X In the past few years we have seen the meteoric appearance of dozens of foundation models of the Transformer family, all of which have memorable and sometimes funny, but not self-explanatory, names. The goal of this paper is to offer a somewhat comprehensive but simple catalog and classification of the most popular Transformer models. The paper also includes an introduction to the most important aspects and innovations in Transformer models. Our catalog will include models that are trained using self-supervised learning (e.g., BERT or GPT3) as well as those that are further trained using a human-in-the-loop (e.g. the InstructGPT model used by ChatGPT).
@misc{amatriain_transformer_2023,
  title         = {{Transformer} models: an introduction and catalog},
  shorttitle    = {{Transformer} models},
  author        = {Amatriain, Xavier},
  year          = 2023,
  month         = may,
  publisher     = {arXiv},
  eprint        = {2302.07730},
  archiveprefix = {arXiv},
  primaryclass  = {cs.CL},
  doi           = {10.48550/arXiv.2302.07730},
  url           = {http://arxiv.org/abs/2302.07730},
  urldate       = {2023-07-10},
  note          = {arXiv:2302.07730 [cs]},
  abstract      = {In the past few years we have seen the meteoric appearance of dozens of foundation models of the Transformer family, all of which have memorable and sometimes funny, but not self-explanatory, names. The goal of this paper is to offer a somewhat comprehensive but simple catalog and classification of the most popular Transformer models. The paper also includes an introduction to the most important aspects and innovations in Transformer models. Our catalog will include models that are trained using self-supervised learning (e.g., BERT or GPT3) as well as those that are further trained using a human-in-the-loop (e.g. the InstructGPT model used by ChatGPT).},
  keywords      = {Computer Science - Computation and Language, ecomodelling},
  file          = {arXiv.org Snapshot:/Users/pascal/Zotero/storage/VWHD8XPM/2302.html:text/html;Full Text PDF:/Users/pascal/Zotero/storage/RI7Q72IX/Amatriain - 2023 - Transformer models an introduction and catalog.pdf:application/pdf},
  added-at      = {2023-07-31T08:05:54.000+0200},
  timestamp     = {2023-07-31T08:07:14.000+0200},
  biburl        = {https://www.bibsonomy.org/bibtex/242c9d35f9041a00c5c0e6daf67e1e5f1/jascal_panetzky},
  interhash     = {ce8c1e1b3b4370ed435210c3133ea677},
  intrahash     = {42c9d35f9041a00c5c0e6daf67e1e5f1},
}