Transformer-based models are now widely used in NLP, but much about their inner workings remains poorly understood. This paper describes what is known to date about the popular BERT model (Devlin et al., 2019), synthesizing over 40 analysis studies. We also provide an overview of proposed modifications to the model and its training regime, and we outline directions for further research.
@article{rogers2020primer,
author = {Rogers, Anna and Kovaleva, Olga and Rumshisky, Anna},
keywords = {nlp survey},
note = {cite arxiv:2002.12327},
title = {A Primer in BERTology: What we know about how BERT works},
url = {http://arxiv.org/abs/2002.12327},
year = 2020
}
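As a usage note, here is a minimal LaTeX sketch of citing this entry, assuming the BibTeX record above is saved in a local references.bib file (the filename, and the choice of natbib over biblatex, are illustrative, not prescribed by the source):

\documentclass{article}
\usepackage{natbib} % author-year citations; biblatex would work equally well

\begin{document}
% \citet renders an in-text citation such as "Rogers et al. (2020)"
BERT's inner workings are surveyed by \citet{rogers2020primer}.

\bibliographystyle{plainnat}
\bibliography{references} % assumes the entry above lives in references.bib
\end{document}

Since the record is an arXiv preprint, the @article entry has no journal field; some bibliography styles will emit a warning for this but still compile.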