Recent work has found evidence that Multilingual BERT (mBERT), a
transformer-based multilingual masked language model, is capable of zero-shot
cross-lingual transfer, suggesting that some aspects of its representations are
shared cross-lingually. To better understand this overlap, we extend recent
work on finding syntactic trees in neural networks' internal representations to
the multilingual setting. We show that subspaces of mBERT representations
recover syntactic tree distances in languages other than English, and that
these subspaces are approximately shared across languages. Motivated by these
results, we present an unsupervised analysis method that provides evidence
mBERT learns representations of syntactic dependency labels, in the form of
clusters which largely agree with the Universal Dependencies taxonomy. This
evidence suggests that even without explicit supervision, multilingual masked
language models learn certain linguistic universals.
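The probing technique the abstract refers to builds on the structural probe of Hewitt and Manning (2019): a linear map is trained so that squared distances between projected contextual word vectors approximate distances between words in the dependency parse tree. The sketch below is a minimal, hypothetical illustration of that idea in PyTorch; the hidden size, probe rank, and loss normalization are illustrative assumptions, not the paper's exact configuration.

```python
# Minimal sketch of a structural "distance probe", the kind of analysis
# the abstract describes extending to mBERT across languages.
# hidden_dim, probe_rank, and the loss normalization are illustrative
# assumptions, not the authors' exact setup.
import torch


class DistanceProbe(torch.nn.Module):
    """Linear map B such that ||B(h_i - h_j)||^2 approximates the
    distance between words i and j in the dependency tree."""

    def __init__(self, hidden_dim: int = 768, probe_rank: int = 128):
        super().__init__()
        self.proj = torch.nn.Parameter(torch.randn(hidden_dim, probe_rank) * 0.01)

    def forward(self, embeddings: torch.Tensor) -> torch.Tensor:
        # embeddings: (seq_len, hidden_dim) contextual vectors for one sentence.
        transformed = embeddings @ self.proj                       # (seq_len, rank)
        diffs = transformed.unsqueeze(1) - transformed.unsqueeze(0)
        return (diffs ** 2).sum(dim=-1)                            # (seq_len, seq_len)


def probe_loss(pred: torch.Tensor, gold_tree_dist: torch.Tensor) -> torch.Tensor:
    # L1 loss between predicted squared distances and gold parse-tree
    # distances, normalized by the number of word pairs.
    n = pred.shape[0]
    return torch.abs(pred - gold_tree_dist).sum() / (n ** 2)
```

Under this framing, the paper's cross-lingual claim corresponds roughly to a single projection of this kind, trained on treebanks in some languages, continuing to recover tree distances when applied to mBERT vectors from held-out languages.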
@misc{chi2020finding,
author = {Chi, Ethan A. and Hewitt, John and Manning, Christopher D.},
keywords = {bert deep grammar learning model nlp universal},
note = {To appear in ACL 2020; arXiv:2005.04511},
title = {Finding Universal Grammatical Relations in Multilingual BERT},
url = {http://arxiv.org/abs/2005.04511},
year = 2020
}