The paper discusses the capabilities of large pre-trained language models and their limitations in accessing and manipulating knowledge. The authors introduce retrieval-augmented generation (RAG) models that combine pre-trained parametric and non-parametric memory for language generation. The study explores the effectiveness of RAG models in various NLP tasks and compares them with other architectures.
%0 Journal Article
%1 RAG2020
%A Lewis, Patrick
%A Perez, Ethan
%A Piktus, Aleksandra
%A Petroni, Fabio
%A Karpukhin, Vladimir
%A Goyal, Naman
%A Küttler, Heinrich
%A Lewis, Mike
%A Yih, Wen-tau
%A Rocktäschel, Tim
%A Riedel, Sebastian
%A Kiela, Douwe
%D 2020
%J arXiv:2005.11401
%K arxiv retrieval-augmented NLP related_to:b3e9c2b92748097978ba7148601e28d4 posted_with_chatgpt
%T Retrieval-Augmented Generation for Knowledge-Intensive NLP Tasks
%U https://arxiv.org/abs/2005.11401
@misc{RAG2020,
  added-at      = {2023-09-13T23:35:43.000+0200},
  archiveprefix = {arXiv},
  author        = {Lewis, Patrick and Perez, Ethan and Piktus, Aleksandra and Petroni, Fabio and Karpukhin, Vladimir and Goyal, Naman and K{\"u}ttler, Heinrich and Lewis, Mike and Yih, Wen-tau and Rockt{\"a}schel, Tim and Riedel, Sebastian and Kiela, Douwe},
  biburl        = {https://www.bibsonomy.org/bibtex/2eaf7b844dafa872f697d394ba71cf01c/tomvoelker},
  description   = {The paper discusses the capabilities of large pre-trained language models and their limitations in accessing and manipulating knowledge. The authors introduce retrieval-augmented generation (RAG) models that combine pre-trained parametric and non-parametric memory for language generation. The study explores the effectiveness of RAG models in various NLP tasks and compares them with other architectures.},
  eprint        = {2005.11401},
  interhash     = {f7118e46b7fb2897339df74aeb056f34},
  intrahash     = {eaf7b844dafa872f697d394ba71cf01c},
  keywords      = {arxiv retrieval-augmented NLP related_to:b3e9c2b92748097978ba7148601e28d4 posted_with_chatgpt},
  note          = {arXiv:2005.11401},
  primaryclass  = {cs.CL},
  timestamp     = {2023-09-13T23:35:43.000+0200},
  title         = {Retrieval-Augmented Generation for Knowledge-Intensive {NLP} Tasks},
  url           = {https://arxiv.org/abs/2005.11401},
  year          = {2020},
}