Over the past few years, neural networks have re-emerged as powerful
machine-learning models, yielding state-of-the-art results in fields such as
image recognition and speech processing. More recently, neural network models
started to be applied also to textual natural language signals, again with very
promising results. This tutorial surveys neural network models from the
perspective of natural language processing research, in an attempt to bring
natural-language researchers up to speed with the neural techniques. The
tutorial covers input encoding for natural language tasks, feed-forward
networks, convolutional networks, recurrent networks and recursive networks, as
well as the computation graph abstraction for automatic gradient computation.
Description
[1510.00726] A Primer on Neural Network Models for Natural Language Processing
%0 Generic
%1 goldberg2015primer
%A Goldberg, Yoav
%D 2015
%K cnn kallimachos neuralnets nlp rnn
%T A Primer on Neural Network Models for Natural Language Processing
%U http://arxiv.org/abs/1510.00726
%X Over the past few years, neural networks have re-emerged as powerful
machine-learning models, yielding state-of-the-art results in fields such as
image recognition and speech processing. More recently, neural network models
started to be applied also to textual natural language signals, again with very
promising results. This tutorial surveys neural network models from the
perspective of natural language processing research, in an attempt to bring
natural-language researchers up to speed with the neural techniques. The
tutorial covers input encoding for natural language tasks, feed-forward
networks, convolutional networks, recurrent networks and recursive networks, as
well as the computation graph abstraction for automatic gradient computation.
% arXiv preprint; identifier carried in the standard eprint/archiveprefix
% fields (rather than a free-text note) so styles render it properly.
@misc{goldberg2015primer,
  abstract      = {Over the past few years, neural networks have re-emerged as powerful
machine-learning models, yielding state-of-the-art results in fields such as
image recognition and speech processing. More recently, neural network models
started to be applied also to textual natural language signals, again with very
promising results. This tutorial surveys neural network models from the
perspective of natural language processing research, in an attempt to bring
natural-language researchers up to speed with the neural techniques. The
tutorial covers input encoding for natural language tasks, feed-forward
networks, convolutional networks, recurrent networks and recursive networks, as
well as the computation graph abstraction for automatic gradient computation.},
  added-at      = {2017-01-18T15:32:27.000+0100},
  archiveprefix = {arXiv},
  author        = {Goldberg, Yoav},
  biburl        = {https://www.bibsonomy.org/bibtex/29aa674995bbb6c7ef0ff91827a3a6a38/albinzehe},
  description   = {[1510.00726] A Primer on Neural Network Models for Natural Language Processing},
  eprint        = {1510.00726},
  interhash     = {0d4411af22df74aa8795081804889c29},
  intrahash     = {9aa674995bbb6c7ef0ff91827a3a6a38},
  keywords      = {cnn kallimachos neuralnets nlp rnn},
  timestamp     = {2017-01-18T15:43:13.000+0100},
  title         = {A Primer on Neural Network Models for Natural Language Processing},
  url           = {http://arxiv.org/abs/1510.00726},
  year          = {2015}
}