We introduce a globally normalized transition-based neural network model that
achieves state-of-the-art part-of-speech tagging, dependency parsing and
sentence compression results. Our model is a simple feed-forward neural network
that operates on a task-specific transition system, yet achieves comparable or
better accuracies than recurrent models. We discuss the importance of global as
opposed to local normalization: a key insight is that the label bias problem
implies that globally normalized models can be strictly more expressive than
locally normalized models.
%0 Generic
%1 andor2016globally
%A Andor, Daniel
%A Alberti, Chris
%A Weiss, David
%A Severyn, Aliaksei
%A Presta, Alessandro
%A Ganchev, Kuzman
%A Petrov, Slav
%A Collins, Michael
%D 2016
%K gpugrant kallimachos mlnlp neuralnet parsing syntaxnet
%T Globally Normalized Transition-Based Neural Networks
%U http://arxiv.org/abs/1603.06042
%X We introduce a globally normalized transition-based neural network model that
achieves state-of-the-art part-of-speech tagging, dependency parsing and
sentence compression results. Our model is a simple feed-forward neural network
that operates on a task-specific transition system, yet achieves comparable or
better accuracies than recurrent models. We discuss the importance of global as
opposed to local normalization: a key insight is that the label bias problem
implies that globally normalized models can be strictly more expressive than
locally normalized models.
@misc{andor2016globally,
  abstract      = {We introduce a globally normalized transition-based neural network model that
achieves state-of-the-art part-of-speech tagging, dependency parsing and
sentence compression results. Our model is a simple feed-forward neural network
that operates on a task-specific transition system, yet achieves comparable or
better accuracies than recurrent models. We discuss the importance of global as
opposed to local normalization: a key insight is that the label bias problem
implies that globally normalized models can be strictly more expressive than
locally normalized models.},
  added-at      = {2017-01-23T17:52:47.000+0100},
  author        = {Andor, Daniel and Alberti, Chris and Weiss, David and Severyn, Aliaksei and Presta, Alessandro and Ganchev, Kuzman and Petrov, Slav and Collins, Michael},
  biburl        = {https://www.bibsonomy.org/bibtex/21103a5277b755925bfcf9f99bcb089ff/albinzehe},
  description   = {Globally Normalized Transition-Based Neural Networks},
  archiveprefix = {arXiv},
  eprint        = {1603.06042},
  primaryclass  = {cs.CL},
  interhash     = {db3b21284fce9df6e23b83c53c0fcb4b},
  intrahash     = {1103a5277b755925bfcf9f99bcb089ff},
  keywords      = {gpugrant kallimachos mlnlp neuralnet parsing syntaxnet},
  timestamp     = {2018-07-04T20:51:49.000+0200},
  title         = {Globally Normalized Transition-Based Neural Networks},
  url           = {http://arxiv.org/abs/1603.06042},
  year          = {2016}
}