Neural network-based methods have made great progress on a variety of natural language processing tasks. However, in most previous work, the models are learned with single-task supervised objectives, which often suffer from insufficient training data. In this paper, we use the multi-task learning framework to learn jointly across multiple related tasks. Based on recurrent neural networks, we propose three different mechanisms of sharing information to model text with task-specific and shared layers. The entire network is trained jointly on all these tasks. Experiments on four benchmark text classification tasks show that our proposed models can improve the performance of a task with the help of other related tasks.
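The abstract describes the architecture only at a high level: a shared recurrent layer whose parameters are updated by every task, plus task-specific layers and classifiers, all trained jointly. Below is a rough illustration of that shared-layer idea, a minimal sketch assuming PyTorch, with hypothetical names such as SharedLayerMultiTaskRNN and num_classes_per_task; it is not the authors' code and does not reproduce their three specific sharing mechanisms.

import torch
import torch.nn as nn

class SharedLayerMultiTaskRNN(nn.Module):
    """Toy multi-task text classifier: one shared LSTM plus one LSTM and softmax head per task."""
    def __init__(self, vocab_size, embed_dim, hidden_dim, num_classes_per_task):
        super().__init__()
        self.embedding = nn.Embedding(vocab_size, embed_dim)
        # Shared layer: its parameters receive gradients from every task.
        self.shared_lstm = nn.LSTM(embed_dim, hidden_dim, batch_first=True)
        # Task-specific layers read the word embeddings plus the shared states.
        self.task_lstms = nn.ModuleList(
            nn.LSTM(embed_dim + hidden_dim, hidden_dim, batch_first=True)
            for _ in num_classes_per_task)
        self.classifiers = nn.ModuleList(
            nn.Linear(hidden_dim, n) for n in num_classes_per_task)

    def forward(self, token_ids, task_id):
        emb = self.embedding(token_ids)            # (batch, seq_len, embed_dim)
        shared_out, _ = self.shared_lstm(emb)      # shared text representation
        task_in = torch.cat([emb, shared_out], dim=-1)
        task_out, _ = self.task_lstms[task_id](task_in)
        return self.classifiers[task_id](task_out[:, -1, :])  # classify from the last state

# Joint training sketch: alternate mini-batches drawn from the different tasks,
# so the shared LSTM is updated by all of them while each head stays task-specific.
model = SharedLayerMultiTaskRNN(vocab_size=10000, embed_dim=100,
                                hidden_dim=100, num_classes_per_task=[2, 5])
tokens = torch.randint(0, 10000, (8, 20))          # one mini-batch for task 0
labels = torch.randint(0, 2, (8,))
loss = nn.functional.cross_entropy(model(tokens, task_id=0), labels)
loss.backward()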
Description
Recurrent Neural Network for Text Classification with Multi-Task Learning - Semantic Scholar
@inproceedings{Liu2016RecurrentNN,
abstract = {Neural network-based methods have made great progress on a variety of natural language processing tasks. However, in most previous work, the models are learned with single-task supervised objectives, which often suffer from insufficient training data. In this paper, we use the multi-task learning framework to learn jointly across multiple related tasks. Based on recurrent neural networks, we propose three different mechanisms of sharing information to model text with task-specific and shared layers. The entire network is trained jointly on all these tasks. Experiments on four benchmark text classification tasks show that our proposed models can improve the performance of a task with the help of other related tasks.},
added-at = {2018-08-27T22:39:43.000+0200},
author = {Liu, Pengfei and Qiu, Xipeng and Huang, Xuanjing},
biburl = {https://www.bibsonomy.org/bibtex/29caccbc655ae34d737665c95d1e5ce45/dallmann},
booktitle = {IJCAI},
description = {Recurrent Neural Network for Text Classification with Multi-Task Learning - Semantic Scholar},
interhash = {68758b84c1e9150dccc40662d604aa75},
intrahash = {9caccbc655ae34d737665c95d1e5ce45},
keywords = {deep_learning multitask nlp sentence_classification},
timestamp = {2018-08-27T22:39:43.000+0200},
title = {Recurrent Neural Network for Text Classification with Multi-Task Learning},
year = 2016
}