One-hot CNN (convolutional neural network) has been shown to be effective for
text categorization (Johnson & Zhang, 2015). We view it as a special case of a
general framework which jointly trains a linear model with a non-linear feature
generator consisting of 'text region embedding + pooling'. Under this
framework, we explore a more sophisticated region embedding method using Long
Short-Term Memory (LSTM). LSTM can embed text regions of variable (and possibly
large) sizes, whereas the region size needs to be fixed in a CNN. We seek
effective and efficient use of LSTM for this purpose in the supervised and
semi-supervised settings. The best results were obtained by combining region
embeddings in the form of LSTM and convolution layers trained on unlabeled
data. The results indicate that on this task, embeddings of text regions, which
can convey complex concepts, are more useful than embeddings of single words in
isolation. We report performances exceeding the previous best results on four
benchmark datasets.
Description
[1602.02373] Supervised and Semi-Supervised Text Categorization using LSTM for Region Embeddings
%0 Generic
%1 johnson2016supervised
%A Johnson, Rie
%A Zhang, Tong
%D 2016
%K context neuralnet sentimentanalysis spp
%T Supervised and Semi-Supervised Text Categorization using LSTM for Region
Embeddings
%U http://arxiv.org/abs/1602.02373
%X One-hot CNN (convolutional neural network) has been shown to be effective for
text categorization (Johnson & Zhang, 2015). We view it as a special case of a
general framework which jointly trains a linear model with a non-linear feature
generator consisting of `text region embedding + pooling'. Under this
framework, we explore a more sophisticated region embedding method using Long
Short-Term Memory (LSTM). LSTM can embed text regions of variable (and possibly
large) sizes, whereas the region size needs to be fixed in a CNN. We seek
effective and efficient use of LSTM for this purpose in the supervised and
semi-supervised settings. The best results were obtained by combining region
embeddings in the form of LSTM and convolution layers trained on unlabeled
data. The results indicate that on this task, embeddings of text regions, which
can convey complex concepts, are more useful than embeddings of single words in
isolation. We report performances exceeding the previous best results on four
benchmark datasets.
@misc{johnson2016supervised,
  abstract      = {One-hot CNN (convolutional neural network) has been shown to be effective for
text categorization (Johnson \& Zhang, 2015). We view it as a special case of a
general framework which jointly trains a linear model with a non-linear feature
generator consisting of `text region embedding + pooling'. Under this
framework, we explore a more sophisticated region embedding method using Long
Short-Term Memory (LSTM). LSTM can embed text regions of variable (and possibly
large) sizes, whereas the region size needs to be fixed in a CNN. We seek
effective and efficient use of LSTM for this purpose in the supervised and
semi-supervised settings. The best results were obtained by combining region
embeddings in the form of LSTM and convolution layers trained on unlabeled
data. The results indicate that on this task, embeddings of text regions, which
can convey complex concepts, are more useful than embeddings of single words in
isolation. We report performances exceeding the previous best results on four
benchmark datasets.},
  added-at      = {2018-10-24T09:25:25.000+0200},
  archiveprefix = {arXiv},
  author        = {Johnson, Rie and Zhang, Tong},
  biburl        = {https://www.bibsonomy.org/bibtex/2e5c2e13ad815bcf2219391bf6e966cf6/albinzehe},
  description   = {[1602.02373] Supervised and Semi-Supervised Text Categorization using LSTM for Region Embeddings},
  eprint        = {1602.02373},
  interhash     = {ea331ee20cb35f7d2f1e06cd43034970},
  intrahash     = {e5c2e13ad815bcf2219391bf6e966cf6},
  keywords      = {context neuralnet sentimentanalysis spp},
  note          = {cite arxiv:1602.02373},
  timestamp     = {2018-10-24T09:25:25.000+0200},
  title         = {Supervised and Semi-Supervised Text Categorization using {LSTM} for Region Embeddings},
  url           = {http://arxiv.org/abs/1602.02373},
  year          = {2016},
}