Many state-of-the-art computer vision algorithms use large scale
convolutional neural networks (CNNs) as basic building blocks. These CNNs are
known for their huge number of parameters, high redundancy in weights, and
tremendous computing resource consumptions. This paper presents a learning
algorithm to simplify and speed up these CNNs. Specifically, we introduce a
"try-and-learn" algorithm to train pruning agents that remove unnecessary CNN
filters in a data-driven way. With the help of a novel reward function, our
agents removes a significant number of filters in CNNs while maintaining
performance at a desired level. Moreover, this method provides an easy control
of the tradeoff between network performance and its scale. Performance of our
algorithm is validated with comprehensive pruning experiments on several
popular CNNs for visual recognition and semantic segmentation tasks.
%0 Generic
%1 huang2018learning
%A Huang, Qiangui
%A Zhou, Kevin
%A You, Suya
%A Neumann, Ulrich
%D 2018
%K cnn pruning structural-learning
%T Learning to Prune Filters in Convolutional Neural Networks
%U http://arxiv.org/abs/1801.07365
%X Many state-of-the-art computer vision algorithms use large scale
convolutional neural networks (CNNs) as basic building blocks. These CNNs are
known for their huge number of parameters, high redundancy in weights, and
tremendous computing resource consumptions. This paper presents a learning
algorithm to simplify and speed up these CNNs. Specifically, we introduce a
"try-and-learn" algorithm to train pruning agents that remove unnecessary CNN
filters in a data-driven way. With the help of a novel reward function, our
agents removes a significant number of filters in CNNs while maintaining
performance at a desired level. Moreover, this method provides an easy control
of the tradeoff between network performance and its scale. Performance of our
algorithm is validated with comprehensive pruning experiments on several
popular CNNs for visual recognition and semantic segmentation tasks.
@misc{huang2018learning,
  abstract      = {Many state-of-the-art computer vision algorithms use large scale
convolutional neural networks (CNNs) as basic building blocks. These CNNs are
known for their huge number of parameters, high redundancy in weights, and
tremendous computing resource consumptions. This paper presents a learning
algorithm to simplify and speed up these CNNs. Specifically, we introduce a
"try-and-learn" algorithm to train pruning agents that remove unnecessary CNN
filters in a data-driven way. With the help of a novel reward function, our
agents removes a significant number of filters in CNNs while maintaining
performance at a desired level. Moreover, this method provides an easy control
of the tradeoff between network performance and its scale. Performance of our
algorithm is validated with comprehensive pruning experiments on several
popular CNNs for visual recognition and semantic segmentation tasks.},
  added-at      = {2018-01-24T16:36:51.000+0100},
  archiveprefix = {arXiv},
  author        = {Huang, Qiangui and Zhou, Kevin and You, Suya and Neumann, Ulrich},
  biburl        = {https://www.bibsonomy.org/bibtex/2f4a2b97a9f2882396d9ae0a86967d018/rcb},
  description   = {1801.07365.pdf},
  doi           = {10.48550/arXiv.1801.07365},
  eprint        = {1801.07365},
  interhash     = {58bb725fbce50b6d1dda88e131cddfab},
  intrahash     = {f4a2b97a9f2882396d9ae0a86967d018},
  keywords      = {cnn pruning structural-learning},
  note          = {cite arxiv:1801.07365},
  primaryclass  = {cs.CV},
  timestamp     = {2018-01-24T16:36:51.000+0100},
  title         = {Learning to Prune Filters in Convolutional Neural Networks},
  url           = {http://arxiv.org/abs/1801.07365},
  year          = {2018}
}