We propose Pure CapsNets (P-CapsNets) which is a generalization of normal CNNs
structurally. Specifically, we make three modifications to current CapsNets.
First, we remove routing procedures from CapsNets based on the observation that
the coupling coefficients can be learned implicitly. Second, we replace the
convolutional layers in CapsNets to improve efficiency. Third, we package the
capsules into rank-3 tensors to further improve efficiency. The experiment
shows that P-CapsNets achieve better performance than CapsNets with varied
routing procedures by using significantly fewer parameters on MNIST & CIFAR10.
The high efficiency of P-CapsNets is even comparable to some deep compressing
models. For example, we achieve more than 99\% accuracy on MNIST by
using only 3888 parameters. We visualize the capsules as well as the
corresponding correlation matrix to show a possible way of initializing
CapsNets in the future. We also explore the adversarial robustness of
P-CapsNets compared to CNNs.
Описание
[1912.08367] P-CapsNets: a General Form of Convolutional Neural Networks
%0 Generic
%1 chen2019pcapsnets
%A Chen, Zhenhua
%A Li, Xiwen
%A Wang, Chuhua
%A Crandall, David
%D 2019
%K 2019 capsule cnn deep-learning
%T P-CapsNets: a General Form of Convolutional Neural Networks
%U http://arxiv.org/abs/1912.08367
%X We propose Pure CapsNets (P-CapsNets) which is a generalization of normal CNNs
structurally. Specifically, we make three modifications to current CapsNets.
First, we remove routing procedures from CapsNets based on the observation that
the coupling coefficients can be learned implicitly. Second, we replace the
convolutional layers in CapsNets to improve efficiency. Third, we package the
capsules into rank-3 tensors to further improve efficiency. The experiment
shows that P-CapsNets achieve better performance than CapsNets with varied
routing procedures by using significantly fewer parameters on MNIST & CIFAR10.
The high efficiency of P-CapsNets is even comparable to some deep compressing
models. For example, we achieve more than 99\% accuracy on MNIST by
using only 3888 parameters. We visualize the capsules as well as the
corresponding correlation matrix to show a possible way of initializing
CapsNets in the future. We also explore the adversarial robustness of
P-CapsNets compared to CNNs.
@misc{chen2019pcapsnets,
  abstract      = {We propose Pure CapsNets (P-CapsNets) which is a generalization of normal CNNs
structurally. Specifically, we make three modifications to current CapsNets.
First, we remove routing procedures from CapsNets based on the observation that
the coupling coefficients can be learned implicitly. Second, we replace the
convolutional layers in CapsNets to improve efficiency. Third, we package the
capsules into rank-3 tensors to further improve efficiency. The experiment
shows that P-CapsNets achieve better performance than CapsNets with varied
routing procedures by using significantly fewer parameters on MNIST \& CIFAR10.
The high efficiency of P-CapsNets is even comparable to some deep compressing
models. For example, we achieve more than 99\% accuracy on MNIST by
using only 3888 parameters. We visualize the capsules as well as the
corresponding correlation matrix to show a possible way of initializing
CapsNets in the future. We also explore the adversarial robustness of
P-CapsNets compared to CNNs.},
  added-at      = {2019-12-20T12:03:13.000+0100},
  archiveprefix = {arXiv},
  author        = {Chen, Zhenhua and Li, Xiwen and Wang, Chuhua and Crandall, David},
  biburl        = {https://www.bibsonomy.org/bibtex/28c05704edd408b7aba9ea974eb116807/analyst},
  description   = {[1912.08367] P-CapsNets: a General Form of Convolutional Neural Networks},
  eprint        = {1912.08367},
  interhash     = {6abf6238879362413365dce2311fd0cc},
  intrahash     = {8c05704edd408b7aba9ea974eb116807},
  keywords      = {2019 capsule cnn deep-learning},
  timestamp     = {2019-12-20T12:03:13.000+0100},
  title         = {{P-CapsNets}: a General Form of Convolutional Neural Networks},
  url           = {http://arxiv.org/abs/1912.08367},
  year          = {2019}
}