We propose two efficient approximations to standard convolutional neural
networks: Binary-Weight-Networks and XNOR-Networks. In Binary-Weight-Networks,
the filters are approximated with binary values resulting in 32x memory saving.
In XNOR-Networks, both the filters and the input to convolutional layers are
binary. XNOR-Networks approximate convolutions using primarily binary
operations. This results in 58x faster convolutional operations and 32x memory
savings. XNOR-Nets offer the possibility of running state-of-the-art networks
on CPUs (rather than GPUs) in real-time. Our binary networks are simple,
accurate, efficient, and work on challenging visual tasks. We evaluate our
approach on the ImageNet classification task. The classification accuracy with
a Binary-Weight-Network version of AlexNet is only 2.9% less than the
full-precision AlexNet (in top-1 measure). We compare our method with recent
network binarization methods, BinaryConnect and BinaryNets, and outperform
these methods by large margins on ImageNet, more than 16% in top-1 accuracy.
%0 Generic
%1 rastegari2016xnornet
%A Rastegari, Mohammad
%A Ordonez, Vicente
%A Redmon, Joseph
%A Farhadi, Ali
%D 2016
%K binary convolutional convolve network neural neuralnet nnet xor xor-net
%T XNOR-Net: ImageNet Classification Using Binary Convolutional Neural
Networks
%U http://arxiv.org/abs/1603.05279
%X We propose two efficient approximations to standard convolutional neural
networks: Binary-Weight-Networks and XNOR-Networks. In Binary-Weight-Networks,
the filters are approximated with binary values resulting in 32x memory saving.
In XNOR-Networks, both the filters and the input to convolutional layers are
binary. XNOR-Networks approximate convolutions using primarily binary
operations. This results in 58x faster convolutional operations and 32x memory
savings. XNOR-Nets offer the possibility of running state-of-the-art networks
on CPUs (rather than GPUs) in real-time. Our binary networks are simple,
accurate, efficient, and work on challenging visual tasks. We evaluate our
approach on the ImageNet classification task. The classification accuracy with
a Binary-Weight-Network version of AlexNet is only 2.9% less than the
full-precision AlexNet (in top-1 measure). We compare our method with recent
network binarization methods, BinaryConnect and BinaryNets, and outperform
these methods by large margins on ImageNet, more than 16% in top-1 accuracy.
@misc{rastegari2016xnornet,
  abstract      = {We propose two efficient approximations to standard convolutional neural
networks: Binary-Weight-Networks and XNOR-Networks. In Binary-Weight-Networks,
the filters are approximated with binary values resulting in 32x memory saving.
In XNOR-Networks, both the filters and the input to convolutional layers are
binary. XNOR-Networks approximate convolutions using primarily binary
operations. This results in 58x faster convolutional operations and 32x memory
savings. XNOR-Nets offer the possibility of running state-of-the-art networks
on CPUs (rather than GPUs) in real-time. Our binary networks are simple,
accurate, efficient, and work on challenging visual tasks. We evaluate our
approach on the ImageNet classification task. The classification accuracy with
a Binary-Weight-Network version of AlexNet is only 2.9\% less than the
full-precision AlexNet (in top-1 measure). We compare our method with recent
network binarization methods, BinaryConnect and BinaryNets, and outperform
these methods by large margins on ImageNet, more than 16\% in top-1 accuracy.},
  added-at      = {2016-12-13T05:11:51.000+0100},
  author        = {Rastegari, Mohammad and Ordonez, Vicente and Redmon, Joseph and Farhadi, Ali},
  biburl        = {https://www.bibsonomy.org/bibtex/268e3c9192f8f124f1fdf9c20e717218f/bshanks},
  eprint        = {1603.05279},
  archiveprefix = {arXiv},
  primaryclass  = {cs.CV},
  interhash     = {65b5f00c865ae8821169825192bb5e6d},
  intrahash     = {68e3c9192f8f124f1fdf9c20e717218f},
  keywords      = {binary convolutional convolve network neural neuralnet nnet xor xor-net},
  timestamp     = {2020-06-15T21:39:35.000+0200},
  title         = {{XNOR}-Net: {ImageNet} Classification Using Binary Convolutional Neural
Networks},
  url           = {http://arxiv.org/abs/1603.05279},
  year          = {2016},
}