We present a work-in-progress snapshot of learning with a 15 billion
parameter deep learning network on HPC architectures applied to the largest
publicly available natural image and video dataset released to date. Recent
advancements in unsupervised deep neural networks suggest that scaling up such
networks in both model and training dataset size can yield significant
improvements in the learning of concepts at the highest layers. We train our
three-layer deep neural network on the Yahoo! Flickr Creative Commons 100M
dataset. The dataset comprises approximately 99.2 million images and 800,000
user-created videos from Yahoo's Flickr image and video sharing platform.
Training of our network takes eight days on 98 GPU nodes at the High
Performance Computing Center at Lawrence Livermore National Laboratory.
Encouraging preliminary results and future research directions are presented
and discussed.
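
(Illustrative sketch, not from the paper.) The abstract does not say what the three layers are or how they are trained; unsupervised deep networks of this era were typically built by greedy layer-wise training, for example with autoencoders (cf. Le et al., 2012). The minimal numpy sketch below stacks three tied-weight autoencoder layers in that spirit. The layer widths, learning rate, and the autoencoder choice itself are assumptions for illustration only, not the authors' method.

# Hypothetical sketch: greedy layer-wise unsupervised training of a
# three-layer stack of tied-weight autoencoders. All sizes and
# hyperparameters are invented; none come from the paper.
import numpy as np

rng = np.random.default_rng(0)

def sigmoid(x):
    return 1.0 / (1.0 + np.exp(-x))

def train_autoencoder_layer(X, n_hidden, lr=0.1, epochs=5, batch=256):
    """Train one tied-weight autoencoder on X; return (W, b, codes)."""
    n_in = X.shape[1]
    W = rng.normal(0, 0.01, size=(n_in, n_hidden))
    b = np.zeros(n_hidden)   # encoder bias
    c = np.zeros(n_in)       # decoder bias
    for _ in range(epochs):
        for i in range(0, len(X), batch):
            x = X[i:i + batch]
            h = sigmoid(x @ W + b)       # encode
            x_hat = h @ W.T + c          # decode with tied weights
            err = x_hat - x              # reconstruction error
            dh = (err @ W) * h * (1 - h)
            # W receives gradients from both the encode and decode paths.
            W -= lr * (x.T @ dh + err.T @ h) / len(x)
            b -= lr * dh.mean(axis=0)
            c -= lr * err.mean(axis=0)
    return W, b, sigmoid(X @ W + b)

# Stack three layers greedily: each layer trains on the codes below it.
X = rng.random((1024, 64))               # stand-in for image patch features
layers, codes = [], X
for n_hidden in (128, 64, 32):           # hypothetical layer widths
    W, b, codes = train_autoencoder_layer(codes, n_hidden)
    layers.append((W, b))

At the scale the abstract describes (15 billion parameters, roughly 99.2 million images, 98 GPU nodes), such training is distributed across nodes rather than run in a single loop like this; the sketch only shows the per-layer learning rule.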
@misc{ni2015largescale,
  author   = {Ni, Karl and Pearce, Roger and Boakye, Kofi and Van Essen, Brian and Borth, Damian and Chen, Barry and Wang, Eric},
  title    = {Large-Scale Deep Learning on the YFCC100M Dataset},
  year     = 2015,
  note     = {arXiv:1502.03409},
  url      = {http://arxiv.org/abs/1502.03409},
  keywords = {dataset, deep learning}
}