In this paper we study the limitations of parallelization in convex
optimization. A convenient approach to study parallelization is through the
prism of adaptivity which is an information theoretic measure of the
parallel runtime of an algorithm. Informally, adaptivity is the number of
sequential rounds an algorithm needs to make when it can execute
polynomially-many queries in parallel at every round. For combinatorial
optimization with black-box oracle access, the study of adaptivity has recently
led to exponential accelerations in parallel runtime and the natural question
is whether dramatic accelerations are achievable for convex optimization.
Our main result is a spoiler. We show that, in general, parallelization does
not accelerate convex optimization. In particular, for the problem of
minimizing a non-smooth Lipschitz and strongly convex function with black-box
oracle access we give information theoretic lower bounds that indicate that the
number of adaptive rounds of any randomized algorithm exactly matches the upper
bounds of single-query-per-round (i.e. non-parallel) algorithms.
Description
[1808.03880] Parallelization does not Accelerate Convex Optimization: Adaptivity Lower Bounds for Non-smooth Convex Minimization
%0 Journal Article
%1 balkanski2018parallelization
%A Balkanski, Eric
%A Singer, Yaron
%D 2018
%K convex optimization
%T Parallelization does not Accelerate Convex Optimization: Adaptivity
Lower Bounds for Non-smooth Convex Minimization
%U http://arxiv.org/abs/1808.03880
%X In this paper we study the limitations of parallelization in convex
optimization. A convenient approach to study parallelization is through the
prism of adaptivity which is an information theoretic measure of the
parallel runtime of an algorithm. Informally, adaptivity is the number of
sequential rounds an algorithm needs to make when it can execute
polynomially-many queries in parallel at every round. For combinatorial
optimization with black-box oracle access, the study of adaptivity has recently
led to exponential accelerations in parallel runtime and the natural question
is whether dramatic accelerations are achievable for convex optimization.
Our main result is a spoiler. We show that, in general, parallelization does
not accelerate convex optimization. In particular, for the problem of
minimizing a non-smooth Lipschitz and strongly convex function with black-box
oracle access we give information theoretic lower bounds that indicate that the
number of adaptive rounds of any randomized algorithm exactly matches the upper
bounds of single-query-per-round (i.e. non-parallel) algorithms.
@misc{balkanski2018parallelization,
  abstract      = {In this paper we study the limitations of parallelization in convex
optimization. A convenient approach to study parallelization is through the
prism of \emph{adaptivity} which is an information theoretic measure of the
parallel runtime of an algorithm. Informally, adaptivity is the number of
sequential rounds an algorithm needs to make when it can execute
polynomially-many queries in parallel at every round. For combinatorial
optimization with black-box oracle access, the study of adaptivity has recently
led to exponential accelerations in parallel runtime and the natural question
is whether dramatic accelerations are achievable for convex optimization.
Our main result is a spoiler. We show that, in general, parallelization does
not accelerate convex optimization. In particular, for the problem of
minimizing a non-smooth Lipschitz and strongly convex function with black-box
oracle access we give information theoretic lower bounds that indicate that the
number of adaptive rounds of any randomized algorithm exactly match the upper
bounds of single-query-per-round (i.e. non-parallel) algorithms.},
  added-at      = {2019-06-27T16:10:24.000+0200},
  archiveprefix = {arXiv},
  author        = {Balkanski, Eric and Singer, Yaron},
  biburl        = {https://www.bibsonomy.org/bibtex/210d7bc7485ea933834bbd4ffb1fc03a7/kirk86},
  description   = {[1808.03880] Parallelization does not Accelerate Convex Optimization: Adaptivity Lower Bounds for Non-smooth Convex Minimization},
  doi           = {10.48550/arXiv.1808.03880},
  eprint        = {1808.03880},
  interhash     = {bc29c294f3809ed270f123cb1af1ebdd},
  intrahash     = {10d7bc7485ea933834bbd4ffb1fc03a7},
  keywords      = {convex optimization},
  timestamp     = {2019-06-27T16:10:24.000+0200},
  title         = {Parallelization does not Accelerate Convex Optimization: Adaptivity
Lower Bounds for Non-smooth Convex Minimization},
  url           = {https://arxiv.org/abs/1808.03880},
  year          = {2018},
}