We study properties of Graph Convolutional Networks (GCNs) by analyzing their
behavior on standard models of random graphs, where nodes are represented by
random latent variables and edges are drawn according to a similarity kernel.
This allows us to overcome the difficulties of dealing with discrete notions
such as isomorphisms on very large graphs, by considering instead more natural
geometric aspects. We first study the convergence of GCNs to their continuous
counterpart as the number of nodes grows. Our results are fully non-asymptotic
and are valid for relatively sparse graphs with an average degree that grows
logarithmically with the number of nodes. We then analyze the stability of GCNs
to small deformations of the random graph model. In contrast to previous
studies of stability in discrete settings, our continuous setup allows us to
provide more intuitive deformation-based metrics for understanding stability,
which have proven useful for explaining the success of convolutional
representations on Euclidean domains.
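To make the setup concrete, the following is a minimal, illustrative sketch (Python with NumPy) of the kind of random graph model the abstract describes: nodes with random latent variables, edges drawn from a similarity kernel, a logarithmic average-degree scaling, and one normalized GCN propagation step. The Gaussian kernel, the scaling alpha_n = log(n)/n, and the random filter weights are assumptions made here for illustration, not the paper's exact construction.

# Illustrative sketch only; kernel, scaling, and filter are assumed, not taken
# from the paper.
import numpy as np

rng = np.random.default_rng(0)

n, d = 500, 2                        # number of nodes, latent dimension
X = rng.uniform(-1.0, 1.0, (n, d))   # latent variables x_1, ..., x_n

# Similarity kernel K(x_i, x_j); a Gaussian kernel is an assumed example.
sq_dists = ((X[:, None, :] - X[None, :, :]) ** 2).sum(-1)
K = np.exp(-sq_dists / 0.5)

# Sparsity factor alpha_n ~ log(n)/n gives an average degree that grows
# roughly logarithmically with n, the "relatively sparse" regime above.
alpha_n = np.log(n) / n
P = np.clip(alpha_n * K, 0.0, 1.0)            # edge probabilities
A = (rng.random((n, n)) < P).astype(float)    # sample adjacency matrix
A = np.triu(A, 1)
A = A + A.T                                   # symmetric, no self-loops

# One degree-normalized GCN propagation step: H' = relu(D^{-1} A H W).
deg = np.maximum(A.sum(axis=1, keepdims=True), 1.0)
H = X                                         # use latent positions as input features
W = rng.normal(size=(d, 8))                   # random filter weights (illustrative)
H_next = np.maximum((A @ H) / deg @ W, 0.0)
print(H_next.shape)                           # (n, 8)

In this model, as n grows the normalized propagation step concentrates around an integral operator defined by the kernel K, which is the continuous counterpart referred to in the abstract.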
@article{keriven2020convergence,
added-at = {2020-06-04T21:37:27.000+0200},
author = {Keriven, Nicolas and Bietti, Alberto and Vaiter, Samuel},
biburl = {https://www.bibsonomy.org/bibtex/2f687bf93c4554fab1e21d9d0ddcd9132/kirk86},
description = {[2006.01868] Convergence and Stability of Graph Convolutional Networks on Large Random Graphs},
interhash = {be625efeb3f51dddd6c596f63e99b8c3},
intrahash = {f687bf93c4554fab1e21d9d0ddcd9132},
keywords = {convergence graphs randomized readings stable},
note = {cite arxiv:2006.01868},
timestamp = {2020-06-04T21:37:27.000+0200},
  title = {Convergence and Stability of Graph Convolutional Networks on Large Random Graphs},
url = {http://arxiv.org/abs/2006.01868},
year = 2020
}