Existing popular methods for semi-supervised learning with Graph Neural Networks (such as the Graph Convolutional Network) provably cannot learn a general class of neighborhood mixing relationships. To address this weakness, we propose a new model, MixHop, that can learn these relationships, including difference operators, by repeatedly mixing feature representations of neighbors at various distances. MixHop requires no additional memory or computational complexity, and outperforms on challenging baselines. In addition, we propose sparsity regularization that allows us to visualize how the network prioritizes neighborhood information across different graph datasets. Our analysis of the learned architectures reveals that neighborhood mixing varies per datasets.
%0 Conference Paper
%1 pmlr-v97-abu-el-haija19a
%A Abu-El-Haija, Sami
%A Perozzi, Bryan
%A Kapoor, Amol
%A Alipourfard, Nazanin
%A Lerman, Kristina
%A Harutyunyan, Hrayr
%A Ver Steeg, Greg
%A Galstyan, Aram
%B Proceedings of the 36th International Conference on Machine Learning
%D 2019
%E Chaudhuri, Kamalika
%E Salakhutdinov, Ruslan
%I PMLR
%K graph readinglist
%P 21--29
%T MixHop: Higher-Order Graph Convolutional Architectures via Sparsified Neighborhood Mixing
%U https://proceedings.mlr.press/v97/abu-el-haija19a.html
%V 97
%X Existing popular methods for semi-supervised learning with Graph Neural Networks (such as the Graph Convolutional Network) provably cannot learn a general class of neighborhood mixing relationships. To address this weakness, we propose a new model, MixHop, that can learn these relationships, including difference operators, by repeatedly mixing feature representations of neighbors at various distances. MixHop requires no additional memory or computational complexity, and outperforms on challenging baselines. In addition, we propose sparsity regularization that allows us to visualize how the network prioritizes neighborhood information across different graph datasets. Our analysis of the learned architectures reveals that neighborhood mixing varies per datasets.
@inproceedings{pmlr-v97-abu-el-haija19a,
abstract = {Existing popular methods for semi-supervised learning with Graph Neural Networks (such as the Graph Convolutional Network) provably cannot learn a general class of neighborhood mixing relationships. To address this weakness, we propose a new model, MixHop, that can learn these relationships, including difference operators, by repeatedly mixing feature representations of neighbors at various distances. MixHop requires no additional memory or computational complexity, and outperforms on challenging baselines. In addition, we propose sparsity regularization that allows us to visualize how the network prioritizes neighborhood information across different graph datasets. Our analysis of the learned architectures reveals that neighborhood mixing varies per datasets.},
added-at = {2023-08-22T12:20:13.000+0200},
author = {Abu-El-Haija, Sami and Perozzi, Bryan and Kapoor, Amol and Alipourfard, Nazanin and Lerman, Kristina and Harutyunyan, Hrayr and Ver Steeg, Greg and Galstyan, Aram},
biburl = {https://www.bibsonomy.org/bibtex/232c9a7987d1128f2c06ee7af3fdc8cb2/tobias.koopmann},
booktitle = {Proceedings of the 36th International Conference on Machine Learning},
editor = {Chaudhuri, Kamalika and Salakhutdinov, Ruslan},
eventdate = {2019-06-09/2019-06-15},
interhash = {5a939c60c53c4c505ee5f1b0b6a0b8e6},
intrahash = {32c9a7987d1128f2c06ee7af3fdc8cb2},
keywords = {graph readinglist},
month = jun,
pages = {21--29},
pdf = {http://proceedings.mlr.press/v97/abu-el-haija19a/abu-el-haija19a.pdf},
publisher = {PMLR},
series = {Proceedings of Machine Learning Research},
timestamp = {2023-08-22T12:20:13.000+0200},
title = {{MixHop}: Higher-Order Graph Convolutional Architectures via Sparsified Neighborhood Mixing},
url = {https://proceedings.mlr.press/v97/abu-el-haija19a.html},
volume = {97},
year = {2019}
}