Hypergraphs have attracted increasing attention in recent years thanks to their flexibility in naturally modeling a broad range of systems where high-order relationships exist among their interacting parts. This survey reviews the newly born hypergraph representation learning problem, whose goal is to learn a function to project objects—most commonly nodes—of an input hyper-network into a latent space such that both the structural and relational properties of the network can be encoded and preserved. We provide a thorough overview of existing literature and offer a new taxonomy of hypergraph embedding methods by identifying three main families of techniques, i.e., spectral, proximity-preserving, and (deep) neural networks. For each family, we describe its characteristics and our insights in a single yet flexible framework and then discuss the peculiarities of individual methods, as well as their pros and cons. We then review the main tasks, datasets, and settings in which hypergraph embeddings are typically used. We finally identify and discuss open challenges that would inspire further research in this field.
%0 Journal Article
%1 10.1145/3605776
%A Antelmi, Alessia
%A Cordasco, Gennaro
%A Polato, Mirko
%A Scarano, Vittorio
%A Spagnuolo, Carmine
%A Yang, Dingqi
%C New York, NY, USA
%D 2023
%I Association for Computing Machinery
%J ACM Comput. Surv.
%K Hypergraph attention convolution embedding networks neural representation
%N 1
%R 10.1145/3605776
%T A Survey on Hypergraph Representation Learning
%U https://doi.org/10.1145/3605776
%V 56
%X Hypergraphs have attracted increasing attention in recent years thanks to their flexibility in naturally modeling a broad range of systems where high-order relationships exist among their interacting parts. This survey reviews the newly born hypergraph representation learning problem, whose goal is to learn a function to project objects—most commonly nodes—of an input hyper-network into a latent space such that both the structural and relational properties of the network can be encoded and preserved. We provide a thorough overview of existing literature and offer a new taxonomy of hypergraph embedding methods by identifying three main families of techniques, i.e., spectral, proximity-preserving, and (deep) neural networks. For each family, we describe its characteristics and our insights in a single yet flexible framework and then discuss the peculiarities of individual methods, as well as their pros and cons. We then review the main tasks, datasets, and settings in which hypergraph embeddings are typically used. We finally identify and discuss open challenges that would inspire further research in this field.
@article{10.1145/3605776,
  author        = {Antelmi, Alessia and Cordasco, Gennaro and Polato, Mirko and Scarano, Vittorio and Spagnuolo, Carmine and Yang, Dingqi},
  title         = {A Survey on Hypergraph Representation Learning},
  journal       = {ACM Computing Surveys},
  year          = {2023},
  month         = aug,
  volume        = {56},
  number        = {1},
  articleno     = {24},
  numpages      = {38},
  issue_date    = {January 2024},
  publisher     = {Association for Computing Machinery},
  address       = {New York, NY, USA},
  issn          = {0360-0300},
  doi           = {10.1145/3605776},
  url           = {https://doi.org/10.1145/3605776},
  keywords      = {Hypergraph attention convolution embedding networks neural representation},
  abstract      = {Hypergraphs have attracted increasing attention in recent years thanks to their flexibility in naturally modeling a broad range of systems where high-order relationships exist among their interacting parts. This survey reviews the newly born hypergraph representation learning problem, whose goal is to learn a function to project objects—most commonly nodes—of an input hyper-network into a latent space such that both the structural and relational properties of the network can be encoded and preserved. We provide a thorough overview of existing literature and offer a new taxonomy of hypergraph embedding methods by identifying three main families of techniques, i.e., spectral, proximity-preserving, and (deep) neural networks. For each family, we describe its characteristics and our insights in a single yet flexible framework and then discuss the peculiarities of individual methods, as well as their pros and cons. We then review the main tasks, datasets, and settings in which hypergraph embeddings are typically used. We finally identify and discuss open challenges that would inspire further research in this field.},
  added-at      = {2024-02-26T10:39:11.000+0100},
  biburl        = {https://www.bibsonomy.org/bibtex/298c2e8467091ffec9c9657068554d3a1/tobias.koopmann},
  interhash     = {c4d6d8f73888b3f9f12feff4185df617},
  intrahash     = {98c2e8467091ffec9c9657068554d3a1},
  timestamp     = {2024-02-26T10:39:11.000+0100},
  internal-note = {NOTE(review): year=2023 appears to be the online-first date (month=aug) while the bound issue (vol.~56, no.~1) carries issue_date January 2024 -- confirm which year the target citation style expects},
}