Geometric analysis is a very capable theory to understand the influence of
the high dimensionality of the input data in machine learning (ML) and
knowledge discovery (KD). With our approach we can assess how far the
application of a specific KD/ML-algorithm to a concrete data set is prone to
the curse of dimensionality. To this end we extend V.~Pestov's axiomatic
approach to the intrinsic dimension of data sets, based on the seminal work by
M.~Gromov on concentration phenomena, and provide an adaptable and
computationally feasible model for studying observable geometric invariants
associated to features that are natural to both the data and the learning
procedure. In detail, we investigate data represented by formal contexts and
give first theoretical as well as experimental insights into the intrinsic
dimension of a concept lattice. Because of the correspondence between formal
concepts and maximal cliques in graphs, applications to social network analysis
are at hand.
%0 Journal Article
%1 hanika2018intrinsic
%A Hanika, Tom
%A Schneider, Friedrich Martin
%A Stumme, Gerd
%D 2018
%J CoRR
%K 2018 curse data dimension fca geometric intrinsic itegpub kde kdepub myown preprint publist
%T Intrinsic dimension of concept lattices
%U http://arxiv.org/abs/1801.07985
%V abs/1801.07985
%X Geometric analysis is a very capable theory to understand the influence of
the high dimensionality of the input data in machine learning (ML) and
knowledge discovery (KD). With our approach we can assess how far the
application of a specific KD/ML-algorithm to a concrete data set is prone to
the curse of dimensionality. To this end we extend V.~Pestov's axiomatic
approach to the intrinsic dimension of data sets, based on the seminal work by
M.~Gromov on concentration phenomena, and provide an adaptable and
computationally feasible model for studying observable geometric invariants
associated to features that are natural to both the data and the learning
procedure. In detail, we investigate data represented by formal contexts and
give first theoretical as well as experimental insights into the intrinsic
dimension of a concept lattice. Because of the correspondence between formal
concepts and maximal cliques in graphs, applications to social network analysis
are at hand.
@article{hanika2018intrinsic,
  abstract      = {Geometric analysis is a very capable theory to understand the influence of
the high dimensionality of the input data in machine learning (ML) and
knowledge discovery (KD). With our approach we can assess how far the
application of a specific KD/ML-algorithm to a concrete data set is prone to
the curse of dimensionality. To this end we extend V.~Pestov's axiomatic
approach to the intrinsic dimension of data sets, based on the seminal work by
M.~Gromov on concentration phenomena, and provide an adaptable and
computationally feasible model for studying observable geometric invariants
associated to features that are natural to both the data and the learning
procedure. In detail, we investigate data represented by formal contexts and
give first theoretical as well as experimental insights into the intrinsic
dimension of a concept lattice. Because of the correspondence between formal
concepts and maximal cliques in graphs, applications to social network analysis
are at hand.},
  added-at      = {2019-02-15T11:30:11.000+0100},
  archiveprefix = {arXiv},
  author        = {Hanika, Tom and Schneider, Friedrich Martin and Stumme, Gerd},
  biburl        = {https://www.bibsonomy.org/bibtex/2a36ec328eb85d97351ecc027500b9082/stumme},
  description   = {Intrinsic dimension of concept lattices},
  eprint        = {1801.07985},
  interhash     = {a309d844630f13816f11bfc83562dc45},
  intrahash     = {a36ec328eb85d97351ecc027500b9082},
  journal       = {CoRR},
  keywords      = {2018 curse data dimension fca geometric intrinsic itegpub kde kdepub myown preprint publist},
  note          = {13 pages, 3 figures},
  timestamp     = {2019-03-05T11:00:19.000+0100},
  title         = {Intrinsic dimension of concept lattices},
  url           = {http://arxiv.org/abs/1801.07985},
  volume        = {abs/1801.07985},
  year          = {2018}
}