The concept of entropy, firstly introduced in information theory, rapidly
became popular in many applied sciences via Shannon's formula to measure the
degree of heterogeneity among observations. A rather recent research field aims
at accounting for space in entropy measures, as a generalization when the
spatial location of occurrences ought to be accounted for. The main limit of
these developments is that all indices are computed conditional on a chosen
distance. This work follows and extends the route for including spatial
components in entropy measures. Starting from the probabilistic properties of
Shannon's entropy for categorical variables, it investigates the
characteristics of the quantities known as residual entropy and mutual
information, when space is included as a second dimension. This way, the
proposal of entropy measures based on univariate distributions is extended to
the consideration of bivariate distributions, in a setting where the
probabilistic meaning of all components is well defined. As a direct
consequence, a spatial entropy measure satisfying the additivity property is
obtained, as global residual entropy is a sum of partial entropies based on
different distance classes. Moreover, the quantity known as mutual information
measures the information brought by the inclusion of space, and also has the
property of additivity. A thorough comparative study illustrates the
superiority of the proposed indices.
Description
[1703.06001v1] The use of spatial information in entropy measures
%0 Journal Article
%1 altieri2017spatial
%A Altieri, Linda
%A Cocchi, Daniela
%A Roli, Giulia
%D 2017
%K entropy geometry information readings theory
%T The use of spatial information in entropy measures
%U http://arxiv.org/abs/1703.06001
%X The concept of entropy, firstly introduced in information theory, rapidly
became popular in many applied sciences via Shannon's formula to measure the
degree of heterogeneity among observations. A rather recent research field aims
at accounting for space in entropy measures, as a generalization when the
spatial location of occurrences ought to be accounted for. The main limit of
these developments is that all indices are computed conditional on a chosen
distance. This work follows and extends the route for including spatial
components in entropy measures. Starting from the probabilistic properties of
Shannon's entropy for categorical variables, it investigates the
characteristics of the quantities known as residual entropy and mutual
information, when space is included as a second dimension. This way, the
proposal of entropy measures based on univariate distributions is extended to
the consideration of bivariate distributions, in a setting where the
probabilistic meaning of all components is well defined. As a direct
consequence, a spatial entropy measure satisfying the additivity property is
obtained, as global residual entropy is a sum of partial entropies based on
different distance classes. Moreover, the quantity known as mutual information
measures the information brought by the inclusion of space, and also has the
property of additivity. A thorough comparative study illustrates the
superiority of the proposed indices.
@article{altieri2017spatial,
  abstract      = {The concept of entropy, firstly introduced in information theory, rapidly
became popular in many applied sciences via Shannon's formula to measure the
degree of heterogeneity among observations. A rather recent research field aims
at accounting for space in entropy measures, as a generalization when the
spatial location of occurrences ought to be accounted for. The main limit of
these developments is that all indices are computed conditional on a chosen
distance. This work follows and extends the route for including spatial
components in entropy measures. Starting from the probabilistic properties of
Shannon's entropy for categorical variables, it investigates the
characteristics of the quantities known as residual entropy and mutual
information, when space is included as a second dimension. This way, the
proposal of entropy measures based on univariate distributions is extended to
the consideration of bivariate distributions, in a setting where the
probabilistic meaning of all components is well defined. As a direct
consequence, a spatial entropy measure satisfying the additivity property is
obtained, as global residual entropy is a sum of partial entropies based on
different distance classes. Moreover, the quantity known as mutual information
measures the information brought by the inclusion of space, and also has the
property of additivity. A thorough comparative study illustrates the
superiority of the proposed indices.},
  added-at      = {2020-03-14T00:40:08.000+0100},
  archiveprefix = {arXiv},
  author        = {Altieri, Linda and Cocchi, Daniela and Roli, Giulia},
  biburl        = {https://www.bibsonomy.org/bibtex/27cd895ab627d2244c34aa82f87f85fa9/kirk86},
  description   = {[1703.06001v1] The use of spatial information in entropy measures},
  eprint        = {1703.06001},
  interhash     = {a2548abbd54f757a1edd58a6a11a45ef},
  intrahash     = {7cd895ab627d2244c34aa82f87f85fa9},
  keywords      = {entropy geometry information readings theory},
  note          = {33 pages, 13 figures},
  timestamp     = {2020-03-14T00:40:08.000+0100},
  title         = {The use of spatial information in entropy measures},
  url           = {http://arxiv.org/abs/1703.06001},
  year          = {2017},
}