An important strength of learning classifier systems (LCSs) lies in
the combination of
genetic optimization techniques with gradient-based approximation
techniques.
The chosen approximation technique develops locally optimal approximations,
such as accurate classification estimates, Q-value predictions, or
linear function approximations.
The genetic optimization technique is designed to
distribute these local approximations efficiently over the problem
space.
Together, the two components develop a distributed, locally optimized
problem solution in the form of a population of expert rules, often
called classifiers.
In function approximation problems,
the XCSF classifier system develops a problem solution in the form
of overlapping, piecewise linear approximations.
This paper shows that XCSF performance on function approximation problems
additively benefits from
(1) improved representations,
(2) improved genetic operators, and
(3) improved approximation techniques.
Additionally, this paper introduces a novel closest classifier matching
mechanism for
the efficient compaction of XCS's final problem solution.
The resulting compaction mechanism can boil the population size down
by
90\% on average, while decreasing prediction accuracy only marginally.
Performance evaluations show that the additional mechanisms enable
XCSF to reliably, accurately,
and compactly approximate even seven-dimensional functions.
Performance comparisons with other, heuristic function approximation
techniques show that XCSF
yields competitive or even superior noise-robust performance.
%0 Journal Article
%1 Butz:2008b
%A Butz, Martin Volker
%A Lanzi, Pier Luca
%A Wilson, Stewart W.
%D 2008
%J IEEE Transactions on Evolutionary Computation
%K MVButz MVButzJ
%P 355-376
%T Function Approximation With XCS: Hyperellipsoidal Conditions,
Recursive Least Squares, and Compaction
%U http://www.coboslab.psychologie.uni-wuerzburg.de/fileadmin/ext00209/user_upload/Publications/2008/ButzLanziWilson2008FunctionApproximationWithXCS.pdf
%V 12
%X An important strength of learning classifier systems (LCSs) lies in
the combination of
genetic optimization techniques with gradient-based approximation
techniques.
The chosen approximation technique develops locally optimal approximations,
such as accurate classification estimates, Q-value predictions, or
linear function approximations.
The genetic optimization technique is designed to
distribute these local approximations efficiently over the problem
space.
Together, the two components develop a distributed, locally optimized
problem solution in the form of a population of expert rules, often
called classifiers.
In function approximation problems,
the XCSF classifier system develops a problem solution in the form
of overlapping, piecewise linear approximations.
This paper shows that XCSF performance on function approximation problems
additively benefits from
(1) improved representations,
(2) improved genetic operators, and
(3) improved approximation techniques.
Additionally, this paper introduces a novel closest classifier matching
mechanism for
the efficient compaction of XCS's final problem solution.
The resulting compaction mechanism can boil the population size down
by
90\% on average, while decreasing prediction accuracy only marginally.
Performance evaluations show that the additional mechanisms enable
XCSF to reliably, accurately,
and compactly approximate even seven-dimensional functions.
Performance comparisons with other, heuristic function approximation
techniques show that XCSF
yields competitive or even superior noise-robust performance.
@article{Butz:2008b,
abstract = {An important strength of learning classifier systems (LCSs) lies in
the combination of
genetic optimization techniques with gradient-based approximation
techniques.
The chosen approximation technique develops locally optimal approximations,
such as accurate classification estimates, Q-value predictions, or
linear function approximations.
The genetic optimization technique is designed to
distribute these local approximations efficiently over the problem
space.
Together, the two components develop a distributed, locally optimized
problem solution in the form of a population of expert rules, often
called classifiers.
In function approximation problems,
the XCSF classifier system develops a problem solution in the form
of overlapping, piecewise linear approximations.
This paper shows that XCSF performance on function approximation problems
additively benefits from
(1) improved representations,
(2) improved genetic operators, and
(3) improved approximation techniques.
Additionally, this paper introduces a novel closest classifier matching
mechanism for
the efficient compaction of XCS's final problem solution.
The resulting compaction mechanism can boil the population size down
by
90\% on average, while decreasing prediction accuracy only marginally.
Performance evaluations show that the additional mechanisms enable
XCSF to reliably, accurately,
and compactly approximate even seven dimensional functions.
Performance comparisons with other, heuristic function approximation
techniques show that XCSF
yields competitive or even superior noise-robust performance.},
added-at = {2009-10-06T21:32:32.000+0200},
author = {Butz, Martin Volker and Lanzi, Pier Luca and Wilson, Stewart W.},
biburl = {https://www.bibsonomy.org/bibtex/2274973a548d78cadae0601b8131c631e/butz},
description = {own papers},
interhash = {7179ee21bc802d5c59a611ab5e5a7fb1},
intrahash = {274973a548d78cadae0601b8131c631e},
journal = {IEEE Transactions on Evolutionary Computation},
keywords = {MVButz MVButzJ},
owner = {butz},
pages = {355--376},
timestamp = {2009-10-06T21:32:32.000+0200},
title = {Function Approximation With {XCS}: {Hyperellipsoidal} Conditions,
Recursive Least Squares, and Compaction},
url = {http://www.coboslab.psychologie.uni-wuerzburg.de/fileadmin/ext00209/user_upload/Publications/2008/ButzLanziWilson2008FunctionApproximationWithXCS.pdf},
volume = {12},
year = {2008}
}