This article investigates the use of time-of-flight
(ToF) cameras in mapping tasks for autonomous mobile
robots, in particular in simultaneous localization
and mapping (SLAM) tasks. Although ToF cameras are
in principle an attractive type of sensor for
three-dimensional (3D) mapping owing to their high
rate of frames of 3D data, two features make them
difficult as mapping sensors, namely, their
restricted field of view and influences on the
quality of range measurements by high dynamics in
object reflectivity; in addition, currently
available models suffer from poor data quality in a
number of aspects. The paper first summarizes
calibration and filtering approaches for improving
the accuracy, precision, and robustness of ToF
cameras independent of their intended usage. Then,
several ego motion estimation approaches are applied
or adapted, respectively, in order to provide a
performance benchmark for registering ToF camera
data. As a part of this, an extension to the
iterative closest point algorithm has been developed
that increases the robustness under restricted field
of view and under larger displacements. Using an
indoor environment, the paper provides results from
SLAM experiments using these approaches in
comparison. It turns out that the application of ToF
cameras to SLAM tasks is feasible, although this
type of sensor has a complex error characteristic.
%0 Journal Article
%1 JFR2009_1
%A May, S.
%A Dröschel, D.
%A Holz, D.
%A Fuchs, S.
%A Malis, E.
%A Nüchter, A.
%A Hertzberg, J.
%D 2009
%J Journal of Field Robotics (JFR), Special Issue on
Three-Dimensional Mapping
%K imported
%N 11--12
%P 892--914
%R 10.1002/rob.20321
%T 3D Mapping with Time-of-Flight Cameras
%U https://robotik.informatik.uni-wuerzburg.de/telematics/download/jfr2009_1.pdf
%V 26
%X This article investigates the use of time-of-flight
(ToF) cameras in mapping tasks for autonomous mobile
robots, in particular in simultaneous localization
and mapping (SLAM) tasks. Although ToF cameras are
in principle an attractive type of sensor for
three-dimensional (3D) mapping owing to their high
rate of frames of 3D data, two features make them
difficult as mapping sensors, namely, their
restricted field of view and influences on the
quality of range measurements by high dynamics in
object reflectivity; in addition, currently
available models suffer from poor data quality in a
number of aspects. The paper first summarizes
calibration and filtering approaches for improving
the accuracy, precision, and robustness of ToF
cameras independent of their intended usage. Then,
several ego motion estimation approaches are applied
or adapted, respectively, in order to provide a
performance benchmark for registering ToF camera
data. As a part of this, an extension to the
iterative closest point algorithm has been developed
that increases the robustness under restricted field
of view and under larger displacements. Using an
indoor environment, the paper provides results from
SLAM experiments using these approaches in
comparison. It turns out that the application of ToF
cameras to SLAM tasks is feasible, although this
type of sensor has a complex error characteristic.
@article{JFR2009_1,
  abstract  = {This article investigates the use of time-of-flight
(ToF) cameras in mapping tasks for autonomous mobile
robots, in particular in simultaneous localization
and mapping (SLAM) tasks. Although ToF cameras are
in principle an attractive type of sensor for
three-dimensional (3D) mapping owing to their high
rate of frames of 3D data, two features make them
difficult as mapping sensors, namely, their
restricted field of view and influences on the
quality of range measurements by high dynamics in
object reflectivity; in addition, currently
available models suffer from poor data quality in a
number of aspects. The paper first summarizes
calibration and filtering approaches for improving
the accuracy, precision, and robustness of ToF
cameras independent of their intended usage. Then,
several ego motion estimation approaches are applied
or adapted, respectively, in order to provide a
performance benchmark for registering ToF camera
data. As a part of this, an extension to the
iterative closest point algorithm has been developed
that increases the robustness under restricted field
of view and under larger displacements. Using an
indoor environment, the paper provides results from
SLAM experiments using these approaches in
comparison. It turns out that the application of ToF
cameras is feasible to SLAM tasks, although this
type of sensor has a complex error characteristic.},
  added-at  = {2017-09-19T13:40:53.000+0200},
  author    = {May, S. and Dr{\"o}schel, D. and Holz, D. and Fuchs, S. and Malis, E. and N{\"u}chter, A. and Hertzberg, J.},
  biburl    = {https://www.bibsonomy.org/bibtex/2b48d8a01f6a8def1852bcd570516184c/nuechter76},
  doi       = {10.1002/rob.20321},
  interhash = {00826fde0e24e430e8347b3f8906e0a6},
  intrahash = {b48d8a01f6a8def1852bcd570516184c},
  journal   = {Journal of Field Robotics},
  keywords  = {imported},
  note      = {Special Issue on Three-Dimensional Mapping},
  number    = {11--12},
  pages     = {892--914},
  timestamp = {2017-09-29T16:01:21.000+0200},
  title     = {{3D} Mapping with {Time-of-Flight} Cameras},
  url       = {https://robotik.informatik.uni-wuerzburg.de/telematics/download/jfr2009_1.pdf},
  volume    = {26},
  year      = {2009},
}