@Article{app7121294,
AUTHOR = {Valiente, David and Gil, Arturo and Payá, Luis and Sebastián, Jose M. and Reinoso, Óscar},
TITLE = {Robust Visual Localization with Dynamic Uncertainty Management in Omnidirectional SLAM},
JOURNAL = {Applied Sciences},
VOLUME = {7},
YEAR = {2017},
NUMBER = {12},
ARTICLE-NUMBER = {1294},
URL = {http://www.mdpi.com/2076-3417/7/12/1294},
ISSN = {2076-3417},
ABSTRACT = {This work presents a robust visual localization technique based on an omnidirectional monocular sensor for mobile robotics applications. We aim to overcome the non-linearities and instabilities typically introduced by camera projection systems, which are especially relevant in catadioptric sensors. This paper makes several contributions. First, a novel strategy for uncertainty management is developed, which yields a realistic visual localization technique, since it dynamically encodes instantaneous variations and drifts of the uncertainty by defining an information metric of the system. Secondly, an adaptation of the epipolar constraint to the omnidirectional geometry reference is devised. Thirdly, Bayesian considerations are implemented in order to produce a final global metric for consistent feature matching between images. The results are supported by real-data experiments performed with publicly available datasets, in order to assess the suitability of the approach and to confirm the reliability of the main contributions. Besides localization results, real visual SLAM (Simultaneous Localization and Mapping) comparison experiments with acknowledged methods are also presented, using a public dataset and benchmark framework.},
DOI = {10.3390/app7121294}
}