% MDPI auto-export, cleaned: accents escaped for classic-BibTeX compatibility;
% pages mirrors the article number so standard styles render a locator
% (the nonstandard article-number field is ignored by .bst styles).
@article{proceedings2019015002,
  author         = {Valiente, David and Pay{\'a}, Luis and Sebasti{\'a}n, Jos{\'e} M. and Jim{\'e}nez, Luis M. and Reinoso, Oscar},
  title          = {Dynamic Catadioptric Sensory Data Fusion for Visual Localization in Mobile Robotics},
  journal        = {Proceedings},
  volume         = {15},
  number         = {1},
  pages          = {2},
  article-number = {2},
  year           = {2019},
  issn           = {2504-3900},
  doi            = {10.3390/proceedings2019015002},
  url            = {https://www.mdpi.com/2504-3900/15/1/2},
  abstract       = {This approach presents a localization technique within mobile robotics sustained by visual sensory data fusion. A regression inference framework is designed with the aid of informative data models of the system, together with support of probabilistic techniques such as Gaussian Processes. As a result, the visual data acquired with a catadioptric sensor is fused between poses of the robot in order to produce a probability distribution of visual information in the 3D global reference of the robot. In addition, a prediction technique based on filter gain is defined to improve the matching of visual information extracted from the probability distribution. This work reveals an enhanced matching technique for visual information in both, the image reference frame, and the 3D global reference. Real data results are presented to confirm the validity of the approach when working in a mobile robotic application for visual localization. Besides, a comparison against standard visual matching techniques is also presented. The suitability and robustness of the contributions are tested in the presented experiments.},
}