@inproceedings{aff08076c0744cdab6f50182c43f1754,
title = "Robust Vision-based Simultaneous Localization and Mapping for Highly Dynamic Scenes",
abstract = "Vision-based simultaneous positioning and mapping technology plays a crucial role in many fields including intelligent robots, autonomous driving, etc. However, the visual front-end tracking quality and the final positioning accuracy are still unsatisfactory due to the insufficient number of feature points in scenes with light changes, dynamic objects, or insufficient texture. To address this issue, this paper proposes a vision-based positioning and trajectory mapping method optimized for dynamic scenes. We first remove the characteristic points of the dynamic object, and then fuse motion sensor data and the vision sensor data. We also optimize the proposed system in terms of computation consumption. Extensive experiments have been conducted in three different settings to validate the accuracy and speed. It has been demonstrated that the proposed algorithm can improve the positioning accuracy by 90\% and decrease the computation time by 94.1\%, making it usable in edge devices with limited computation power in real world applications.",
keywords = "Dynamic scene, Localization, Mapping, SLAM, TUM dataset",
author = "Zijian Zhang and Qiaoyu Lei and Chao Li and Zhipeng Zhuang and Bo Yan",
note = "Publisher Copyright: {\textcopyright} 2021 IEEE.; 6th International Conference on UK-China Emerging Technologies, UCET 2021 ; Conference date: 04-11-2021 Through 06-11-2021",
year = "2021",
doi = "10.1109/UCET54125.2021.9674991",
language = "English",
series = "2021 6th International Conference on UK-China Emerging Technologies, UCET 2021",
publisher = "Institute of Electrical and Electronics Engineers Inc.",
pages = "221--228",
booktitle = "2021 6th International Conference on UK-China Emerging Technologies, UCET 2021",
address = "United States",
}