@inproceedings{7c6b45bbd2cc4c95875c2010605a9b04,
  author    = {Yan, Xinyu and Xu, Qizhi and Chen, Jiuchen},
  title     = {Attention in Focus: {Transformer}-Powered Super-Resolution for Advanced Remote Sensing},
  booktitle = {2024 {IEEE} International Conference on Control Science and Systems Engineering, {ICCSSE} 2024},
  publisher = {Institute of Electrical and Electronics Engineers Inc.},
  address   = {United States},
  year      = {2024},
  pages     = {358--362},
  doi       = {10.1109/ICCSSE63803.2024.10823881},
  language  = {English},
  keywords  = {attention mechanism, remote sensing, super-resolution, Transformer},
  abstract  = {Transformer-based approaches have demonstrated outstanding performance in natural language processing and computer vision tasks due to their ability to manage long-range dependencies. However, when applied to super-resolution of remote sensing images, transformer-based methods often produce overly smooth results that lack necessary textural details. To overcome this challenge, we developed the Multi-Attention Residual Transformer (MART). MART utilizes a Multi-Scale Attention Module to integrate information at different scales, effectively restoring the complex details in remote sensing images. With its hybrid attention mechanism, MART captures both local and global features efficiently. Comprehensive evaluations on various remote sensing datasets reveal that MART significantly enhances image quality. Compared to widely used advanced methods, MART excels in both qualitative and quantitative metrics, effectively restoring a wide range of landmark features.},
  note      = {Publisher Copyright: {\textcopyright} 2024 IEEE.; 2024 IEEE International Conference on Control Science and Systems Engineering, ICCSSE 2024; Conference date: 18-10-2024 Through 20-10-2024},
}