@inproceedings{63c7bdda75cc4dd4a5136702155c8cd7,
title = "Reading more efficiently: Multi-sentence summarization with a dual attention and copy-generator network",
abstract = "Sequence-to-sequence neural networks with attention have been widely used in text summarization as the amount of textual data has exploded in recent years. The traditional approach to automatic summarization is based only on word attention and most of them focus on generating a single sentence summarization. In this work, we propose a novel model with a dual attention that considers both sentence and word information and then generates a multi-sentence summarization word by word. Additionally, we enhance our model with a copy-generator network to solve the out-of-vocabulary (OOV) problem. The model shows significant performance gains on the CNN/DailyMail corpus compared with the baseline model. Experimental results demonstrate that our method can obtain ROUGE-1 points of 37.48, ROUGE-2 points of 16.40 and ROUGE-L points of 34.36. Our work shows that several features of our proposed model contribute to further improvements in performance.",
keywords = "Copy-generator network, Dual attention, Text summarization",
author = "Xi Zhang and Zhang, {Hua ping} and Lei Zhao",
note = "Publisher Copyright: {\textcopyright} Springer Nature Switzerland AG 2018.; 15th Pacific Rim International Conference on Artificial Intelligence, PRICAI 2018 ; Conference date: 28-08-2018 Through 31-08-2018",
year = "2018",
doi = "10.1007/978-3-319-97304-3_62",
language = "English",
isbn = "9783319973036",
series = "Lecture Notes in Computer Science (including subseries Lecture Notes in Artificial Intelligence and Lecture Notes in Bioinformatics)",
publisher = "Springer Verlag",
pages = "811--823",
editor = "Byeong-Ho Kang and Xin Geng",
booktitle = "PRICAI 2018",
address = "Germany",
}