@inproceedings{a53644e568cf4d78b33af95983090d51,
  title     = {Sharing Pre-trained {BERT} Decoder for a Hybrid Summarization},
  abstract  = {Sentence selection and summary generation are two main steps to generate informative and readable summaries. However, most previous works treat them as two separated subtasks. In this paper, we propose a novel extractive-and-abstractive hybrid framework for single document summarization task by jointly learning to select sentence and rewrite summary. It first selects sentences by an extractive decoder and then generate summary according to each selected sentence by an abstractive decoder. Moreover, we apply the BERT pre-trained model as document encoder, sharing the context representations to both decoders. Experiments on the CNN/DailyMail dataset show that the proposed framework outperforms both state-of-the-art extractive and abstractive models.},
  keywords  = {Extractive and abstractive, Pre-trained based, Text summarization},
  author    = {Wei, Ran and Huang, Heyan and Gao, Yang},
  note      = {Publisher Copyright: {\textcopyright} 2019, Springer Nature Switzerland AG.; 18th China National Conference on Computational Linguistics, CCL 2019 ; Conference date: 18-10-2019 Through 20-10-2019},
  year      = {2019},
  doi       = {10.1007/978-3-030-32381-3_14},
  language  = {English},
  isbn      = {9783030323806},
  series    = {Lecture Notes in Computer Science (including subseries Lecture Notes in Artificial Intelligence and Lecture Notes in Bioinformatics)},
  publisher = {Springer},
  pages     = {169--180},
  editor    = {Sun, Maosong and Liu, Yang and Liu, Zhiyuan and Huang, Xuanjing and Ji, Heng},
  booktitle = {Chinese Computational Linguistics - 18th China National Conference, {CCL} 2019, Proceedings},
  address   = {Cham},
}