@inproceedings{61c2d2f59e594f0c90acd5c13f2cd2d7,
  title     = {Conceptual Multi-Layer Neural Network Model for Headline Generation},
  abstract  = {Neural attention-based models have been widely used recently in headline generation by mapping source document to target headline. However, the traditional neural headline generation models utilize the first sentence of the document as the training input while ignoring the impact of the document concept information on headline generation. In this work, a new neural attention-based model called concept sensitive neural headline model is proposed, which connects the concept information of the document to input text for headline generation and achieves satisfactory results. Besides, we use a multi-layer Bi-LSTM in encoder instead of single layer. Experiments have shown that our model outperforms state-of-the-art systems on DUC-2004 and Gigaword test sets.},
  keywords  = {Attention-based, Concept, Multi-layer Bi-LSTM},
  author    = {Guo, Yidi and Huang, Heyan and Gao, Yang and Lu, Chi},
  note      = {Publisher Copyright: {\textcopyright} Springer International Publishing AG 2017.; 16th China National Conference on Computational Linguistics, CCL 2017 and 5th International Symposium on Natural Language Processing Based on Naturally Annotated Big Data, NLP-NABD 2017 ; Conference date: 13-10-2017 Through 15-10-2017},
  year      = {2017},
  doi       = {10.1007/978-3-319-69005-6_30},
  language  = {English},
  isbn      = {9783319690049},
  series    = {Lecture Notes in Computer Science (including subseries Lecture Notes in Artificial Intelligence and Lecture Notes in Bioinformatics)},
  publisher = {Springer Verlag},
  pages     = {355--367},
  editor    = {Sun, Maosong and Chang, Baobao and Wang, Xiaojie and Xiong, Deyi},
  booktitle = {Chinese Computational Linguistics and Natural Language Processing Based on Naturally Annotated Big Data - 16th China National Conference, {CCL} 2017 and 5th International Symposium, {NLP-NABD} 2017, Proceedings},
  address   = {Germany},
}