@article{M17B510FE,
  author   = {Lee, Hyeon-gu and Kim, Harksoo},
  title    = {Title Generation Model for which Sequence-to-Sequence {RNNs} with Attention and Copying Mechanisms are used},
  journal  = {Journal of KIISE, JOK},
  volume   = {44},
  number   = {7},
  year     = {2017},
  issn     = {2383-630X},
  doi      = {10.5626/JOK.2017.44.7.674},
  keywords = {sequence-to-sequence model, attention mechanism, copying mechanism, title generation, recurrent neural network},
  abstract = {In big-data environments wherein large amounts of text documents are produced daily, titles are very important clues that enable a prompt catching of the key ideas in documents; however, titles are absent for numerous document types such as blog articles and social-media messages. In this paper, a title-generation model for which sequence-to-sequence RNNs with attention and copying mechanisms are employed is proposed. For the proposed model, input sentences are encoded based on bi-directional GRU (gated recurrent unit) networks, and the title words are generated through a decoding of the encoded sentences with keywords that are automatically selected from the input sentences. Regarding the experiments with 93631 training-data documents and 500 test-data documents, the attention-mechanism performances are more effective (ROUGE-1: 0.1935, ROUGE-2: 0.0364, ROUGE-L: 0.1555) than those of the copying mechanism; in addition, the qualitative-evaluation radiative performance of the former is higher.},
}