@article{M39F9A96F,
  title    = {Single Sentence Summarization with an Event Word Attention Mechanism},
  author   = {Jung, Ian and Choi, Su Jeong and Park, Seyoung},
  journal  = {Journal of KIISE, JOK},
  year     = {2020},
  issn     = {2383-630X},
  doi      = {10.5626/JOK.2020.47.2.155},
  keywords = {sentence summarization,event word,sequence-to-sequence,attention mechanism,event word attention mechanism},
  abstract = {The purpose of summarization is to generate short text that preserves important information in the source sentences. There are two approaches for the summarization task. One is an extractive approach and other is an abstractive approach. The extractive approach is to determine if words in a source sentence are retained or not. The abstractive approach generates the summary of a given source sentence using the neural network such as the sequence-to-sequence model and the pointer-generator. However, these approaches present a problem because such approaches omit important information such as event words. This paper proposes an event word attention mechanism for sentence summarization. Event words serve as the key meaning of a given source sentence, since they express what occurs in the source sentence. The event word attention weights are calculated by event information of each words in the source sentence and then it combines global attention mechanism. For evaluation, we used the English and Korean dataset. Experimental results show that, the model of adopting event attention outperforms the existing models.},
}