@article{MF0625610,
  title    = "Korean End-to-end Neural Coreference Resolution with BERT",
  journal  = "Journal of KIISE, JOK",
  year     = "2020",
  issn     = "2383-630X",
  doi      = "10.5626/JOK.2020.47.10.942",
  author   = "Kihun Kim and Cheonum Park and Changki Lee and Hyunki Kim",
  keywords = "deep learning, coreference resolution, natural language processing",
  abstract = "Coreference resolution is a natural language processing task that identifies the mentions in a given document and clusters those that refer to the same entity. For Korean coreference resolution, two approaches have been used: an end-to-end model that performs mention detection and mention clustering simultaneously, and a pointer network based on the encoder-decoder model. The BERT model released by Google has been applied to many natural language processing tasks and has yielded substantial performance improvements. In this paper, we propose a Korean end-to-end neural coreference resolution model with BERT. The model uses KorBERT, pre-trained on Korean data, and incorporates dependency parsing results and named entity recognition features to reflect the structural and semantic characteristics of Korean. Experimental results on the ETRI Q&A domain data set show a CoNLL F1 of 71.00% (DEV) and 69.01% (TEST), outperforming previous studies."
}