@article{MC247E290,
  author   = {Jo, Kyeongbin and Choi, Yohan and Lee, Changki and Ryu, Jihee and Lim, Joonho},
  title    = {Korean Coreference Resolution through {BERT} Embedding at the Morpheme Level},
  journal  = {Journal of KIISE, JOK},
  year     = {2023},
  issn     = {2383-630X},
  doi      = {10.5626/JOK.2023.50.6.495},
  keywords = {end-to-end, BERT, span representation},
  abstract = {Coreference resolution is a natural language processing task that identifies mentions that are subject to coreference resolution in a given document, and finds and groups the mentions that refer to the same entity. Korean coreference resolution has been mainly studied in an end-to-end method, and for this purpose, all spans must be considered as potential mentions, so memory usage and time complexity increase. In this paper, a word-level coreference resolution model that performs coreference resolution by mapping sub-tokens back to word units was applied to Korean, and the token expression of the word-level coreference resolution model is calculated through CorefBERT to reflect Korean characteristics. After that, entity name and dependency parsing features were added. As a result of the experiment, in the ETRI Q\&A domain evaluation set, F1 was 70.68\%, showing a 1.67\% performance improvement compared to the existing end-to-end cross-reference solving model, Memory usage improved by 2.4 times, and speed increased by 1.82 times.},
}