@article{MFB38D951,
  title    = "Korean Semantic Role Labeling with BERT",
  journal  = "Journal of KIISE, JOK",
  year     = "2020",
  issn     = "2383-630X",
  doi      = "10.5626/JOK.2020.47.11.1021",
  author   = "Jangseong Bae and Changki Lee and Soojong Lim and Hyunki Kim",
  keywords = "Bidirectional Encoder Representations from Transformers, semantic role labeling, machine learning, language model",
  abstract = "Semantic role labeling is a natural language processing task that identifies relationships such as ``who, what, how and why'' within a sentence. Semantic role labeling studies mainly use machine learning algorithms and end-to-end methods that exclude hand-crafted feature information. Recently, a language model called BERT (Bidirectional Encoder Representations from Transformers) has emerged in natural language processing, outperforming previous state-of-the-art models. The performance of end-to-end semantic role labeling is mainly determined by the structure of the machine learning model and the pre-trained language model. In this paper, we therefore apply BERT to Korean semantic role labeling to improve its performance. The resulting Korean semantic role labeling model using BERT achieves 85.77%, outperforming existing Korean semantic role labeling models."
}