@article{M94B67E12,
  title    = "Performance Analysis of Korean Morphological Analyzer based on Transformer and BERT",
  journal  = "Journal of KIISE, JOK",
  year     = "2020",
  issn     = "2383-630X",
  doi      = "10.5626/JOK.2020.47.8.730",
  author   = "Yongseok Choi and Kong Joo Lee",
  keywords = "sequence-to-sequence, Korean morphological analyzer, Transformer, attention mechanism, copying mechanism",
  abstract = "This paper introduces a Korean morphological analyzer using the Transformer, one of the most popular sequence-to-sequence deep neural models. The Transformer comprises an encoder and a decoder. The encoder compresses a raw input sentence into a fixed-size vector, while the decoder generates the morphological analysis result from this vector. We also replace the encoder with BERT, a pre-trained language representation model. An attention mechanism and a copying mechanism are integrated into the decoder. The processing units of the encoder and the decoder are eojeol-based WordPieces and morpheme-based WordPieces, respectively. Experimental results show that the Transformer with a fine-tuned BERT encoder outperforms the randomly initialized Transformer by 2.9% in F1 score. We also investigate the effects of the WordPiece embeddings on morphological analysis when they are not fully updated during training."
}
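
The abstract describes a BERT-encoder / Transformer-decoder pipeline with an attention mechanism and a copying mechanism in the decoder. Below is a minimal PyTorch sketch of that kind of architecture, not the authors' implementation: the model name "bert-base-multilingual-cased", the pointer-generator-style gate p_gen, and the source-to-target vocabulary map src_to_tgt_ids are all illustrative assumptions.

    # Minimal sketch (not the paper's released code): a pre-trained BERT encoder
    # over eojeol-based WordPieces feeds a Transformer decoder that mixes a
    # generated morpheme-vocabulary distribution with a copy distribution over
    # source WordPieces (pointer-generator-style copying).
    import torch
    import torch.nn as nn
    from transformers import BertModel

    class BertToMorphemeDecoder(nn.Module):
        def __init__(self, tgt_vocab_size, d_model=768, nhead=8, num_layers=6):
            super().__init__()
            # d_model must match the BERT hidden size (768 for base models).
            self.encoder = BertModel.from_pretrained("bert-base-multilingual-cased")
            self.tgt_embed = nn.Embedding(tgt_vocab_size, d_model)
            layer = nn.TransformerDecoderLayer(d_model, nhead, batch_first=True)
            self.decoder = nn.TransformerDecoder(layer, num_layers)
            self.generator = nn.Linear(d_model, tgt_vocab_size)
            self.p_gen = nn.Linear(d_model, 1)  # gate between generating and copying

        def forward(self, src_ids, src_mask, tgt_ids, src_to_tgt_ids):
            # Encode the source sentence with (optionally fine-tuned) BERT.
            memory = self.encoder(input_ids=src_ids,
                                  attention_mask=src_mask).last_hidden_state
            tgt = self.tgt_embed(tgt_ids)
            causal = nn.Transformer.generate_square_subsequent_mask(
                tgt.size(1)).to(tgt.device)
            out = self.decoder(tgt, memory, tgt_mask=causal)       # (B, T, d)
            vocab_dist = torch.softmax(self.generator(out), dim=-1)  # (B, T, V)
            # Copy distribution: each decoder state attends over source positions.
            attn = torch.softmax(out @ memory.transpose(1, 2), dim=-1)  # (B, T, S)
            gate = torch.sigmoid(self.p_gen(out))                       # (B, T, 1)
            # src_to_tgt_ids (B, S) maps each source WordPiece to its id in the
            # morpheme vocabulary -- an assumption about how copied pieces are
            # aligned across the two vocabularies.
            index = src_to_tgt_ids.unsqueeze(1).expand(-1, attn.size(1), -1)
            copy_dist = torch.zeros_like(vocab_dist).scatter_add(2, index, attn)
            return gate * vocab_dist + (1 - gate) * copy_dist

The final mixture gate * vocab_dist + (1 - gate) * copy_dist is the standard pointer-generator formulation; the paper names a copying mechanism but does not specify its exact form, so this mixing scheme is one plausible reading.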