@article{M7C6533D8,
  title    = {Low-Resolution Image Classification Using Knowledge Distillation From High-Resolution Image Via Self-Attention Map},
  author   = {Shin, Sungho and Lee, Joosoon and Lee, Junseok and Choi, Seungjun and Lee, Kyoobin},
  journal  = {Journal of KIISE, JOK},
  year     = {2020},
  issn     = {2383-630X},
  doi      = {10.5626/JOK.2020.47.11.1027},
  keywords = {low resolution image, image classification, knowledge distillation, self-attention map},
  abstract = {Traditional deep-learning models have been developed using high-quality images. However, when the low resolution images are rendered, the performances of the model drop drastically. To develop a deep-learning model that can respond effectively to low-resolution images, we extracted the information from the model, which uses high-resolution images as input, in the form of the Attention Map. Using the knowledge distillation technique, the information delivering Attention Map, extracted from the high-resolution images to low-resolution image models, could reduce the error rate by 2.94\%, when classifying the low-resolution CIFAR images of 16{$\times$}16 resolution. This was at 38.43\% of the error reduction rate when the image resolution was lowered from 32{$\times$}32 to 16{$\times$}16, which could demonstrate excellence in this network.},
}