@article{M8DC395F2,
  title    = "Korean Ironic Expression Detector",
  journal  = "The Transactions of the Korea Information Processing Society",
  year     = "2024",
  doi      = "10.3745/TKIPS.2024.13.3.148",
  author   = "Seung Ju Bang and Yo-Han Park and Jee Eun Kim and Kong Joo Lee",
  keywords = "Irony Detection, KoBERT, ChatGPT, Transfer Learning, MultiTask Learning",
  abstract = "Despite the increasing importance of irony and sarcasm detection in natural language processing, research on Korean is relatively scarce compared with other languages. This study experiments with various models for irony detection in Korean text, using KoBERT, a BERT-based model, and ChatGPT. For KoBERT, two methods of additional training on sentiment data were applied: Transfer Learning and MultiTask Learning. For ChatGPT, the Few-Shot Learning technique was applied by increasing the number of example sentences provided in the prompt. The experiments showed that the Transfer Learning and MultiTask Learning models, which were trained with additional sentiment data, outperformed the baseline model trained without it. In contrast, ChatGPT exhibited significantly lower performance than KoBERT, and increasing the number of example sentences did not lead to a noticeable improvement. In conclusion, this study suggests that a KoBERT-based model is more suitable for irony detection than ChatGPT."
}