@inproceedings{8d88ff789bf44a1e97d9177c2f060d3e,
title = "Instruction Tuning with LLMs for Programming Exercise Generation",
abstract = "Large language models (LLMs) have been applied to help programming education on aspects such as question answering and program repair. While they make students learn more efficiently, how to use LLMs to help increase teaching efficiency is rarely explored. In this paper, we focus on harnessing LLMs to automatically generate programming exercises with the goal of alleviating teachers{\textquoteright} workload and enhancing teaching efficiency. We first evaluate the performance of seven open-source LLMs using prompts, and then fine-tune two winning LLMs using instructions constructed with the Evol-Instruct and the ACES algorithms, respectively. Experimental results demonstrate the improved performance on the two LLMs after the instruction tuning. Additionally, our contribution encompasses the formulation of evaluation metrics and the exploration of various prompt methods.",
keywords = "Instruction Tuning, Open-source LLMs, Programming exercise generation",
author = "Guolong Zeng and Qinchen Xue and Xuesong Lu",
note = "Publisher Copyright: {\textcopyright} The Author(s), under exclusive license to Springer Nature Singapore Pte Ltd. 2024.; 21st CCF Conference on Web Information Systems and Applications in China, WISA 2024 ; Conference date: 02-08-2024 Through 04-08-2024",
year = "2024",
doi = "10.1007/978-981-97-7707-5\_31",
language = "英语",
isbn = "9789819777068",
series = "Lecture Notes in Computer Science (including subseries Lecture Notes in Artificial Intelligence and Lecture Notes in Bioinformatics)",
publisher = "Springer Science and Business Media Deutschland GmbH",
pages = "377--385",
editor = "Cheqing Jin and Shiyu Yang and Xuequn Shang and Haofen Wang and Yong Zhang",
booktitle = "Web Information Systems and Applications - 21st International Conference, WISA 2024, Proceedings",
address = "德国",
}