@article{article_1648466,
  author    = {Bayat Toksöz, Seda and Işık, Gültekin},
  title     = {Efficient Adaptation of Large Language Models for Sentiment Analysis: A Fine-Tuning Approach},
  journal   = {Journal of the Institute of Science and Technology},
  volume    = {15},
  number    = {4},
  pages     = {1149--1164},
  year      = {2025},
  doi       = {10.21597/jist.1648466},
  publisher = {Iğdır Üniversitesi},
  keywords  = {Duygu Analizi, İnce Ayar, QLoRa (Nicelenmiş Düşük Sıralı Adaptation), LoRa (Low-Rank Adaptation), Gpt-2, Mistral-7B},
  abstract  = {This study presents a systematic comparative analysis of sentiment classification on financial news headlines using two transformer architectures, Mistral-7B and GPT-2, fine-tuned with advanced adaptation techniques—Quantized Low-Rank Adaptation (QLoRA) and Low-Rank Adaptation (LoRA). Utilising a large-scale Finance News dataset, the models are rigorously evaluated for their ability to accurately classify headlines into positive, neutral, and negative sentiments while also considering computational efficiency. Beyond overall accuracy, we report macro-averaged precision, recall, and F1-score, thereby providing a fuller picture of the models’ class-wise behaviour. Empirical findings demonstrate that the Mistral-7B-based configurations substantially outperform those based on GPT-2, with Mistral-7B-QLoRA achieving the highest accuracy (0.881) and Mistral-7B-LoRA, with a score of 0.878, while GPT-2 models demonstrate significantly lower performance (0.519 for GPT-2-LoRA and 0.517 for GPT-2-QLoRA). Detailed analyses, incorporating confusion matrices and standard evaluation metrics, underscore the superior balance of classification performance and resource efficiency offered by Mistral-7B. The study goes on to discuss limitations, including the focus on a single financial dataset, and outlines prospects for future research, including the evaluation of additional architectures and adaptation techniques across diverse domains. This work contributes to the advancement of fine-tuning strategies for large language models, offering valuable insights for optimising sentiment analysis pipelines in resource-constrained environments.},
}