@article{article_1739598,
  title        = {The Impact of Optimizer Selection on Transformer Performance: Analyzing the Role of Model Complexity and Dataset Size},
  author       = {Çelik, Hilal and Katırcı, Ramazan},
  journal      = {Black Sea Journal of Engineering and Science},
  volume       = {9},
  number       = {1},
  pages        = {13--14},
  year         = {2025},
  DOI          = {10.34248/bsengineering.1739598},
  publisher    = {Karyay Karadeniz Yayımcılık Ve Organizasyon Ticaret Limited Şirketi},
  keywords     = {Transformer architecture, Comparison of optimization algorithms, Model complexity, Dataset size, Optimization algorithms, Training efficiency, Deep learning, Natural language processing},
  abstract     = {Model complexity, dataset size and optimizer choice critically influence machine learning model performance, especially in complex architectures such as Transformers. This study analyzes the impact of seven optimizers (Adam, AdamW, AdaBelief, RMSprop, Nadam, Adagrad and SGD) across two Transformer configurations and three dataset sizes. Results show that adaptive optimizers generally outperform non-adaptive ones such as SGD, particularly as dataset size grows. For smaller datasets (20K, 50K), Adam, AdamW, Nadam and RMSprop perform best on low-complexity models, while AdaBelief, Adagrad and SGD excel with higher complexity. On the largest dataset (~140K samples), Nadam and RMSprop lead in low-complexity models, whereas Adam, AdaBelief, Adagrad, SGD and AdamW do so in high-complexity models. Notably, low-complexity models train more than twice as fast and, in some cases, achieve better accuracy and lower loss than their high-complexity counterparts. These results highlight the trade-offs involved in balancing optimizer selection, dataset size and model complexity to achieve both efficiency and accuracy.},
  organization = {This work has been supported by the Scientific Research Projects Coordination Unit of the Sivas University of Science and Technology.}
}
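
Note: the seven optimizers named in the abstract are all available off the shelf in PyTorch, with AdaBelief provided by the third-party adabelief-pytorch package. The sketch below is illustrative only and is not the authors' experimental code; the make_optimizer helper, the learning rates, and the small Transformer encoder configuration are assumptions for demonstration, not the paper's settings.

import torch
import torch.nn as nn
from adabelief_pytorch import AdaBelief  # assumption: pip install adabelief-pytorch

def make_optimizer(name: str, model: nn.Module, lr: float = 1e-3):
    """Return one of the seven optimizers compared in the paper (lr is a placeholder)."""
    params = model.parameters()
    builders = {
        "adam":      lambda: torch.optim.Adam(params, lr=lr),
        "adamw":     lambda: torch.optim.AdamW(params, lr=lr, weight_decay=1e-2),
        "adabelief": lambda: AdaBelief(params, lr=lr),
        "rmsprop":   lambda: torch.optim.RMSprop(params, lr=lr),
        "nadam":     lambda: torch.optim.NAdam(params, lr=lr),
        "adagrad":   lambda: torch.optim.Adagrad(params, lr=lr),
        "sgd":       lambda: torch.optim.SGD(params, lr=lr, momentum=0.9),
    }
    return builders[name.lower()]()

# A small Transformer encoder standing in for a "low-complexity" configuration
# (layer sizes here are assumptions, not the authors' settings).
model = nn.TransformerEncoder(
    nn.TransformerEncoderLayer(d_model=128, nhead=4, batch_first=True),
    num_layers=2,
)
optimizer = make_optimizer("nadam", model)  # swap the name to compare optimizers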