@article{article_1744530,
  title={A deep learning approach powered by GhostNet for skin cancer classification},
  journal={Gümüşhane Üniversitesi Fen Bilimleri Dergisi},
  volume={15},
  number={4},
  pages={1099–1111},
  year={2025},
  DOI={10.17714/gumusfenbil.1744530},
  author={Almasani, Heba and Türe, Hayati},
  keywords={Dermoscopic imaging, GhostNet, Melanoma, Skin cancer, SMOTE-ENN},
  publisher={Gümüşhane Üniversitesi},
  abstract={Skin cancer, especially melanoma, continues to cause a disproportionate share of cancer-related deaths despite being highly curable when detected early. This work presents an efficient deep-learning system for the binary classification of dermoscopic images as malignant or benign using the GhostNet family of convolutional neural networks (CNNs). The publicly available ISIC-2019 dataset was used for training; its extreme class imbalance was corrected with a hybrid Synthetic Minority Over-sampling Technique coupled with Edited Nearest Neighbours (SMOTE-ENN). End-to-end fine-tuning of three GhostNet variants (V1, V2, V3) was performed with preprocessing, augmentation, and hyper-parameters kept constant to ensure a fair model-to-model comparison. The evaluation metrics were accuracy, precision, recall, F1-score, and the area under the receiver-operating-characteristic curve (AUC). GhostNetV2, augmented with depth-wise attention, gave the strongest results: 95% accuracy, 94% malignant-class recall, an F1-score of 94.3%, and an AUC of 0.99. GhostNetV2 outperformed both the baseline V1 and V3 while keeping a parameter count small enough for real-time inference on mobile hardware. These results show that, when combined with targeted imbalance correction, efficient architectures such as GhostNet can achieve dermatologist-level sensitivity without the computational requirements of heavier models, and are thus feasible for point-of-care or resource-constrained settings. This study complements our previous work with other CNNs, enabling direct model-to-model comparison.}
}