<?xml version="1.0" encoding="UTF-8"?>
<!DOCTYPE article PUBLIC "-//NLM//DTD JATS (Z39.96) Journal Publishing DTD v1.4 20241031//EN"
        "https://jats.nlm.nih.gov/publishing/1.4/JATS-journalpublishing1-4.dtd">
<article  article-type="research-article"        dtd-version="1.4">
            <front>

                <journal-meta>
                                                                <journal-id>jista</journal-id>
            <journal-title-group>
                                                                                    <journal-title>Journal of Intelligent Systems: Theory and Applications</journal-title>
            </journal-title-group>
                                        <issn pub-type="epub">2651-3927</issn>
                                                                                            <publisher>
                    <publisher-name>Özer UYGUN</publisher-name>
                </publisher>
                    </journal-meta>
                <article-meta>
                                        <article-id pub-id-type="doi">10.38016/jista.1671052</article-id>
                                                                <article-categories>
                                            <subj-group  xml:lang="en">
                                                            <subject>Deep Learning</subject>
                                                    </subj-group>
                                            <subj-group  xml:lang="tr">
                                                            <subject>Derin Öğrenme</subject>
                                                    </subj-group>
                                    </article-categories>
                                                                                                                                                        <title-group>
                                                                                                                        <article-title>Automated Skin Lesion Segmentation in Medical Images Using U-Net Architectures</article-title>
                                                                                                                                                                                                <trans-title-group xml:lang="tr">
                                    <trans-title>U-Net Mimarileri Kullanılarak Tıbbi Görüntülerde Otomatik Cilt Lezyon Segmentasyonu</trans-title>
                                </trans-title-group>
                                                                                                    </title-group>
            
                                                    <contrib-group content-type="authors">
                                                                        <contrib contrib-type="author">
                                                                    <contrib-id contrib-id-type="orcid">https://orcid.org/0009-0009-8707-4793</contrib-id>
                                                                <name>
                                    <surname>Raza</surname>
                                    <given-names>Muhammad Owais</given-names>
                                </name>
                                                                    <aff>SAKARYA UNIVERSITY OF APPLIED SCIENCES</aff>
                                                            </contrib>
                                                    <contrib contrib-type="author">
                                                                    <contrib-id contrib-id-type="orcid">https://orcid.org/0000-0002-0420-8541</contrib-id>
                                                                <name>
                                    <surname>Garip</surname>
                                    <given-names>Zeynep</given-names>
                                </name>
                                                                    <aff>SAKARYA UYGULAMALI BİLİMLER ÜNİVERSİTESİ</aff>
                                                            </contrib>
                                                    <contrib contrib-type="author">
                                                                    <contrib-id contrib-id-type="orcid">https://orcid.org/0000-0003-0658-592X</contrib-id>
                                                                <name>
                                    <surname>Ekinci</surname>
                                    <given-names>Ekin</given-names>
                                </name>
                                                                    <aff>SAKARYA UNIVERSITY OF APPLIED SCIENCES</aff>
                                                            </contrib>
                                                                                </contrib-group>
                        
                                        <pub-date pub-type="pub" iso-8601-date="2026-03-30">
                    <day>30</day>
                    <month>03</month>
                    <year>2026</year>
                </pub-date>
                                        <volume>9</volume>
                                        <issue>2026</issue>
                                        <fpage>1</fpage>
                                        <lpage>12</lpage>
                        
                        <history>
                                    <date date-type="received" iso-8601-date="2025-04-11">
                        <day>11</day>
                        <month>04</month>
                        <year>2025</year>
                    </date>
                                                    <date date-type="accepted" iso-8601-date="2025-10-02">
                        <day>02</day>
                        <month>10</month>
                        <year>2025</year>
                    </date>
                            </history>
                                        <permissions>
                    <copyright-statement>Copyright © 2018, Zeki Sistemler Teori ve Uygulamaları Dergisi</copyright-statement>
                    <copyright-year>2018</copyright-year>
                    <copyright-holder>Zeki Sistemler Teori ve Uygulamaları Dergisi</copyright-holder>
                </permissions>
            
                                                                                                <abstract><p>The biggest challenge that modern laser and surgical treatment methods face in the context of skin diseases is finding the exact boundaries of skin lesions. However, the integration of deep learning applications with these treatments has shown great improvement in finding lesion boundaries. The aim of this study is to comparatively investigate the performances of U-Net and its three improved variations, Residual U-Net, Attention U-Net and Residual Attention U-Net, in skin lesion segmentation. The models were tested on two widely available public datasets, namely ISIC-2016 and ISIC-2017, and the comparison was performed using the same training parameters, image dimensions and evaluation metrics namely accuracy, Dice score and IoU. Attention U-Net model achieved the highest success on ISIC-2016 dataset with 94.4% accuracy, 81.9% Dice score and 81.5% IoU. On the ISIC-2017 dataset, the Residual Attention U-Net model showed superior performance with 92.2% accuracy, 76.9% Dice score and 69.5% IoU. The results show that attention mechanisms and residual structures provide significant contributions to the accurate segmentation of skin lesions and that these architectures have the potential to be used in clinical decision support systems.</p></abstract>
                                                                                                                                    <trans-abstract xml:lang="tr">
                            <p>Lazer ve cerrahi tedavilerde karşılaşılan en büyük zorluklardan biri, cilt hastalıkları bağlamında cilt lezyonlarının sınırlarını doğru bir şekilde belirlemektir. Ancak, derin öğrenme uygulamalarının bu tedavilerle entegre edilmesi, lezyon sınırlarının tespitinde önemli ilerlemeler sağlamıştır. Bu çalışmanın amacı, U-Net ve onun geliştirilmiş üç varyasyonu olan Residual U-Net, Attention U-Net ve Residual Attention U-Net modellerinin cilt lezyonu segmentasyonundaki performanslarını karşılaştırmalı olarak incelemektir. Modeller, ISIC-2016 ve ISIC-2017 adlı iki yaygın kamuya açık veri kümesi üzerinde test edilmiş ve karşılaştırma; aynı eğitim parametreleri, görüntü boyutları ve doğruluk, Dice skoru ve IoU değerlendirme metrikleri kullanılarak gerçekleştirilmiştir. ISIC-2016 veri kümesinde en yüksek başarıyı %94.4 doğruluk, %81.9 Dice skoru ve %81.5 IoU ile Attention U-Net modeli elde etmiştir. ISIC-2017 veri kümesinde ise Residual Attention U-Net modeli %92.2 doğruluk, %76.9 Dice skoru ve %69.5 IoU ile üstün performans göstermiştir. Sonuçlar, dikkat mekanizmalarının ve artık yapıların cilt lezyonlarının doğru segmentasyonu açısından önemli katkılar sağladığını ve bu mimarilerin klinik karar destek sistemlerinde kullanılabilir potansiyele sahip olduğunu göstermektedir.</p></trans-abstract>
                                                            
            
                                                            <kwd-group>
                                                    <kwd>Skin cancer</kwd>
                                                    <kwd>lesion segmentation</kwd>
                                                    <kwd>deep learning</kwd>
                                                    <kwd>U-Net</kwd>
                                            </kwd-group>
                                                        
                                                                            <kwd-group xml:lang="tr">
                                                    <kwd>Cilt kanseri</kwd>
                                                    <kwd>lezyon segmentasyonu</kwd>
                                                    <kwd>derin öğrenme</kwd>
                                                    <kwd>U-Net</kwd>
                                            </kwd-group>
                                                                                                            </article-meta>
    </front>
    <back>
                            <ref-list>
                                    <ref id="ref1">
                        <label>1</label>
                        <mixed-citation publication-type="journal">Albahli, S., Nida, N., Irtaza, A., Yousaf, M. H., &amp; Mahmood, M. T. 2020. Melanoma lesion detection and segmentation using YOLOv4-DarkNet and active contour. IEEE access, 8, 198403-198414.</mixed-citation>
                    </ref>
                                    <ref id="ref2">
                        <label>2</label>
                        <mixed-citation publication-type="journal">American Academy of Dermatology Association. Skin cancer in people of color.  2022. Retrieved 25 September 2024, from  https://www.aad.org/public/diseases/skin-cancer/types/common/melanoma/skin-color</mixed-citation>
                    </ref>
                                    <ref id="ref3">
                        <label>3</label>
                        <mixed-citation publication-type="journal">Codella, N. C., Gutman, D., Celebi, M. E., Helba, B., Marchetti, M. A., Dusza, S. W., ... and Halpern, A., 2018. Skin lesion analysis toward melanoma detection: A challenge at the 2017 international symposium on biomedical imaging (ISBI), hosted by the international skin imaging collaboration (ISIC). In 2018 IEEE 15th international symposium on biomedical imaging, 168-172.</mixed-citation>
                    </ref>
                                    <ref id="ref4">
                        <label>4</label>
                        <mixed-citation publication-type="journal">Dai, D., Dong, C., Xu, S., Yan, Q., Li, Z., Zhang, C., Luo, N., 2022. Ms RED: A novel multi-scale residual encoding and decoding network for skin lesion segmentation. Medical image analysis, 75, 102293.</mixed-citation>
                    </ref>
                                    <ref id="ref5">
                        <label>5</label>
                        <mixed-citation publication-type="journal">Ding, Y., Yi, Z., Xiao, J., Hu, M., Guo, Y., Liao, Z., and Wang, Y., 2024. CTH-Net: A CNN and Transformer hybrid network for skin lesion segmentation. Iscience, 27(4).</mixed-citation>
                    </ref>
                                    <ref id="ref6">
                        <label>6</label>
                        <mixed-citation publication-type="journal">Garcia-Arroyo, J. L., &amp; Garcia-Zapirain, B. 2019. Segmentation of skin lesions in dermoscopy images using fuzzy classification of pixels and histogram thresholding. Computer methods and programs in biomedicine, 168, 11-19.</mixed-citation>
                    </ref>
                                    <ref id="ref7">
                        <label>7</label>
                        <mixed-citation publication-type="journal">Gonzalez-Diaz, I., 2018. Dermaknet: Incorporating the knowledge of dermatologists to convolutional neural networks for skin lesion diagnosis. IEEE journal of biomedical and health informatics. 23(2), 547-559.</mixed-citation>
                    </ref>
                                    <ref id="ref8">
                        <label>8</label>
                        <mixed-citation publication-type="journal">Gutman, D., Codella, N. C., Celebi, E., Helba, B., Marchetti, M., Mishra, N., and Halpern, A., 2016. Skin lesion analysis toward melanoma detection: A challenge at the international symposium on biomedical imaging (ISBI) 2016, hosted by the international skin imaging collaboration (ISIC). arXiv preprint arXiv:1605.01397.</mixed-citation>
                    </ref>
                                    <ref id="ref9">
                        <label>9</label>
                        <mixed-citation publication-type="journal">Hasan, M. K., Dahal, L., Samarakoon, P. N., Tushar, F. I., and Martí, R.,2020. DSNet: Automatic dermoscopic skin lesion segmentation. Computers in biology and medicine, 120, 103738.</mixed-citation>
                    </ref>
                                    <ref id="ref10">
                        <label>10</label>
                        <mixed-citation publication-type="journal">Huang, G., Liu, Z., Van Der Maaten, L., Weinberger, K. Q.,2017. Densely connected convolutional networks. In Proceedings of the IEEE conference on computer vision and pattern recognition, 4700-4708.</mixed-citation>
                    </ref>
                                    <ref id="ref11">
                        <label>11</label>
                        <mixed-citation publication-type="journal">International Agency for Research on Cancer, World Health Organization. 2024. Retrieved 14 January 2024,  from https://www.iarc.who.int/cancer-type/skin-cancer/.</mixed-citation>
                    </ref>
                                    <ref id="ref12">
                        <label>12</label>
                        <mixed-citation publication-type="journal">LeCun, Y., Bengio, Y., and Hinton, G., 2015. Deep learning, Nature, 521(7553), 436-444.</mixed-citation>
                    </ref>
                                    <ref id="ref13">
                        <label>13</label>
                        <mixed-citation publication-type="journal">Long, J., Shelhamer, E., and Darrell, T., 2015. Fully convolutional networks for semantic segmentation. In Proceedings of the IEEE conference on computer vision and pattern recognition, 3431-3440.</mixed-citation>
                    </ref>
                                    <ref id="ref14">
                        <label>14</label>
                        <mixed-citation publication-type="journal">Lou, A., Guan, S., Ko, H., and Loew, M. H., 2022. CaraNet: context axial reverse attention network for segmentation of small medical objects. In Medical Imaging 2022: Image Processing, 81-92.</mixed-citation>
                    </ref>
                                    <ref id="ref15">
                        <label>15</label>
                        <mixed-citation publication-type="journal">Mahbod, A., Tschandl, P., Langs, G., Ecker, R., and Ellinger, I., 2020. The effects of skin lesion segmentation on the performance of dermatoscopic image classification. Computer Methods and Programs in Biomedicine, 197, 105725.</mixed-citation>
                    </ref>
                                    <ref id="ref16">
                        <label>16</label>
                        <mixed-citation publication-type="journal">MedicalNewsToday. What to know about melanoma vs. skin cancer, 2024. Retrieved 10 January 2025, from https://www.medicalnewstoday.com/articles/melanoma-vs-skin-cancer.</mixed-citation>
                    </ref>
                                    <ref id="ref17">
                        <label>17</label>
                        <mixed-citation publication-type="journal">Melanoma Research Alliance. Melanoma Survival Rates. 2024. Retrieved 20 March 2025, from https://www.curemelanoma.org/about-melanoma/melanoma-staging/melanoma-survival-rates.</mixed-citation>
                    </ref>
                                    <ref id="ref18">
                        <label>18</label>
                        <mixed-citation publication-type="journal">Mohakud, R., &amp; Dash, R. 2022. Skin cancer image segmentation utilizing a novel EN-GWO based hyper-parameter optimized FCEDN. Journal of King Saud University-Computer and Information Sciences, 34(10), 9889-9904.</mixed-citation>
                    </ref>
                                    <ref id="ref19">
                        <label>19</label>
                        <mixed-citation publication-type="journal">Nida, N., Irtaza, A., Javed, A., Yousaf, M. H., &amp; Mahmood, M. T. 2019. Melanoma lesion detection and segmentation using deep region based convolutional neural network and fuzzy C-means clustering. International journal of medical informatics, 124, 37-48.</mixed-citation>
                    </ref>
                                    <ref id="ref20">
                        <label>20</label>
                        <mixed-citation publication-type="journal">Nguyen, D. K., Tran, T. T., Nguyen, C. P., and Pham, V. T., 2020. Skin lesion segmentation based on integrating efficientnet and residual block into u-net neural network. In 2020 5th International Conference on Green Technology and Sustainable Development (GTSD), 366-371.</mixed-citation>
                    </ref>
                                    <ref id="ref21">
                        <label>21</label>
                        <mixed-citation publication-type="journal">Oktay, O., Schlemper, J., Folgoc, L. L., Lee, M., Heinrich, M., Misawa, K., and Rueckert, D.,2018. Attention u-net: Learning where to look for the pancreas.  arXiv preprint arXiv:1804.03999.</mixed-citation>
                    </ref>
                                    <ref id="ref22">
                        <label>22</label>
                        <mixed-citation publication-type="journal">Qi, W., Wei, M., Yang, W., Xu, C., and Ma, C.,2020. Automatic mapping of landslides by the ResU-Net. Remote Sensing, 12(15), 2487.</mixed-citation>
                    </ref>
                                    <ref id="ref23">
                        <label>23</label>
                        <mixed-citation publication-type="journal">Ronneberger, O., Fischer, P., and Brox, T.,2015. U-net: Convolutional networks for biomedical image segmentation. In Medical image computing and computer-assisted intervention–MICCAI 2015: 18th international conference, Munich, Germany, pp. 234-241.</mixed-citation>
                    </ref>
                                    <ref id="ref24">
                        <label>24</label>
                        <mixed-citation publication-type="journal">Ruan, J., Xie, M., Gao, J., Liu, T., and Fu, Y. 2023. Ege-unet: an efficient group enhanced unet for skin lesion segmentation. In International Conference on Medical Image Computing and Computer-Assisted Intervention, 481-490.</mixed-citation>
                    </ref>
                                    <ref id="ref25">
                        <label>25</label>
                        <mixed-citation publication-type="journal">Sarvamangala, D. R., Kulkarni, R. V., 2022. Convolutional neural networks in medical image understanding: a survey.  Evolutionary intelligence, 15(1), 1-22.</mixed-citation>
                    </ref>
                                    <ref id="ref26">
                        <label>26</label>
                        <mixed-citation publication-type="journal">Vesal, S., Ravikumar, N., and Maier, A., 2018. SkinNet: A deep learning framework for skin lesion segmentation. In 2018 IEEE nuclear science symposium and medical imaging conference proceedings (NSS/MIC), 1-3.</mixed-citation>
                    </ref>
                                    <ref id="ref27">
                        <label>27</label>
                        <mixed-citation publication-type="journal">World Cancer Research Fund International. Skin cancer statistics. 2024. Retrieved 14 January 2024. From https://www.wcrf.org/cancer-trends/skin-cancer-statistics/</mixed-citation>
                    </ref>
                                    <ref id="ref28">
                        <label>28</label>
                        <mixed-citation publication-type="journal">Zebari, N. A., &amp; Tenekeci, E. 2022. Skin Lesion Segmentation Using K-means Clustering with Removal Unwanted Regions. Adıyaman Üniversitesi Mühendislik Bilimleri Dergisi, 9(18), 519-529.</mixed-citation>
                    </ref>
                                    <ref id="ref29">
                        <label>29</label>
                        <mixed-citation publication-type="journal">Zhou, Z., Rahman Siddiquee, M. M., Tajbakhsh, N., and Liang, J., 2018. Unet++: A nested u-net architecture for medical image segmentation. In Deep Learning in Medical Image Analysis and Multimodal Learning for Clinical Decision. 4th International Workshop, DLMIA 2018, and 8th International Workshop, ML-CDS 2018, Conjunction with MICCAI 2018, Granada, Spain, 3-11.</mixed-citation>
                    </ref>
                            </ref-list>
                    </back>
    </article>
