<?xml version="1.0" encoding="UTF-8"?>
<!DOCTYPE article PUBLIC "-//NLM//DTD JATS (Z39.96) Journal Publishing DTD v1.4 20241031//EN"
        "https://jats.nlm.nih.gov/publishing/1.4/JATS-journalpublishing1-4.dtd">
<article  article-type="research-article"        dtd-version="1.4">
            <front>

                <journal-meta>
                                                                <journal-id>gummfd</journal-id>
            <journal-title-group>
                                                                                    <journal-title>Gazi Üniversitesi Mühendislik Mimarlık Fakültesi Dergisi</journal-title>
            </journal-title-group>
                            <issn pub-type="ppub">1300-1884</issn>
                                        <issn pub-type="epub">1304-4915</issn>
                                                                                            <publisher>
                    <publisher-name>Gazi Üniversitesi</publisher-name>
                </publisher>
                    </journal-meta>
                <article-meta>
                                        <article-id pub-id-type="doi">10.17341/gazimmfd.1199140</article-id>
                                                                <article-categories>
                                            <subj-group  xml:lang="en">
                                                            <subject>Engineering</subject>
                                                    </subj-group>
                                            <subj-group  xml:lang="tr">
                                                            <subject>Mühendislik</subject>
                                                    </subj-group>
                                    </article-categories>
                                                                                                                                                        <title-group>
                                                                                                                        <article-title>Kamkat meyvesi için derin öğrenmeye dayalı otonom hasat robotu tasarımı</article-title>
                                                                                                    </title-group>
            
                                                    <contrib-group content-type="authors">
                                                                        <contrib contrib-type="author">
                                                                    <contrib-id contrib-id-type="orcid">https://orcid.org/0000-0002-0361-5612</contrib-id>
                                                                <name>
                                    <surname>Gündüz</surname>
                                    <given-names>Taner</given-names>
                                </name>
                                                                    <aff>İSKENDERUN TEKNİK ÜNİVERSİTESİ</aff>
                                                            </contrib>
                                                    <contrib contrib-type="author">
                                                                    <contrib-id contrib-id-type="orcid">https://orcid.org/0000-0002-9689-2554</contrib-id>
                                                                <name>
                                    <surname>Dersuneli</surname>
                                    <given-names>Mehmet</given-names>
                                </name>
                                                                    <aff>İSKENDERUN TEKNİK ÜNİVERSİTESİ</aff>
                                                            </contrib>
                                                    <contrib contrib-type="author">
                                                                    <contrib-id contrib-id-type="orcid">https://orcid.org/0000-0002-9853-2878</contrib-id>
                                                                <name>
                                    <surname>Kutlu</surname>
                                    <given-names>Yakup</given-names>
                                </name>
                                                                    <aff>İSKENDERUN TEKNİK ÜNİVERSİTESİ</aff>
                                                            </contrib>
                                                                                </contrib-group>
                        
                                        <pub-date pub-type="pub" iso-8601-date="2024-05-20">
                    <day>20</day>
                    <month>05</month>
                    <year>2024</year>
                </pub-date>
                                        <volume>39</volume>
                                        <issue>3</issue>
                                        <fpage>1879</fpage>
                                        <lpage>1892</lpage>
                        
                        <history>
                                    <date date-type="received" iso-8601-date="2022-11-03">
                        <day>03</day>
                        <month>11</month>
                        <year>2022</year>
                    </date>
                                                    <date date-type="accepted" iso-8601-date="2023-12-09">
                        <day>09</day>
                        <month>12</month>
                        <year>2023</year>
                    </date>
                            </history>
                                        <permissions>
                    <copyright-statement>Copyright © 1986, Gazi Üniversitesi Mühendislik Mimarlık Fakültesi Dergisi</copyright-statement>
                    <copyright-year>1986</copyright-year>
                    <copyright-holder>Gazi Üniversitesi Mühendislik Mimarlık Fakültesi Dergisi</copyright-holder>
                </permissions>
            
                                                                                                <abstract><p>Otonom robotlar, dünya nüfus artışı karşısında azalan tarımsal üretim alanlarına ve tarımsal işgücü ihtiyacına çözüm olarak ortaya çıkıyor. Dünya genelinde insan hatalarından ve çalışma sürelerinden bağımsız bir yöntem olarak otonom hasat robotları üzerinde çalışmalar yapılmaktadır. Bu çalışmada, mobil bir platform üzerinde 6 eksenli bir robotik kol tasarlanmıştır. Derin öğrenme algoritmaları ile kamkat meyve tespiti yapılmış, özel tasarlanmış bir vakum tutucu ile entegre bir görüntü işleme algoritması oluşturulmuştur. Ayrıca literatürde hasat performansını düşüren yaprak sorunu ele alınmış ve çözüm önerilmiştir. Nesne tespiti sonrası geliştirilen algoritma ile yaprak veya herhangi bir engele takılmadan hasat gerçekleştirilmiştir. Denavit-Hartenberg (D-H) yöntemi kullanılarak elde edilen veri setinin ters kinematik hesaplamaları için yapay sinir tabanlı model oluşturularak robot hareketleri hesaplanmıştır. Nesne tespit başarısı %93 olup, saksılı kamkat ağaçlarında yapılan testler sonucunda %75 hasat başarısı elde edilmiştir.</p></abstract>
                                                            
            
                                                            <kwd-group>
                                                    <kwd>Otonom sistemler</kwd>
                                                    <kwd>son efektörler</kwd>
                                                    <kwd>makine öğrenmesi</kwd>
                                                    <kwd>çok katmanlı algılayıcılar</kwd>
                                            </kwd-group>
                            
                                                                                                                        </article-meta>
    </front>
    <back>
                            <ref-list>
                                    <ref id="ref1">
                        <label>1</label>
                        <mixed-citation publication-type="journal">[1] 	Odegard I. Y. R., Van der Voet E., The future of food Scenarios and the effect on natural resource use in agriculture in 2050, Ecological Economics, 97, 51-59, 2014.</mixed-citation>
                    </ref>
                                    <ref id="ref2">
                        <label>2</label>
                        <mixed-citation publication-type="journal">[2]       Iqbal J., Islam R. U., Abbas S. Z., Khan A. A., Ajwad S. A., Automatzacija industrijskih poslova kroz mehatroničke sustave pregled robotike iz industrijske perspective, Tehnički vjesnik, 23(3), 917-924, 2016.</mixed-citation>
                    </ref>
                                    <ref id="ref3">
                        <label>3</label>
                        <mixed-citation publication-type="journal">[3]        Hassan M. U., Ullah M., Iqbal J., Towards autonomy in agriculture: Design and prototyping of a robotic vehicle with seed selector, 2nd International Conference on Robotics and Artificial Intelligence ICRAI, pp. 37-44, IEEE, 2016.</mixed-citation>
                    </ref>
                                    <ref id="ref4">
                        <label>4</label>
                        <mixed-citation publication-type="journal">[4]        Tanigaki K., Fujiura T., Akase A., Imagawa J., Cherry-harvesting robot, Computers and electronics in agriculture, 63(1), 65-72, 2008.</mixed-citation>
                    </ref>
                                    <ref id="ref5">
                        <label>5</label>
                        <mixed-citation publication-type="journal">[5]	Almendral K. A. M., Babaran R. M. G., Carzon B. J. C., Cu K. P. K., Lalanto J. M., Abad A. C., Autonomous fruit harvester with machine vision. Journal of Telecommunication, Electronic and Computer Engineering (JTEC), 10(1-6), 79-86, 2018.</mixed-citation>
                    </ref>
                                    <ref id="ref6">
                        <label>6</label>
                        <mixed-citation publication-type="journal">[6]	Feng Q., Zou W., Fan P., Zhang C., Wang X., Design and test of robotic harvesting system for cherry tomato, International Journal of Agricultural and Biological Engineering, 11(1), 96-100, 2018.</mixed-citation>
                    </ref>
                                    <ref id="ref7">
                        <label>7</label>
                        <mixed-citation publication-type="journal">[7]      Luo L., Tang Y., Lu Q., Chen X., Zhang P., A vision methodology for harvesting robot to detect cutting points on peduncles of double overlapping grape clusters in a vineyard, Computers in industry, 99, 130-139, 2018.</mixed-citation>
                    </ref>
                                    <ref id="ref8">
                        <label>8</label>
                        <mixed-citation publication-type="journal">[8]	Xiong Y., Peng C., Grimstad L., From P. J., Isler V., Development and field evaluation of a strawberry harvesting robot with a cable-driven gripper, Computers and electronics in agriculture, 157, 392-402, 2019.</mixed-citation>
                    </ref>
                                    <ref id="ref9">
                        <label>9</label>
                        <mixed-citation publication-type="journal">[9]	Williams H. A., Jones M. H., Nejati M., Seabright M. J., Bell J., Penhall N. D., MacDonald B. A., Robotic kiwifruit harvesting using machine vision, convolutional neural networks and robotic arms, Biosystems Engineering, 181, 140-156, 2019.</mixed-citation>
                    </ref>
                                    <ref id="ref10">
                        <label>10</label>
                        <mixed-citation publication-type="journal">[10]	Onishi Y., Yoshida T., Kurita H., Fukao T., Arihara H., Iwai A., An automated fruit harvesting robot by using deep learning, Robomech Journal, 6(1), 1-8, 2019.</mixed-citation>
                    </ref>
                                    <ref id="ref11">
                        <label>11</label>
                        <mixed-citation publication-type="journal">[11] 	Arad B., Balendonck J., Barth R., Ben Shahar O., Edan Y., Hellström T., van Tuijl B., Development of a sweet pepper harvesting robot, Journal of Field Robotics, 37(6), 1027-1039, 2020.</mixed-citation>
                    </ref>
                                    <ref id="ref12">
                        <label>12</label>
                        <mixed-citation publication-type="journal">[12] 	Zhang K., Lammers K., Chu P., Li Z., Lu R., System design and control of an apple harvesting robot, Mechatronics, 79, 102644, 2021.</mixed-citation>
                    </ref>
                                    <ref id="ref13">
                        <label>13</label>
                        <mixed-citation publication-type="journal">[13]     Yin W., Wen H., Ning Z., Ye J., Dong Z., Fruit Detection and Pose Estimation for Grape Cluster Harvesting Robot Using Binocular Imagery Based on Deep Neural Networks, Frontiers in Robotics and AI, 8, 2021.</mixed-citation>
                    </ref>
                                    <ref id="ref14">
                        <label>14</label>
                        <mixed-citation publication-type="journal">[14]	Jun J., Kim J., Seol J., Kim J., Son H. I., Towards an Efficient Tomato Harvesting Robot: 3D Perception, Manipulation, and End-Effector, IEEE Access, 9, 17631-17640, 2021.</mixed-citation>
                    </ref>
                                    <ref id="ref15">
                        <label>15</label>
                        <mixed-citation publication-type="journal">[15]      Yoshida T., Kawahara T., Fukao T., Fruit Recognition Method for a Harvesting Robot with RGB-D Cameras, 2022.</mixed-citation>
                    </ref>
                                    <ref id="ref16">
                        <label>16</label>
                        <mixed-citation publication-type="journal">[16]       Wan H., Fan Z., Yu X., Kang M., Wang P., A real-time branch detection and reconstruction mechanism for harvesting robot via convolutional neural network and image segmentation, Computers and Electronics in Agriculture, 192, 106609, 2022.</mixed-citation>
                    </ref>
                                    <ref id="ref17">
                        <label>17</label>
                        <mixed-citation publication-type="journal">[17]	Dumitrache A., Robot kinematics diagram http://alexdu.github.io/sketch-lib, 2010, Erişim tarihi: 12.07.2021.</mixed-citation>
                    </ref>
                                    <ref id="ref18">
                        <label>18</label>
                        <mixed-citation publication-type="journal">[18] 	Bochkovskiy A., Wang C. Y., Liao H. Y. M., Yolov4: Optimal speed and accuracy of object detection, arXiv preprint arXiv:2004.10934, 2020.</mixed-citation>
                    </ref>
                                    <ref id="ref19">
                        <label>19</label>
                        <mixed-citation publication-type="journal">[19]	Zhu L., Geng X., Li Z., Liu C., Improving yolov5 with attention mechanism for detecting boulders from planetary images. Remote Sensing, 13(18), 3776, 2021.</mixed-citation>
                    </ref>
                                    <ref id="ref20">
                        <label>20</label>
                        <mixed-citation publication-type="journal">[20]	WEB (a), Overview of model structure about YOLOv5, https://github.com/ultralytics/yolov5/issues/280, 2020, Erişim tarihi: 25.06.2021</mixed-citation>
                    </ref>
                                    <ref id="ref21">
                        <label>21</label>
                        <mixed-citation publication-type="journal">[21]	Li C., Li L., Jiang H.,  Weng K., Geng Y.,  Li L.,  Ke Z., Li Q., Cheng M.,   Nie M., Li Y., Zhang B., Liang Y., Zhou L., Xu X., Chu X., Wei X., Wei X., YOLOv6: A Single-Stage Object Detection Framework for Industrial Applications, arXiv preprint arXiv:2209.02976, 2022.</mixed-citation>
                    </ref>
                                    <ref id="ref22">
                        <label>22</label>
                        <mixed-citation publication-type="journal">[22]	Solawetz J., Nelson J., What&#039;s New in YOLOv6?, https://blog.roboflow.com/yolov6/, 2022, Erişim Tarihi: 20.08.2022</mixed-citation>
                    </ref>
                                    <ref id="ref23">
                        <label>23</label>
                        <mixed-citation publication-type="journal">[23] 	Yayik A., Kutlu Y., Diagnosis of congestive heart failure using poincare map plot, 20th Signal Processing and Communications Applications Conference (SIU), Muğla, 1-4, 2012.</mixed-citation>
                    </ref>
                                    <ref id="ref24">
                        <label>24</label>
                        <mixed-citation publication-type="journal">[24]	Shao C., A quantum model for multilayer perceptron. arXiv preprint arXiv:1808.10561, 2018.</mixed-citation>
                    </ref>
                                    <ref id="ref25">
                        <label>25</label>
                        <mixed-citation publication-type="journal">[25]	Altınkaynak A., Ağsız Yöntem Uygulamaları için Trigonometri Tabanlı Radyal Özelliğe Sahip Yeni Bir Temel Fonksiyon, International Journal of Advances in Engineering and Pure Sciences 32.1, 96-110, 2020.</mixed-citation>
                    </ref>
                                    <ref id="ref26">
                        <label>26</label>
                        <mixed-citation publication-type="journal">[26]	Huang G.B., Qin-Yu Z., Chee-Kheong S., Extreme learning machine: theory and applications, Neurocomputing 70.1-3, 489-501, 2006.</mixed-citation>
                    </ref>
                                    <ref id="ref27">
                        <label>27</label>
                        <mixed-citation publication-type="journal">[27]	Haykin, Simon (1998). Neural Networks: A Comprehensive Foundation (2 ed.). Prentice Hall. ISBN 0-13-273350-1.</mixed-citation>
                    </ref>
                                    <ref id="ref28">
                        <label>28</label>
                        <mixed-citation publication-type="journal">[28] 	Šegota S. B., Anđelić N., Mrzljak V., Lorencin I., Kuric I., Car Z., Utilization of multilayer perceptron for determining the inverse kinematics of an industrial robotic manipulator, International Journal of Advanced Robotic Systems, 18(4), 2021.</mixed-citation>
                    </ref>
                                    <ref id="ref29">
                        <label>29</label>
                        <mixed-citation publication-type="journal">[29]	Camero A, Toutouh J, Alba E., A specialized evolutionary strategy using mean absolute error random sampling to design recurrent neural networks, arXiv:1909.02425, 1–10, 2019.</mixed-citation>
                    </ref>
                                    <ref id="ref30">
                        <label>30</label>
                        <mixed-citation publication-type="journal">[30]	Bradski G., Kaehler A., Learning OpenCV: Computer vision with the OpenCV library, O&#039;Reilly Media, Sebastopol, 1-580, 2008.</mixed-citation>
                    </ref>
                                    <ref id="ref31">
                        <label>31</label>
                        <mixed-citation publication-type="journal">[31] 	Marcmateo, BCN3D Technologies. https://github.com/BCN3D, 2018, Erişim tarihi: 31.04.2021.</mixed-citation>
                    </ref>
                                    <ref id="ref32">
                        <label>32</label>
                        <mixed-citation publication-type="journal">[32]	Zhao Y., Gong L., Huang Y., Liu C., A review of key techniques of vision-based control for harvesting robot, Computers and Electronics in Agriculture, 127, 311-323, 2016.</mixed-citation>
                    </ref>
                            </ref-list>
                    </back>
    </article>
