<?xml version="1.0" encoding="UTF-8"?>
<!DOCTYPE article PUBLIC "-//NLM//DTD JATS (Z39.96) Journal Publishing DTD v1.4 20241031//EN"
        "https://jats.nlm.nih.gov/publishing/1.4/JATS-journalpublishing1-4.dtd">
<article article-type="review-article" dtd-version="1.4" xml:lang="tr">
            <front>

                <journal-meta>
                                                                <journal-id journal-id-type="publisher-id">demiryolu mühendisliği</journal-id>
            <journal-title-group>
                                                                                    <journal-title>Demiryolu Mühendisliği</journal-title>
            </journal-title-group>
                            <issn pub-type="ppub">2149-1607</issn>
                                        <issn pub-type="epub">2687-2463</issn>
                                                                                            <publisher>
                    <publisher-name>Demiryolu Mühendisleri Derneği</publisher-name>
                </publisher>
                    </journal-meta>
                <article-meta>
                                        <article-id pub-id-type="doi">10.47072/demiryolu.1918976</article-id>
                                                                <article-categories>
                                            <subj-group  xml:lang="en">
                                                            <subject>Electronic Sensors</subject>
                                                            <subject>Photonic and Electro-Optical Devices, Sensors and Systems (Excl. Communications)</subject>
                                                    </subj-group>
                                            <subj-group  xml:lang="tr">
                                                            <subject>Elektronik Algılayıcılar</subject>
                                                            <subject>Fotonik ve Elektro-Optik Cihazlar, Sensörler ve Sistemler (İletişim Hariç)</subject>
                                                    </subj-group>
                                    </article-categories>
                                                                                                                                                        <title-group>
                                                                                                                        <article-title>Otomatik Yolcu Sayma Sistemlerinde Derinlik Bilgisinin Kullanımına Yönelik Kapsamlı Bir İnceleme ve Kavramsal Mimari Çıkarımı</article-title>
                                                                                                                                                                                                <trans-title-group xml:lang="en">
                                    <trans-title>A Comprehensive Review and Conceptual Architecture Derivation for the Use of Depth Information in Automated Passenger Counting Systems</trans-title>
                                </trans-title-group>
                                                                                                    </title-group>
            
                                                    <contrib-group content-type="authors">
                                                                        <contrib contrib-type="author">
                                                                    <contrib-id contrib-id-type="orcid">https://orcid.org/0009-0006-9906-8524</contrib-id>
                                                                <name>
                                    <surname>Görgülü</surname>
                                    <given-names>Şükrü</given-names>
                                </name>
                                                                    <aff>Albayrak Makine Elektronik San. Tic. A.Ş.</aff>
                                                            </contrib>
                                                    <contrib contrib-type="author">
                                                                    <contrib-id contrib-id-type="orcid">https://orcid.org/0000-0003-3410-2227</contrib-id>
                                                                <name>
                                    <surname>Adıyaman</surname>
                                    <given-names>Soner</given-names>
                                </name>
                                                                    <aff>Albayrak Makine Elektronik San. Tic. A.Ş.</aff>
                                                            </contrib>
                                                    <contrib contrib-type="author">
                                                                    <contrib-id contrib-id-type="orcid">https://orcid.org/0000-0002-5801-8292</contrib-id>
                                                                <name>
                                    <surname>Öden</surname>
                                    <given-names>Seray</given-names>
                                </name>
                                                                    <aff>Albayrak Makine Elektronik San. Tic. A.Ş.</aff>
                                                            </contrib>
                                                    <contrib contrib-type="author">
                                                                    <contrib-id contrib-id-type="orcid">https://orcid.org/0000-0003-3290-914X</contrib-id>
                                                                <name>
                                    <surname>Kırımça</surname>
                                    <given-names>Necim</given-names>
                                </name>
                                                                    <aff>Albayrak Makine Elektronik San. Tic. A.Ş.</aff>
                                                            </contrib>
                                                                                </contrib-group>
                        
                <pub-date pub-type="pub" iso-8601-date="2026-05-04">
                    <day>04</day>
                    <month>05</month>
                    <year>2026</year>
                </pub-date>
                                                    <issue>24</issue>
                                                
                        <history>
                    <date date-type="received" iso-8601-date="2026-03-30">
                        <day>30</day>
                        <month>03</month>
                        <year>2026</year>
                    </date>
                    <date date-type="accepted" iso-8601-date="2026-05-04">
                        <day>04</day>
                        <month>05</month>
                        <year>2026</year>
                    </date>
                            </history>
                                        <permissions>
                    <copyright-statement>Copyright © 2014, Demiryolu Mühendisliği</copyright-statement>
                    <copyright-year>2014</copyright-year>
                    <copyright-holder>Demiryolu Mühendisliği</copyright-holder>
                </permissions>
            
                                                                                                <abstract><p>Toplu taşıma sistemlerinde yolcu sayısı, hizmet planlaması ve operasyonel verimlilik açısından kritik bir girdidir. Ancak yolcu yoğunluğu; saat, hava koşulları ve yerel etkinlikler gibi faktörlere bağlı olarak sürekli değişmektedir. Geleneksel biletleme ve turnike tabanlı yöntemler, dolaylı veri üretmeleri ve insan faktörüne bağlı olmaları nedeniyle özellikle yoğun ortamlarda doğruluk sorunları yaratmaktadır. Bu nedenle otomatik yolcu sayma sistemleri önem kazanmıştır. Özellikle derinlik verisi kullanan yaklaşımlar son yıllarda öne çıkmıştır. Bu çalışmada, yolcu sayımında derinlik bilgisinin kullanımı sistematik olarak incelenmektedir. Renk+derinlik, stereo görüş ve uçuş süresi (Time of Flight - ToF) sensörlerinin otomatik yolcu sayma uygulamalarındaki rolleri incelenmiştir. Üstten bakış konfigürasyonunun örtülmeye (occlusion) karşı dayanıklı olması ve yön ayrıştırma başarımı sayesinde baskın mimari haline geldiği görülmüştür. Literatür, sensör tercihlerinin stereo sistemlerden ToF teknolojilerine evrildiğini; algoritmik olarak ise klasik segmentasyondan çok aşamalı tespit-takip ve öğrenme tabanlı hibrit modellere geçiş yaşandığını göstermektedir. Modern sistemlerde hata oranlarının %1–3 seviyelerine yaklaştığı ve 30-60 kare hızı ile gerçek zamanlılık hedeflerinin sağlandığı görülmektedir. Ayrıca mevcut literatür tarama çalışmalarında otomatik yolcu sayma sistemlerinin bileşenleri tekil olarak incelenmekte ve bileşenlerin sistem seviyesinde nasıl bir araya getirileceğine dair bütüncül bir çerçeve sunulmamaktadır. Bu nedenle, literatürdeki ortak tasarım desenlerinden hareketle, derinlik tabanlı otomatik yolcu sayma sistemleri için hem modüler hem de bütüncül bir referans mimari çıkarılmıştır. Bu mimari, algılama, ön işleme, insan tespiti, çoklu nesne takibi ve yön tabanlı sayım katmanlarını birleştirmektedir. 
Çalışmada, demiryolu toplu taşıma sistemlerinde otomatik yolcu sayımı için derinlik bilgisinin yalnızca sensör seçiminde değil, aynı zamanda sistem mimarisinde ve algoritma tasarımında önemli bir unsur olduğu sonucuna varılmıştır.</p></abstract>
                                                                                                                                    <trans-abstract xml:lang="en">
                            <p>Passenger volume is a key input for service planning and operations in public transport systems. However, it varies dynamically with factors such as time of day, weather, and local events, making accurate and real-time estimation essential for both efficiency and passenger comfort. Traditional ticketing and turnstile-based methods rely on indirect data and human factors, often leading to inaccuracies in crowded and dynamic environments. As a result, automated passenger counting (APC) systems have gained prominence, particularly those leveraging depth information. This study examines the use of depth-based approaches, focusing on RGB-D, Stereo Vision, and Time-of-Flight (ToF) sensors. Findings indicate that top-down configurations have become dominant due to their robustness against occlusion and effectiveness in direction detection. The literature also highlights a technological shift from stereo vision to ToF sensors, alongside a transition from classical segmentation methods to multi-stage detection, tracking, and learning-based hybrid models. Modern APC systems achieve error rates as low as 1–3% while maintaining real-time performance of 30–60 frames/second. Existing literature reviews examine the components of APC systems individually, and a holistic framework for integrating them at the system level is lacking. Therefore, based on common patterns in the literature, a modular reference architecture was derived for depth-based APC systems, integrating sensing, preprocessing, human detection, multi-object tracking, and direction-based counting. The study concludes that depth information is not only a topic for sensor choice but a key factor shaping system architecture and algorithmic design.</p></trans-abstract>
                                                            
            
                <kwd-group xml:lang="tr">
                    <kwd>Otomatik Yolcu Sayma</kwd>
                    <kwd>Derinlik Kamerası</kwd>
                    <kwd>Time-Of-Flight</kwd>
                    <kwd>Üstten Bakış</kwd>
                    <kwd>Yolcu Tespiti</kwd>
                    <kwd>Takip</kwd>
                </kwd-group>
                                                        
                <kwd-group xml:lang="en">
                    <kwd>Automatic Passenger Counting</kwd>
                    <kwd>Depth Camera</kwd>
                    <kwd>Time-Of-Flight</kwd>
                    <kwd>Overhead</kwd>
                    <kwd>Passenger Detection</kwd>
                    <kwd>Tracking</kwd>
                </kwd-group>
                                                                                                        <funding-group specific-use="FundRef">
                    <award-group>
                                                    <funding-source>
                                <named-content content-type="funder_name">T.C. Ulaştırma ve Altyapı Bakanlığı, Ulaştırma, Denizcilik ve Haberleşme Araştırmaları Merkezi Başkanlığı (UDHAM)</named-content>
                            </funding-source>
                                                                            <award-id>2023.UT.01.01.004</award-id>
                                            </award-group>
                </funding-group>
                                </article-meta>
    </front>
    <back>
                            <ref-list>
                                    <ref id="ref1">
                        <label>1</label>
                        <mixed-citation publication-type="journal">[1]	J. Salvi, S. Fernandez, T. Pribanic, and X. Llado, “A state of the art in structured light patterns for surface profilometry,” Pattern Recognition, vol. 43, no. 8, pp. 2666–2680, 2010, doi: 10.1016/j.patcog.2010.03.004</mixed-citation>
                    </ref>
                                    <ref id="ref2">
                        <label>2</label>
                        <mixed-citation publication-type="journal">[2]	Z. Zhang, “Microsoft kinect sensor and its effect,” IEEE Multimedia, vol. 19, no. 2, pp. 4–10, 2012, doi: 10.1109/MMUL.2012.24</mixed-citation>
                    </ref>
                                    <ref id="ref3">
                        <label>3</label>
                        <mixed-citation publication-type="journal">[3]	D. Scharstein and R. Szeliski, “A taxonomy and evaluation of dense two-frame stereo correspondence algorithms,” International Journal of Computer Vision, vol. 47, no. 1–3, pp. 7–42, 2002, doi: 10.1023/A:1014573219977</mixed-citation>
                    </ref>
                                    <ref id="ref4">
                        <label>4</label>
                        <mixed-citation publication-type="book">[4]	R. Hartley and A. Zisserman, Multiple view geometry in computer vision. Cambridge, U.K.: Cambridge University Press, 2004.</mixed-citation>
                    </ref>
                                    <ref id="ref5">
                        <label>5</label>
                        <mixed-citation publication-type="journal">[5]	R. Lange and P. Seitz, “Solid-state time-of-flight range camera,” IEEE Journal of Quantum Electronics, vol. 37, no. 3, pp. 390–397, 2001.</mixed-citation>
                    </ref>
                                    <ref id="ref6">
                        <label>6</label>
                        <mixed-citation publication-type="journal">[6]	A. Kolb, E. Barth, R. Koch, and R. Larsen, “Time-of-flight sensors in computer graphics,” Computer Graphics Forum, vol. 29, no. 1, pp. 141–159, 2010, doi: 10.1111/j.1467-8659.2009.01583.x</mixed-citation>
                    </ref>
                                    <ref id="ref7">
                        <label>7</label>
                        <mixed-citation publication-type="journal">[7]	H. Zhang, C. Reardon, and L. E. Parker, “Real-time multiple human perception with color-depth cameras on a mobile robot,” IEEE Transactions on Cybernetics, vol. 43, no. 5, pp. 1429–1441, 2013, doi: 10.1109/TCYB.2013.2275291</mixed-citation>
                    </ref>
                                    <ref id="ref8">
                        <label>8</label>
                        <mixed-citation publication-type="journal">[8]	O. H. Jafari, D. Mitzel, and B. Leibe, “Real-time RGB-D based people detection and tracking for mobile robots and head-worn cameras,” in Proc. IEEE Int. Conf. Robotics and Automation (ICRA), 2014, pp. 5636–5643, doi: 10.1109/ICRA.2014.6907688</mixed-citation>
                    </ref>
                                    <ref id="ref9">
                        <label>9</label>
                        <mixed-citation publication-type="confproc">[9]	J. Liu, Y. Liu, Y. Cui, and Y. Q. Chen, “Real-time human detection and tracking in complex environments using single RGB-D camera,” in Proc. IEEE Int. Conf. Image Processing, 2013, pp. 3088–3092, doi: 10.1109/ICIP.2013.6738636</mixed-citation>
                    </ref>
                                    <ref id="ref10">
                        <label>10</label>
                        <mixed-citation publication-type="journal">[10]	J. Liu, Y. Liu, G. Zhang, P. Zhu, and Y. Q. Chen, “Detecting and tracking people in real time with RGB-D camera,” Pattern Recognition Letters, vol. 53, pp. 16–23, 2015, doi: 10.1016/j.patrec.2014.09.013</mixed-citation>
                    </ref>
                                    <ref id="ref11">
                        <label>11</label>
                        <mixed-citation publication-type="journal">[11]	J. Liu, G. Zhang, Y. Liu, L. Tian, and Y. Q. Chen, “An ultra-fast human detection method for color-depth camera,” Journal of Visual Communication and Image Representation, vol. 31, pp. 177–185, 2015, doi: 10.1016/j.jvcir.2015.06.014</mixed-citation>
                    </ref>
                                    <ref id="ref12">
                        <label>12</label>
                        <mixed-citation publication-type="journal">[12]	Y. Xiao, V. R. Kamat, and C. C. Menassa, “Human tracking from single RGB-D camera using online learning,” Image and Vision Computing, vol. 88, pp. 67–75, 2019, doi: 10.1016/j.imavis.2019.05.003</mixed-citation>
                    </ref>
                                    <ref id="ref13">
                        <label>13</label>
                        <mixed-citation publication-type="journal">[13]	Y. Yao, X. Zhang, Y. Liang, X. Zhang, F. Shen, and J. Zhao, “A real-time pedestrian counting system based on RGB-D,” in Proc. 12th Int. Conf. Advanced Computational Intelligence (ICACI), 2020, pp. 110–117, doi: 10.1109/ICACI49185.2020.9177816</mixed-citation>
                    </ref>
                                    <ref id="ref14">
                        <label>14</label>
                        <mixed-citation publication-type="journal">[14]	M. Gochoo, S. A. Rizwan, Y. Y. Ghadi, A. Jalal and K. Kim, “A systematic deep learning based overhead tracking and counting system using RGB-D remote cameras,” Applied Sciences, vol. 11, no. 12, pp. 5503, 2021, doi: 10.3390/app11125503</mixed-citation>
                    </ref>
                                    <ref id="ref15">
                        <label>15</label>
                        <mixed-citation publication-type="journal">[15]	L. Tian, M. Li, Y. Hao, J. Liu, G. Zhang, and Y. Q. Chen, “Robust 3-D human detection in complex environments with a depth camera,” IEEE Transactions on Multimedia, vol. 20, no. 9, pp. 2249–2261, 2018, doi: 10.1109/TMM.2018.2803526</mixed-citation>
                    </ref>
                                    <ref id="ref16">
                        <label>16</label>
                        <mixed-citation publication-type="journal">[16]	L. Xia, C.-C. Chen, and J. K. Aggarwal, “Human detection using depth information by kinect,” in CVPR 2011 Workshops, 2011, pp. 15–22, doi: 10.1109/CVPRW.2011.5981811</mixed-citation>
                    </ref>
                                    <ref id="ref17">
                        <label>17</label>
                        <mixed-citation publication-type="confproc">[17]	F. Galčík and R. Gargalík, “Real-time depth map based people counting,” in International Conference on Advanced Concepts for Intelligent Vision Systems, 2013, pp. 330–341.</mixed-citation>
                    </ref>
                                    <ref id="ref18">
                        <label>18</label>
                        <mixed-citation publication-type="confproc">[18]	L. Zhu and K.-H. Wong, “Human tracking and counting using the kinect range sensor based on adaboost and kalman filter,” in International Symposium on Visual Computing (ISVC), 2013, pp. 582–591.</mixed-citation>
                    </ref>
                                    <ref id="ref19">
                        <label>19</label>
                        <mixed-citation publication-type="confproc">[19]	M. H. Khan, K. Shirahama, M.S. Farid and M. Grzegorzek, “Multiple human detection in depth images,” in IEEE 18th Int. Workshop Multimedia Signal Processing (MMSP), 2016, pp. 1–6, doi: 10.1109/MMSP.2016.7813385</mixed-citation>
                    </ref>
                                    <ref id="ref20">
                        <label>20</label>
                        <mixed-citation publication-type="confproc">[20]	Z. Qiuyu, T. Li, J. Yiping, and D. Wei-jun, “A novel approach of counting people based on stereovision and DSP,” in The 2nd. Int. Conf. on Computer and Automation Engineering (ICCAE), 2010, pp. 81–84, doi: 10.1109/ICCAE.2010.5451996</mixed-citation>
                    </ref>
                                    <ref id="ref21">
                        <label>21</label>
                        <mixed-citation publication-type="confproc">[21]	N. Bernini, L. Bombini, M. Buzzoni, P. Cerri, and P. Grisleri, “An embedded system for counting passengers in public transportation vehicles,” in IEEE/ASME 10th Int. Conf. Mechatronic and Embedded Systems and Applications (MESA), 2014, pp. 1–6, doi: 10.1109/MESA.2014.6935562</mixed-citation>
                    </ref>
                                    <ref id="ref22">
                        <label>22</label>
                        <mixed-citation publication-type="journal">[22]	T. Yahiaoui, L. Khoudour, and C. Meurie, “Real-time passenger counting in buses using dense stereovision,” Journal of Electronic Imaging, vol. 19, no. 3, 2010, doi: 10.1117/1.3455989</mixed-citation>
                    </ref>
                                    <ref id="ref23">
                        <label>23</label>
                        <mixed-citation publication-type="journal">[23]	M. Rauter, “Reliable human detection and tracking in top-view depth images,” in Proc. IEEE CVPR Workshops, 2013, pp. 529–534.</mixed-citation>
                    </ref>
                                    <ref id="ref24">
                        <label>24</label>
                        <mixed-citation publication-type="journal">[24]	D. Nurseitov, K. Bostanbekov, N. Toiganbayeva, A. Zhalgas, D. Yedilkhan, and B. Amirgaliyev, &quot;Vision-based people counting and tracking for urban environments,&quot; Journal of Imaging, vol. 12, no. 1, pp. 27, 2026, doi: 10.3390/jimaging12010027.</mixed-citation>
                    </ref>
                                    <ref id="ref25">
                        <label>25</label>
                        <mixed-citation publication-type="journal">[25]	D. Klauser, G. Bärwolff, and H. Schwandt, “A TOF-based automatic passenger counting approach in public transportation systems,” AIP Conference Proceedings, vol. 1648, no. 1, pp. 850113, 2015, doi: 10.1063/1.4913168</mixed-citation>
                    </ref>
                                    <ref id="ref26">
                        <label>26</label>
                        <mixed-citation publication-type="journal">[26]	A. Bevilacqua, L. Di Stefano, and P. Azzari, “People tracking using a time-of-flight depth sensor,” in Proc. IEEE Int. Conf. on Video and Signal Based Surveillance, 2006, pp. 89–89, doi: 10.1109/AVSS.2006.92</mixed-citation>
                    </ref>
                                    <ref id="ref27">
                        <label>27</label>
                        <mixed-citation publication-type="journal">[27]	R. Tanner, M. Studer, A. Zanoli, and A. Hartmann, “People detection and tracking with tof sensor,” in IEEE Fifth International Conference on Advanced Video and Signal Based Surveillance, 2008, pp. 356–361, doi: 10.1109/AVSS.2008.18</mixed-citation>
                    </ref>
                                    <ref id="ref28">
                        <label>28</label>
                        <mixed-citation publication-type="journal">[28]	S. Ikemura and H. Fujiyoshi, “Human detection by haar-like filtering using depth information,” in Proc. of the 21st ICPR, 2012, pp. 813–816.</mixed-citation>
                    </ref>
                                    <ref id="ref29">
                        <label>29</label>
                        <mixed-citation publication-type="journal">[29]	C. Stahlschmidt, A. Gavriilidis, J. Velten and A. Kummert, “Applications for a people detection and tracking algorithm using a time-of-flight camera,” Multimedia Tools and Applications, vol. 75, no. 17, pp. 10769–10786, 2016.</mixed-citation>
                    </ref>
                                    <ref id="ref30">
                        <label>30</label>
                        <mixed-citation publication-type="journal">[30]	C.A. Luna Vazquez, C. Losada Gutierrez, D. Fuentes Jimenez, A. Fernandez Rincon, M. R. Mazo Quintas, and J. Macias Guarasa, “Robust people detection using depth information from an overhead time-of-flight camera,” Expert Systems with Applications, vol. 71, pp. 240–256, 2017, doi: 10.1016/j.eswa.2016.11.019</mixed-citation>
                    </ref>
                                    <ref id="ref31">
                        <label>31</label>
                        <mixed-citation publication-type="journal">[31]	A. Fernandez-Rincon, D. Fuentes-Jimenez, C. Losada-Gutierrez, M. Marron-Romera, C.A. Luna, J. Macias-Guarasa and M. Mazo, “Robust people detection and tracking from an overhead time-of-flight camera,” in International Conference on Computer Vision Theory and Applications, 2017, pp. 556–564.</mixed-citation>
                    </ref>
                                    <ref id="ref32">
                        <label>32</label>
                        <mixed-citation publication-type="journal">[32]	W. Wang, P. Liu, R. Ying, J. Wang, J. Qian, J. Jia, J. Gao, “A high-computational efficiency human detection and flow estimation method based on tof measurements,” Sensors, vol. 19, no. 3, pp. 729, 2019, doi: 10.3390/s19030729</mixed-citation>
                    </ref>
                                    <ref id="ref33">
                        <label>33</label>
                        <mixed-citation publication-type="journal">[33]	F. Zoghlami, O.K. Sen, H. Heinrich, G. Schneider, E. Ercelik, A. Knoll, T. Villmann, “ToF/radar early feature-based fusion system for human detection and tracking,” in 22nd IEEE ICIT, 2021, pp. 942–949, doi: 10.1109/ICIT46573.2021.9453703</mixed-citation>
                    </ref>
                                    <ref id="ref34">
                        <label>34</label>
                        <mixed-citation publication-type="journal">[34]	W. Rahmaniar, W. Wang, C. Chiu and N. Hakim, “Real-time bi-directional people counting using an RGB-D camera,” Sensor Review, vol. 41, no. 4, pp. 341-349, 2021, doi: 10.1108/SR-12-2020-0301</mixed-citation>
                    </ref>
                                    <ref id="ref35">
                        <label>35</label>
                        <mixed-citation publication-type="journal">[35]	C. Stahlschmidt, A. Gavriilidis, J. Velten, and A. Kummert, “People detection and tracking from a top-view position using a time-of-flight camera,” in MCSS, 2013, pp. 213–223.</mixed-citation>
                    </ref>
                                    <ref id="ref36">
                        <label>36</label>
                        <mixed-citation publication-type="journal">[36]	L. Tamas and A. Cozma, “Embedded real-time people detection and tracking with time-of-flight camera,” in Real-Time Image Processing and Deep Learning, vol. 11736, pp. 65-70, 2021, doi: 10.1117/12.2586057</mixed-citation>
                    </ref>
                                    <ref id="ref37">
                        <label>37</label>
                        <mixed-citation publication-type="journal">[37]	T.-E. Tseng, A.-S. Liu, P.-H. Hsiao, C.-M. Huang, and L.-C. Fu, “Real-time people detection and tracking for indoor surveillance using multiple top-view depth cameras,” in. IEEE/RSJ IROS, 2014, pp. 4077–4082, doi: 10.1109/IROS.2014.6943136</mixed-citation>
                    </ref>
                                    <ref id="ref38">
                        <label>38</label>
                        <mixed-citation publication-type="journal">[38]	S. T. Dharmarajan, Y. Zhang, B. Li, M. Saada, H. Cai, Q. Meng, Q. Zhu, Q. Han, “Development of a depth imaging-based passenger counting sensor for public transportation,” in IEEE RAICS, 2025, pp. 192–200, doi: 10.1109/RAICS66191.2025.11332612</mixed-citation>
                    </ref>
                                    <ref id="ref39">
                        <label>39</label>
                        <mixed-citation publication-type="confproc">[39]	S. Thandassery, Y. Zhang, B. Li, M. Saada, H. Cai, Q. Meng, Q. Zhu, Q. Han, “Smart railway passenger counting and information systems powered by real-time embedded AI and computer vision,” in IEEE ICVES, 2025, pp. 453–459, doi: 10.1109/ICVES65691.2025.11376469</mixed-citation>
                    </ref>
                            </ref-list>
                    </back>
    </article>
