<?xml version="1.0" encoding="UTF-8"?>
<!DOCTYPE article PUBLIC "-//NLM//DTD JATS (Z39.96) Journal Publishing DTD v1.4 20241031//EN"
        "https://jats.nlm.nih.gov/publishing/1.4/JATS-journalpublishing1-4.dtd">
<article  article-type="research-article"        dtd-version="1.4">
            <front>

                <journal-meta>
                                                                <journal-id>jsr-c</journal-id>
            <journal-title-group>
                                                                                    <journal-title>Journal of Scientific Reports-C</journal-title>
            </journal-title-group>
                                        <issn pub-type="epub">2717-8633</issn>
                                                                                            <publisher>
                    <publisher-name>Kütahya Dumlupınar Üniversitesi</publisher-name>
                </publisher>
                    </journal-meta>
                <article-meta>
                                        <article-id/>
                                                                <article-categories>
                                            <subj-group  xml:lang="en">
                                                            <subject>Mining Engineering (Other)</subject>
                                                    </subj-group>
                                            <subj-group  xml:lang="tr">
                                                            <subject>Maden Mühendisliği (Diğer)</subject>
                                                    </subj-group>
                                    </article-categories>
                                                                                                                                                        <title-group>
                                                                                                                        <article-title>From photogrammetric modeling to augmented application of a quarry</article-title>
                                                                                                                                                                                                <trans-title-group xml:lang="tr">
                                    <trans-title>Bir Taş Ocağının Fotogrametrik Modellemesinden Artırılmış Gerçeklik Uygulamasına</trans-title>
                                </trans-title-group>
                                                                                                    </title-group>
            
                                                    <contrib-group content-type="authors">
                                                                        <contrib contrib-type="author">
                                                                    <contrib-id contrib-id-type="orcid">https://orcid.org/0009-0003-3287-9224</contrib-id>
                                                                <name>
                                    <surname>Masanawa</surname>
                                    <given-names>Zayyad Abdul</given-names>
                                </name>
                                                                    <aff>Kütahya Dumlupınar Üniversitesi</aff>
                                                            </contrib>
                                                    <contrib contrib-type="author">
                                                                    <contrib-id contrib-id-type="orcid">https://orcid.org/0000-0002-8164-8874</contrib-id>
                                                                <name>
                                    <surname>Özdemir</surname>
                                    <given-names>Mehmet</given-names>
                                </name>
                                                                    <aff>Kütahya Dumlupınar Üniversitesi</aff>
                                                            </contrib>
                                                    <contrib contrib-type="author">
                                                                    <contrib-id contrib-id-type="orcid">https://orcid.org/0000-0002-1875-4009</contrib-id>
                                                                <name>
                                    <surname>Erarslan</surname>
                                    <given-names>Kaan</given-names>
                                </name>
                                                                    <aff>Kütahya Dumlupınar Üniversitesi</aff>
                                                            </contrib>
                                                                                </contrib-group>
                        
                                        <pub-date pub-type="pub" iso-8601-date="2026-04-30">
                    <day>30</day>
                    <month>04</month>
                    <year>2026</year>
                </pub-date>
                                                    <issue>012</issue>
                                        <fpage>12</fpage>
                                        <lpage>25</lpage>
                        
                        <history>
                                    <date date-type="received" iso-8601-date="2025-12-17">
                        <day>17</day>
                        <month>12</month>
                        <year>2025</year>
                    </date>
                                                    <date date-type="accepted" iso-8601-date="2026-04-17">
                        <day>17</day>
                        <month>04</month>
                        <year>2026</year>
                    </date>
                            </history>
                                        <permissions>
                    <copyright-statement>Copyright © 2020, Journal of Scientific Reports-C</copyright-statement>
                    <copyright-year>2020</copyright-year>
                    <copyright-holder>Journal of Scientific Reports-C</copyright-holder>
                </permissions>
            
                                                                                                <abstract><p>The study presents a methodological approach for a pipeline from three-dimensional photogrammetry model generation by an Unmanned Aerial Vehicle (UAV) to an Augmented Reality (AR) application development in a quarry. Initially, a high precision 3D model of a real quarry pit was generated using the imagery of a non-RTK UAV. After scanning the area, the successive photographs taken by the drone were processed by using a photogrammetry software application to build a mesh that was optimized to 50,000 polygons. It was later developed into an interactive holographic application with the Unity engine and Vuforia platform. Two AR tracking paradigms, namely Image Target and Ground Plane were generated and tested on both mobile platforms and Microsoft HoloLens 2 device. In a system evaluation conducted by a group of 17 students and 12 academics who tested the application, a minimum average score of 85 and 90 relatively, out of 100 were given in terms of spatial understanding. This perspective was framed within the context of increased memorability, positive contribution to learning, more enjoyable learning, improved educational quality, and the widespread adoption of such applications. Academicians&#039; outcomes related to the practices included in the study were found to be more positive and this qualitatively supports the educational effectiveness of the system. Additionally, it is observed that there is a trade-off between geometric accuracy and real-time rendering execution on mobile platforms. It was concluded that the Ground Plane method provides a workable material for a high-fidelity digital shadow, which can be applied to remote inspection. 
Additionally, it is foreseen that AR applications executed on smartphone/tablet and HoloLens 2 have infrastructurally potential to enhance the level of engineering, planning and control process and support mine-safety.</p></abstract>
                                                                                                                                    <trans-abstract xml:lang="tr">
                            <p>Bu çalışma, bir taş ocağında İnsansız Hava Aracı (İHA) tarafından üç boyutlu fotogrametri modeli oluşturulmasından Artırılmış Gerçeklik (AR) uygulaması geliştirilmesine kadar uzanan bir süreç için metodolojik bir yaklaşım sunmaktadır. Başlangıçta, gerçek bir taş ocağı çukurunun yüksek hassasiyetli 3D modeli, RTK olmayan bir İHA&#039;nın görüntüleri kullanılarak oluşturulmuştur. Alan tarandıktan sonra, dron tarafından çekilen ardışık fotoğraflar, 50.000 poligona optimize edilmiş bir ağ oluşturmak için bir fotogrametri yazılım uygulaması kullanılarak işlenmiştir. Daha sonra, Unity motoru ve Vuforia platformu ile etkileşimli bir holografik uygulamaya dönüştürülmüştür. Görüntü Hedefi ve Zemin Düzlemi olmak üzere iki AR izleme paradigması oluşturulmuş ve hem mobil platformlarda hem de Microsoft HoloLens 2 cihazında test edilmiştir. Uygulamayı test eden 17 öğrenci ve 12 akademisyenden oluşan bir grup tarafından yapılan sistem değerlendirmesinde, mekansal anlama açısından 100 üzerinden sırasıyla en az 85 ve 90 ortalama puan verilmiştir. Bu bakış açısı, artan akılda kalıcılık, öğrenmeye olumlu katkı, daha keyifli öğrenme, iyileştirilmiş eğitim kalitesi ve bu tür uygulamaların yaygın olarak benimsenmesi bağlamında çerçevelenmiştir. Çalışmaya dahil edilen uygulamalarla ilgili akademisyenlerin sonuçlarının daha olumlu olduğu ve bunun sistemin eğitimsel etkinliğini niteliksel olarak desteklediği bulunmuştur. Ek olarak, mobil platformlarda geometrik doğruluk ve gerçek zamanlı işleme arasında bir denge olduğu gözlemlenmiştir. Zemin Düzlemi yönteminin, uzaktan denetim için uygulanabilecek yüksek doğruluklu dijital gölge için işlevsel bir malzeme sağladığı sonucuna varılmıştır. Ayrıca, akıllı telefon/tablet ve HoloLens 2&#039;de yürütülen AR uygulamalarının, mühendislik, planlama ve kontrol süreçlerinin seviyesini yükseltme ve maden güvenliğini destekleme potansiyeline sahip olduğu öngörülmektedir.</p></trans-abstract>
                                                            
            
                                                            <kwd-group>
                                                    <kwd>AR</kwd>
                                                    <kwd>Photogrammetry</kwd>
                                                    <kwd>3D Model</kwd>
                                                    <kwd>Quarry</kwd>
                                            </kwd-group>
                                                        
                                                                            <kwd-group xml:lang="tr">
                                                    <kwd>AR</kwd>
                                                    <kwd>Fotogrametri</kwd>
                                                    <kwd>3B Model</kwd>
                                                    <kwd>Ocak</kwd>
                                            </kwd-group>
                                                                                                            </article-meta>
    </front>
    <back>
                            <ref-list>
                                    <ref id="ref1">
                        <label>1</label>
                        <mixed-citation publication-type="journal">[1] T. Zhan, K. Yin, J. Xiong, and Z. He, “Augmented reality and virtual reality displays: Perspectives and challenges,” iScience, vol. 23, no. 8, 2020, doi: 10.1016/j.isci.2020.101397.</mixed-citation>
                    </ref>
                                    <ref id="ref2">
                        <label>2</label>
                        <mixed-citation publication-type="journal">[2] P. Singh, V. Murthy, D. Kumar, and S. Raval, “A comprehensive review on application of drone, virtual reality and augmented reality with their application in dragline excavation monitoring in surface mines,” Geomatics, Nat. Hazards Risk, vol. 15, no. 1, 2024, doi: 10.1080/19475705.2024.2327399.</mixed-citation>
                    </ref>
                                    <ref id="ref3">
                        <label>3</label>
                        <mixed-citation publication-type="journal">[3] G. Lampropoulos, P. Fernández‐Arias, A. Antón‐Sancho, and D. Vergara, “Examining the role of augmented reality and virtual reality in safety training,” Electronics, vol. 13, no. 19, 2024, doi: 10.3390/electronics13193952.</mixed-citation>
                    </ref>
                                    <ref id="ref4">
                        <label>4</label>
                        <mixed-citation publication-type="journal">[4] F. Mana, P. Jeannette, E. Theresa, and J. Kietzmann, “Go boldly! Explore augmented reality (AR), virtual reality (VR), and mixed reality (MR) for business,” Bus. Horizons, vol. 61, no. 5, pp. 657–666, 2018, doi: 10.1016/j.bushor.2018.05.009.</mixed-citation>
                    </ref>
                                    <ref id="ref5">
                        <label>5</label>
                        <mixed-citation publication-type="journal">[5] J. Garzón, “An overview of twenty-five years of augmented reality in education,” Multimodal Technol. Interact., vol. 5, no. 7, 2021, doi: 10.3390/mti5070037.</mixed-citation>
                    </ref>
                                    <ref id="ref6">
                        <label>6</label>
                        <mixed-citation publication-type="journal">[6] V. Gheorghe, F. Girbacia, D. Mihai, B. Razvan, and G. Carmen, “Mapping the emergent trends in industrial augmented reality,” Electronics, vol. 12, no. 7, 2023, doi: 10.3390/electronics12071719.</mixed-citation>
                    </ref>
                                    <ref id="ref7">
                        <label>7</label>
                        <mixed-citation publication-type="journal">[7] C. Ke and X. Fan, “The renaissance of augmented reality in construction: history, present status and future directions,” Smart Sustain. Built Environ., 2020, doi: 10.1108/SASBE-08-2020-0124.</mixed-citation>
                    </ref>
                                    <ref id="ref8">
                        <label>8</label>
                        <mixed-citation publication-type="journal">[8] M. Thakra, A. Devare, and M. H. Devare, “Augmented reality (AR) and virtual reality (VR) for UAV swarm visualization,” in UAV Swarm Visualization, 2025, pp. 207–239, doi: 10.1007/979-8-8688-1047-3_6.</mixed-citation>
                    </ref>
                                    <ref id="ref9">
                        <label>9</label>
                        <mixed-citation publication-type="journal">[9] D. Drascic and P. Milgram, “Perceptual issues in augmented reality,” Proc. SPIE, vol. 2653, pp. 123–134, 1996, doi: 10.1117/12.237425.</mixed-citation>
                    </ref>
                                    <ref id="ref10">
                        <label>10</label>
                        <mixed-citation publication-type="journal">[10] A. Bhardwaj. “AR, VR and MR: What’s trending?” Open Source For You. https://www.opensourceforu.com/2024/03/ar-vr-and-mr-whats-trending/ (accessed Apr. 28, 2026).</mixed-citation>
                    </ref>
                                    <ref id="ref11">
                        <label>11</label>
                        <mixed-citation publication-type="journal">[11] A. Gör and C. Coşkun, “Augmented reality as an exhibition method,” Global J. Arts Educ., vol. 7, no. 3, 2017.</mixed-citation>
                    </ref>
                                    <ref id="ref12">
                        <label>12</label>
                        <mixed-citation publication-type="journal">[12] M. B. Ibáñez and C. Delgado-Kloos, “Augmented reality for STEM learning: A systematic review,” Comput. Educ., vol. 123, pp. 109–123, 2018, doi: 10.1016/j.compedu.2018.05.002.</mixed-citation>
                    </ref>
                                    <ref id="ref13">
                        <label>13</label>
                        <mixed-citation publication-type="journal">[13] E. Dzardanova and V. Kasapakis, “Virtual reality: A journey from vision to commodity,” IEEE Ann. Hist. Comput., vol. 45, no. 1, pp. 18–30, 2023, doi: 10.1109/MAHC.2022.3208774.</mixed-citation>
                    </ref>
                                    <ref id="ref14">
                        <label>14</label>
                        <mixed-citation publication-type="journal">[14] P. Milgram and F. Kishino, “A taxonomy of mixed reality visual displays,” IEICE Trans. Inf. Syst., vol. 77, no. 12, pp. 1321–1329, 1994.</mixed-citation>
                    </ref>
                                    <ref id="ref15">
                        <label>15</label>
                        <mixed-citation publication-type="journal">[15] Abigail. “The virtual spectrum- Understanding AR, MR, VR and XR.” CreatXR. https://creatxr.com/the-virtuality-spectrum-understanding-ar-mr-vr-and-xr/ (accessed Apr. 28, 2026).</mixed-citation>
                    </ref>
                                    <ref id="ref16">
                        <label>16</label>
                        <mixed-citation publication-type="journal">[16] C. Javvaji et al., “Immersive innovations: Exploring the diverse applications of virtual reality (VR) in healthcare,” Cureus, vol. 16, no. 3, 2024, doi: 10.7759/cureus.56137.</mixed-citation>
                    </ref>
                                    <ref id="ref17">
                        <label>17</label>
                        <mixed-citation publication-type="journal">[17] S. Shinde, V. Samale, N. Yede, and A. Pilay, “VR safety training for hazardous work,” Int. J. Multidiscip. Res. (IJFMR), vol. 7, no. 1, 2025, doi: 10.36948/ijfmr.2025.v07i01.</mixed-citation>
                    </ref>
                                    <ref id="ref18">
                        <label>18</label>
                        <mixed-citation publication-type="journal">[18] K. Erarslan and M. Özdemir, “Utilization of augmented and mixed reality in training mining machines,” ESTUDAM, vol. 5, no. 2, pp. 48–56, 2024, doi: 10.53608/estudambilisim.1583427.</mixed-citation>
                    </ref>
                                    <ref id="ref19">
                        <label>19</label>
                        <mixed-citation publication-type="journal">[19] L. Perfetti, S. Teruggi, C. Achille, and F. Fassi, “Rapid and low-cost photogrammetric survey of hazardous sites, from measurements to VR dissemination,” Int. Arch. Photogramm. Remote Sens. Spatial Inf. Sci., vol. XLVIII-2/W1-2022, pp. 207–214, 2022, doi: 10.5194/isprs-archives-XLVIII-2-W1-2022-207-2022.</mixed-citation>
                    </ref>
                                    <ref id="ref20">
                        <label>20</label>
                        <mixed-citation publication-type="journal">[20] ArcGIS. “Virtual reality (VR) and augmented reality (AR) with ArcGIS.” Esri MediaSpace. https://mediaspace.esri.com/media/t/1_e8v5bwla (accessed Apr. 28, 2026).</mixed-citation>
                    </ref>
                                    <ref id="ref21">
                        <label>21</label>
                        <mixed-citation publication-type="journal">[21] M. Baird, S. Haegler, and R. Hansen. “Virtual reality (VR) and augmented reality (AR) with ArcGIS.” ESRI Event. [suspicious link removed] (accessed Jan. 24, 2024).</mixed-citation>
                    </ref>
                                    <ref id="ref22">
                        <label>22</label>
                        <mixed-citation publication-type="journal">[22] A. Chaturvedi. “5 ways LiDAR is transforming the world before our eyes.” Geospatial World. https://www.geospatialworld.net/blogs/5-ways-lidar-is-transforming-the-world-before-our-eyes/ (accessed Dec. 22, 2021).</mixed-citation>
                    </ref>
                                    <ref id="ref23">
                        <label>23</label>
                        <mixed-citation publication-type="journal">[23] L. Duarte, A. C. Teodoro, O. Moutinho, and J. A. Goncalves, “Open-source GIS application for UAV photogrammetry based on MicMac,” Int. J. Remote Sens., vol. 38, no. 8-10, pp. 3181–3202, 2017, doi: 10.1080/01431161.2016.1259685.</mixed-citation>
                    </ref>
                                    <ref id="ref24">
                        <label>24</label>
                        <mixed-citation publication-type="journal">[24] A. Francois. “QGis for LIDAR: digital surface model (DSM) with CloudCompare and LAStools.” Blog GIS Territ. https://www.sigterritoires.fr/index.php/en/qgis-for-lidar-digital-surface-model-dsm-with-cloudcompare-and-lastools/ (accessed Aug. 20, 2023).</mixed-citation>
                    </ref>
                                    <ref id="ref25">
                        <label>25</label>
                        <mixed-citation publication-type="journal">[25] J. Jacobs, R. C. W. Webber-Youngman, and E. van Wyk, “Potential augmented reality applications in the mining industry,” 2016, doi: 10.13140/RG.2.2.27751.44961.</mixed-citation>
                    </ref>
                                    <ref id="ref26">
                        <label>26</label>
                        <mixed-citation publication-type="journal">[26] B. C. Kress and W. J. Cummings, “Towards the ultimate mixed reality experience: hololens display architecture choices,” Symp. Digest Tech. Papers, vol. 48, no. 1, pp. 127–131, 2017, doi: 10.1002/sdtp.11586.</mixed-citation>
                    </ref>
                                    <ref id="ref27">
                        <label>27</label>
                        <mixed-citation publication-type="journal">[27] D. D. Mascarenas et al., “Augmented reality for next generation infrastructure inspections,” Struct. Health Monit., vol. 20, no. 4, pp. 1957–1979, 2021, doi: 10.1177/1475921720953846.</mixed-citation>
                    </ref>
                                    <ref id="ref28">
                        <label>28</label>
                        <mixed-citation publication-type="journal">[28] P. Singh, V. Murthy, D. Kumar, and S. Raval, “A comprehensive review on application of drone, virtual reality and augmented reality with their application in dragline excavation monitoring in surface mines,” Geomatics, Nat. Hazards Risk, vol. 15, no. 1, 2024, doi: 10.1080/19475705.2024.2327399.</mixed-citation>
                    </ref>
                                    <ref id="ref29">
                        <label>29</label>
                        <mixed-citation publication-type="journal">[29] J. Suh, S. Lee, and Y. Choi, “UMineAR: mobile-tablet-based abandoned mine hazard site investigation support system using augmented reality,” Minerals, vol. 7, no. 10, p. 198, 2017, doi: 10.3390/min7100198.</mixed-citation>
                    </ref>
                                    <ref id="ref30">
                        <label>30</label>
                        <mixed-citation publication-type="journal">[30] Vuforia. “Spatial augmented reality with Vuforia engine in unity.” Unity Technologies. https://resources.unity.com/unitenow/onlinesessions/spatial-augmented-reality-with-vuforia-engine-in-unity (accessed Jan. 10, 2021).</mixed-citation>
                    </ref>
                                    <ref id="ref31">
                        <label>31</label>
                        <mixed-citation publication-type="journal">[31] Wingtra. “Drones for mining: how to use and choose what’s best.” Wingtra. https://wingtra.com/drone-mapping-applications/mining-and-aggregates (accessed Jan. 21, 2021).</mixed-citation>
                    </ref>
                                    <ref id="ref32">
                        <label>32</label>
                        <mixed-citation publication-type="journal">[32] J. Jacobs, R. C. W. Webber-Youngman, and E. van Wyk, “Potential augmented reality applications in the mining industry,” pp. 1–8, Jan. 2016, doi: 10.13140/RG.2.2.27751.44961.</mixed-citation>
                    </ref>
                                    <ref id="ref33">
                        <label>33</label>
                        <mixed-citation publication-type="journal">[33] L. Lattanzi et al., “Digital twin for smart manufacturing: a review of concepts towards a practical industrial implementation,” Int. J. Comput. Integr. Manuf., vol. 34, no. 6, pp. 567–597, 2020, doi: 10.1080/0951192X.2021.1911003.</mixed-citation>
                    </ref>
                                    <ref id="ref34">
                        <label>34</label>
                        <mixed-citation publication-type="journal">[34] J. Trauer et al., “What is a digital twin?–definitions and insights from an industrial case study in technical product development,” in Proc. Design Soc.: DESIGN Conf., vol. 1, 2020, pp. 757–766, doi: 10.1017/dsd.2020.15.</mixed-citation>
                    </ref>
                                    <ref id="ref35">
                        <label>35</label>
                        <mixed-citation publication-type="journal">[35] S. Baidya et al., “Digital twin in safety-critical robotics applications: Opportunities and challenges,” in Proc. IEEE Int. Perform. Comput. Commun. Conf. (IPCCC), 2022, pp. 101–107.</mixed-citation>
                    </ref>
                                    <ref id="ref36">
                        <label>36</label>
                        <mixed-citation publication-type="journal">[36] G. Pronost et al., “Towards a framework for the classification of digital twins and their applications,” IFAC-PapersOnLine, vol. 54, no. 1, 2021, doi: 10.1109/ICE/ITMC52061.2021.9570114.</mixed-citation>
                    </ref>
                                    <ref id="ref37">
                        <label>37</label>
                        <mixed-citation publication-type="journal">[37] F. von Haxthausen, Y. Chen, and F. Ernst, “Superimposing holograms on real world objects using HoloLens 2 and its depth camera,” Curr. Dir. Biomed. Eng., vol. 7, no. 1, p. 111, 2021, doi: 10.1515/cdbme-2021-1024.</mixed-citation>
                    </ref>
                                    <ref id="ref38">
                        <label>38</label>
                        <mixed-citation publication-type="journal">[38] S. Teruggi and F. Fassi, “Mixed reality content alignment in monumental environments,” Int. Arch. Photogramm. Remote Sens. Spatial Inf. Sci., p. 901, 2022, doi: 10.5194/isprs-archives-xliii-b2-2022-901-2022.</mixed-citation>
                    </ref>
                                    <ref id="ref39">
                        <label>39</label>
                        <mixed-citation publication-type="journal">[39] A. Ulvi, V. İzci, and A. Y. Yiğit, “Investigation of the effect of the number and distribution of ground control point (GCP) on map production accuracy,” Erciyes Univ. Fen Bilim. Enst. Fen Bilim. Derg., vol. 40, no. 2, pp. 167–180, 2024. [Online]. Available: https://izlik.org/JA95HP77LS</mixed-citation>
                    </ref>
                                    <ref id="ref40">
                        <label>40</label>
                        <mixed-citation publication-type="journal">[40] J. Knodt, “Single edge collapse quad-dominant mesh reduction,” arXiv, 2024, doi: 10.48550/arxiv.2411.16874.</mixed-citation>
                    </ref>
                                    <ref id="ref41">
                        <label>41</label>
                        <mixed-citation publication-type="journal">[41] H. D. Liu, X. Zhang, and C. Yuksel, “Simplifying triangle meshes in the wild,” arXiv, 2024, doi: 10.48550/arxiv.2409.15458.</mixed-citation>
                    </ref>
                                    <ref id="ref42">
                        <label>42</label>
                        <mixed-citation publication-type="journal">[42] H. K. Chang, J. Choi, and C. M. Yeum, “3D reconstruction by looking: Instantaneous blind spot detector for indoor SLAM through mixed reality,” arXiv, 2024, doi: 10.48550/arxiv.2411.12514.</mixed-citation>
                    </ref>
                                    <ref id="ref43">
                        <label>43</label>
                        <mixed-citation publication-type="journal">[43] H. C. Gagnon et al., “Gap affordance judgments in mixed reality: Testing the role of display weight and field of view,” Front. Virtual Real., vol. 2, 2021, doi: 10.3389/frvir.2021.654656.</mixed-citation>
                    </ref>
                                    <ref id="ref44">
                        <label>44</label>
                        <mixed-citation publication-type="journal">[44] N. Biswas, A. Mukherjee, and S. Bhattacharya, “Are you feeling sick? – A systematic literature review of cybersickness in virtual reality,” ACM Comput. Surv., vol. 56, no. 11, p. 1, 2024, doi: 10.1145/3670008.</mixed-citation>
                    </ref>
                                    <ref id="ref45">
                        <label>45</label>
                        <mixed-citation publication-type="journal">[45] I. Miguel-Alonso, D. Checa, H. Guillen-Sanz, and A. Bustillo, “Evaluation of the novelty effect in immersive virtual reality learning experiences,” Virtual Reality, vol. 28, no. 1, 2024, doi: 10.1007/s10055-023-00926-5.</mixed-citation>
                    </ref>
                                    <ref id="ref46">
                        <label>46</label>
                        <mixed-citation publication-type="journal">[46] B. Sobota and D. Cvetković, “Mixed reality and three-dimensional computer graphics,” in IntechOpen eBooks, IntechOpen, 2020, doi: 10.5772/intechopen.77405.</mixed-citation>
                    </ref>
                                    <ref id="ref47">
                        <label>47</label>
                        <mixed-citation publication-type="journal">[47] K. Essmiller et al., “Exploring mixed reality based on self-efficacy and motivation of users,” Res. Learn. Technol., vol. 28, 2020, doi: 10.25304/rlt.v28.2331.</mixed-citation>
                    </ref>
                                    <ref id="ref48">
                        <label>48</label>
                        <mixed-citation publication-type="journal">[48] N. Ezdina, E. Y. Dotsenko, E. V. Shavina, and Y. S. Valeeva, “Convergent technological and hyperconvergent forms of productivity improvement in the extractive sector of economy,” Int. J. Technol., vol. 15, no. 3, p. 571, 2024, doi: 10.14716/ijtech.v15i3.5661.</mixed-citation>
                    </ref>
                                    <ref id="ref49">
                        <label>49</label>
                        <mixed-citation publication-type="journal">[49] M. G. Don, T. R. Wanasinghe, R. G. Gosine, and P. Warrian, “Digital twins and enabling technology applications in mining: Research trends, opportunities, and challenges,” IEEE Access, vol. 13, p. 6945, 2025, doi: 10.1109/access.2025.3526881.</mixed-citation>
                    </ref>
                                    <ref id="ref50">
                        <label>50</label>
                        <mixed-citation publication-type="journal">[50] C. E. Emere, O. A. Oguntona, I. Ohiomah, and E. Ayorinde, “Harnessing emerging technologies in the global mining sector from a bibliometric standpoint,” Mining, vol. 5, no. 1, p. 13, 2025, doi: 10.3390/mining5010013.</mixed-citation>
                    </ref>
                                    <ref id="ref51">
                        <label>51</label>
                        <mixed-citation publication-type="journal">[51] J. Adams, F. Flavell, and R. Raureti, “Mixed reality results in vocational education: a case study with HoloLens 2,” Res. Learn. Technol., vol. 30, 2022, doi: 10.25304/rlt.v30.2803.</mixed-citation>
                    </ref>
                                    <ref id="ref52">
                        <label>52</label>
                        <mixed-citation publication-type="journal">[52] J. D. Valencia-Quiceno, V. Kecojević, A. McBrayer, and D. Bogunovic, “Augmented reality system for training of heavy equipment operators in surface mining,” Min. Metall. Explor., vol. 41, pp. 2217–2229, 2024, doi: 10.1007/s42461-024-01047-6.</mixed-citation>
                    </ref>
                                    <ref id="ref53">
                        <label>53</label>
                        <mixed-citation publication-type="journal">[53] V. Balaska, I. T. Papapetros, K. M. Oikonomou, L. Bampis, and A. Gasteratos, “UAV object detection and positioning in a mining industrial metaverse with custom geo-referenced data,” arXiv, 2025, doi: 10.48550/arxiv.2506.13505.</mixed-citation>
                    </ref>
                                    <ref id="ref54">
                        <label>54</label>
                        <mixed-citation publication-type="journal">[54] Z. Niu, H. Xia, P. Tao, and T. Ke, “Accuracy assessment of UAV photogrammetry system with RTK measurements for direct georeferencing,” ISPRS Ann. Photogramm. Remote Sens. Spatial Inf. Sci., p. 169, 2024, doi: 10.5194/isprs-annals-x-1-2024-169-2024.</mixed-citation>
                    </ref>
                                    <ref id="ref55">
                        <label>55</label>
                        <mixed-citation publication-type="journal">[55] C. Tan et al., “Accuracy analysis of UAV aerial photogrammetry based on RTK mode, flight altitude, and number of GCPs,” Meas. Sci. Technol., vol. 35, no. 10, p. 106310, 2024, doi: 10.1088/1361-6501/ad5dd7.</mixed-citation>
                    </ref>
                                    <ref id="ref56">
                        <label>56</label>
                        <mixed-citation publication-type="journal">[56] S. M. Dlamini and Y. O. Ouma, “Large-scale topographic mapping using RTK-GNSS and multispectral UAV drone photogrammetric surveys: Comparative evaluation of experimental results,” Geomatics, vol. 5, no. 2, p. 25, 2025, doi: 10.3390/geomatics5020025.</mixed-citation>
                    </ref>
                                    <ref id="ref57">
                        <label>57</label>
                        <mixed-citation publication-type="journal">[57] S. Tadeja, Y. Lu, M. Rydlewicz, W. Rydlewicz, T. Bubas, and P. O. Kristensson, “Exploring gestural input for engineering surveys of real-life structures in virtual reality using photogrammetric 3D models,” Multimedia Tools Appl., vol. 80, no. 20, p. 31039, 2021, doi: 10.1007/s11042-021-10520-z.</mixed-citation>
                    </ref>
                                    <ref id="ref58">
                        <label>58</label>
                        <mixed-citation publication-type="journal">[58] M. H. A. Yusri, M. Johan, and M. H. M. Ramli, “Preservation of cultural heritage: A comparison study of 3D modelling between laser scanning, depth image and photogrammetry methods,” J. Mech. Eng., vol. 19, no. 2, pp. 125–145, 2022, doi: 10.24191/jmeche.v19i2.19768.</mixed-citation>
                    </ref>
                                    <ref id="ref59">
                        <label>59</label>
                        <mixed-citation publication-type="journal">[59] Y. Gao, É. Peillard, J. Normand, G. Moreau, Y. Liu, and Y. Wang, “Influence of virtual objects’ shadows and lighting coherence on distance perception in optical see-through augmented reality,” J. Soc. Inf. Disp., vol. 28, no. 2, p. 117, 2019, doi: 10.1002/jsid.832.</mixed-citation>
                    </ref>
                                    <ref id="ref60">
                        <label>60</label>
                        <mixed-citation publication-type="journal">[60] J. M. Liu, G. Narasimham, J. K. Stefanucci, S. H. Creem-Regehr, and B. Bodenheimer, “Distance perception in modern mobile augmented reality,” in Proc. 2020 IEEE Conf. Virtual Real. 3D User Interfaces Abstr. Workshops (VRW), 2020, p. 196, doi: 10.1109/vrw50115.2020.00042.</mixed-citation>
                    </ref>
                                    <ref id="ref61">
                        <label>61</label>
                        <mixed-citation publication-type="journal">[61] M. Poupard, F. Larrue, H. Sauzeon, and A. Tricot, “A systematic review of immersive technologies for education: Learning performance, cognitive load and intrinsic motivation,” Br. J. Educ. Technol., vol. 56, no. 1, pp. 5–41, 2024, doi: 10.1111/bjet.13503.</mixed-citation>
                    </ref>
                                    <ref id="ref62">
                        <label>62</label>
                        <mixed-citation publication-type="journal">[62] M. Poupard, F. Larrue, H. Sauzeon, and A. Tricot, “A systematic review of immersive technologies for education: effects of cognitive load and curiosity state on learning performance,” HAL Open Science, pp. 1–38, 2025. [Online]. Available: https://hal.archives-ouvertes.fr/hal-03906797</mixed-citation>
                    </ref>
                                    <ref id="ref63">
                        <label>63</label>
                        <mixed-citation publication-type="journal">[63] K. Cheng, “Reading an augmented reality book: An exploration of learners’ cognitive load, motivation, and attitudes,” Australas. J. Educ. Technol., vol. 33, no. 4, pp. 53–69, 2017, doi: 10.14742/ajet.2820.</mixed-citation>
                    </ref>
                                    <ref id="ref64">
                        <label>64</label>
                        <mixed-citation publication-type="journal">[64] V. Candido and A. S. Cattaneo, “Applying cognitive theory of multimedia learning principles to augmented reality and its effects on cognitive load and learning outcomes,” Comput. Hum. Behav. Rep., vol. 18, p. 100678, 2025, doi: 10.1016/j.chbr.2025.100678.</mixed-citation>
                    </ref>
                            </ref-list>
                    </back>
    </article>
