<?xml version="1.0" encoding="UTF-8"?>
<!DOCTYPE article PUBLIC "-//NLM//DTD JATS (Z39.96) Journal Publishing DTD v1.4 20241031//EN"
        "https://jats.nlm.nih.gov/publishing/1.4/JATS-journalpublishing1-4.dtd">
<article  article-type="reviewer-report"        dtd-version="1.4">
            <front>

                <journal-meta>
                                    <journal-id></journal-id>
            <journal-title-group>
                                                                                    <journal-title>Öneri Dergisi</journal-title>
            </journal-title-group>
                            <issn pub-type="ppub">1300-0845</issn>
                                        <issn pub-type="epub">2147-5377</issn>
                                                                                            <publisher>
                    <publisher-name>Marmara University</publisher-name>
                </publisher>
                    </journal-meta>
                <article-meta>
                                        <article-id/>
                                                                <article-categories>
                                            <subj-group  xml:lang="en">
                                                            <subject>Music Education</subject>
                                                            <subject>Music Technology and Recording</subject>
                                                            <subject>Musicology and Ethnomusicology</subject>
                                                            <subject>Music (Other)</subject>
                                                    </subj-group>
                                            <subj-group  xml:lang="tr">
                                                            <subject>Müzik Eğitimi</subject>
                                                            <subject>Müzik Teknolojisi ve Kayıt</subject>
                                                            <subject>Müzikoloji ve Etnomüzikoloji</subject>
                                                            <subject>Müzik (Diğer)</subject>
                                                    </subj-group>
                                    </article-categories>
                                                                                                                                                        <title-group>
                                                                                                                        <trans-title-group xml:lang="en">
                                    <trans-title>AN OVERVIEW OF ARTIFICIAL INTELLIGENCE IN THE CONTEXT OF MUSIC AND TÜRKİYE</trans-title>
                                </trans-title-group>
                                                                                                                                                                                                <article-title>Türkiye’de Müzik Ve Yapay Zeka Üzerine Bir Değerlendirme</article-title>
                                                                                                    </title-group>
            
                                                    <contrib-group content-type="authors">
                                                                        <contrib contrib-type="author">
                                                                    <contrib-id contrib-id-type="orcid">https://orcid.org/0000-0002-3111-6917</contrib-id>
                                                                <name>
                                    <surname>Yavuz</surname>
                                    <given-names>Mehmet Selim</given-names>
                                </name>
                                                                    <aff>İSTANBUL ÜNİVERSİTESİ, DEVLET KONSERVATUVARI, MÜZİKOLOJİ BÖLÜMÜ</aff>
                                                            </contrib>
                                                    <contrib contrib-type="author">
                                                                    <contrib-id contrib-id-type="orcid">https://orcid.org/0000-0003-4534-7182</contrib-id>
                                                                <name>
                                    <surname>Karaosmanoğlu</surname>
                                    <given-names>Mustafa Kemal</given-names>
                                </name>
                                                                    <aff>YILDIZ TEKNİK ÜNİVERSİTESİ, SANAT VE TASARIM FAKÜLTESİ</aff>
                                                            </contrib>
                                                    <contrib contrib-type="author">
                                                                    <contrib-id contrib-id-type="orcid">https://orcid.org/0000-0003-2395-9380</contrib-id>
                                                                <name>
                                    <surname>Yeprem</surname>
                                    <given-names>Mehmet Safa</given-names>
                                </name>
                                                                    <aff>MARMARA ÜNİVERSİTESİ, İLAHİYAT FAKÜLTESİ, İSLAM TARİHİ VE SANATLARI BÖLÜMÜ, TÜRK DİN MUSİKİSİ ANABİLİM DALI</aff>
                                                            </contrib>
                                                    <contrib contrib-type="author">
                                                                    <contrib-id contrib-id-type="orcid">https://orcid.org/0000-0002-5322-6201</contrib-id>
                                                                <name>
                                    <surname>Karşıcı</surname>
                                    <given-names>Gülay</given-names>
                                </name>
                                                                    <aff>MARMARA ÜNİVERSİTESİ, GÜZEL SANATLAR FAKÜLTESİ, MÜZİK BÖLÜMÜ</aff>
                                                            </contrib>
                                                    <contrib contrib-type="author">
                                                                    <contrib-id contrib-id-type="orcid">https://orcid.org/0000-0001-7312-6737</contrib-id>
                                                                <name>
                                    <surname>Sezikli</surname>
                                    <given-names>Ubeydullah</given-names>
                                </name>
                                                                    <aff>İSTANBUL MEDENİYET ÜNİVERSİTESİ, SANAT VE TASARIM FAKÜLTESİ</aff>
                                                            </contrib>
                                                                                </contrib-group>
                        
                                        <pub-date pub-type="pub" iso-8601-date="20250424">
                    <day>24</day>
                    <month>04</month>
                    <year>2025</year>
                </pub-date>
                                        <volume>20</volume>
                                        <issue>MX Yaratıcı Endüstriler Çalıştayı 2024: Yapay Zeka Çağında Yaratıcı Endüstriler Özel Sayısı</issue>
                                        <fpage>50</fpage>
                                        <lpage>70</lpage>
                        
                        <history>
                                    <date date-type="received" iso-8601-date="20240611">
                        <day>11</day>
                        <month>06</month>
                        <year>2024</year>
                    </date>
                                                    <date date-type="accepted" iso-8601-date="20240930">
                        <day>30</day>
                        <month>09</month>
                        <year>2024</year>
                    </date>
                            </history>
                                        <permissions>
                    <copyright-statement>Copyright © 1994, Oneri</copyright-statement>
                    <copyright-year>1994</copyright-year>
                    <copyright-holder>Oneri</copyright-holder>
                </permissions>
            
                                                                                                <trans-abstract xml:lang="en">
                            <p>This article concerns the relationship between artificial intelligence technology and music in general, with a particular focus on Turkish music and music research (studies). Shaped by data obtained from a literature review, this paper explores the brief history of artificial intelligence technology, its application areas in music, artificial intelligence methods, techniques, and algorithms applied to music, traditional machine learning and deep learning methods in music, the importance of data in music artificial intelligence applications, and the impacts of artificial intelligence on music education and teaching. In addition, this paper briefly discusses several music projects in Turkey that have been conducted using artificial intelligence technology.</p></trans-abstract>
                                                                                                                                    <abstract><p>Bu makale yapay zeka teknolojisinin genel olarak müzikle, özel olarak Türk müziğiyle ve müzik araştırmalarıyla (incelemeleriyle) nasıl ilişkilendirildiği ile ilgilidir. Kaynak taraması yaparak elde edilen veriler ile şekillenen bu yazıda yapay zeka teknolojisinin çok kısa tarihi, müzikte uygulama alanları; müziğe uygulanmış yapay zeka yöntemleri, teknikleri, algoritmaları; geleneksel makine öğrenmesi ve müzikte derin öğrenme yöntemleri; verinin müzik yapay zeka uygulamalarındaki önemi; yapay zekanın müzik eğitimine ve öğretimine etkileri ele alındı. Bunlara ek olarak, bu yazıda Türkiye’de yapay zeka teknolojisi kullanılarak yapılan birkaç müzik projesinden de kısaca söz edildi.</p></abstract>
                                                            
            
                                                                                        <kwd-group>
                                                    <kwd>Yapay zeka</kwd>
                                                    <kwd>Türk müziği</kwd>
                                                    <kwd>Makine öğrenmesi</kwd>
                                                    <kwd>Müzik eğitimi</kwd>
                                                    <kwd>Müzik araştırmaları</kwd>
                                            </kwd-group>
                            
                                                <kwd-group xml:lang="en">
                                                    <kwd>Artificial intelligence</kwd>
                                                    <kwd>Classical Turkish music</kwd>
                                                    <kwd>Machine learning</kwd>
                                                    <kwd>Music education</kwd>
                                                    <kwd>Music research</kwd>
                                            </kwd-group>
                                                                                                                                        </article-meta>
    </front>
    <back>
                            <ref-list>
                                    <ref id="ref1">
                        <label>1</label>
                        <mixed-citation publication-type="journal">Alpkoçak, A., &amp; Gedik, A. C. (2006). Classification of Turkish songs according to makams by using n grams. Proceedings of the 15. Turkish Symposium on Artificial Intelligence and Neural Networks (TAINN). Turkish Symposium on Artificial Intelligence and Neural Networks.</mixed-citation>
                    </ref>
                                    <ref id="ref2">
                        <label>2</label>
                        <mixed-citation publication-type="journal">Anderson, A., Maystre, L., Anderson, I., Mehrotra, R., &amp; Lalmas, M. (2020). Algorithmic Effects on the Diversity of Consumption on Spotify. Proceedings of The Web Conference 2020, 2155-2165. https://doi.org/10.1145/3366423.3380281</mixed-citation>
                    </ref>
                                    <ref id="ref3">
                        <label>3</label>
                        <mixed-citation publication-type="journal">Becker, H. S. (1976). Art worlds and social types. American Behavioral Scientist, 19(6), s. 703-718. doi:https://doi.org/10.1177/000276427601900603</mixed-citation>
                    </ref>
                                    <ref id="ref4">
                        <label>4</label>
                        <mixed-citation publication-type="journal">Boulanger-Lewandowski, N., et al. (2012). Modeling Temporal Dependencies in High-Dimensional Sequences: Application to Polyphonic Music Generation and Transcription. Proceedings of the 29th International Conference on Machine Learning (ICML-12).</mixed-citation>
                    </ref>
                                    <ref id="ref5">
                        <label>5</label>
                        <mixed-citation publication-type="journal">Bozkurt, B., Gedik, A. C. &amp; Karaosmanolu, M. K. (2009). Türk Müziği için Müzik Bilgi Erişimi: problemler, çözüm önerileri ve araçlar. IEEE 17th Signal Processing and Communications Applications Conference, Antalya, Turkey, 2009, pp. 804-807, doi: 10.1109/SIU.2009.5136518.</mixed-citation>
                    </ref>
                                    <ref id="ref6">
                        <label>6</label>
                        <mixed-citation publication-type="journal">Börekci, A., &amp; Sevli, O. (2024). A classification study for Turkish folk music makam recognition using machine learning with data augmentation techniques. Neural Computing and Applications, 36(4), 1621-1639. https://doi.org/10.1007/s00521-023-09177-6</mixed-citation>
                    </ref>
                                    <ref id="ref7">
                        <label>7</label>
                        <mixed-citation publication-type="journal">Brittain, B. (2024a). Music labels sue AI companies Suno, Udio for U.S. copyright infringement. Reuters. https://www.reuters.com/technology/artificial-intelligence/music-labels-sue-ai-companies-suno-udio-us-copyright-infringement-2024-06-24/</mixed-citation>
                    </ref>
                                    <ref id="ref8">
                        <label>8</label>
                        <mixed-citation publication-type="journal">Brittain, B. (2024b). Music labels, AI lawsuits create new copyright puzzle for US courts. Reuters. https://www.reuters.com/legal/music-labels-ai-lawsuits-create-new-copyright-puzzle-us-courts-2024-08-03/</mixed-citation>
                    </ref>
                                    <ref id="ref9">
                        <label>9</label>
                        <mixed-citation publication-type="journal">Bryan-Kinns, N., Zhang, B., Zhao, S., &amp; Banar, B. (2024). Exploring Variational Auto-encoder Architectures, Configurations, and Datasets for Generative Music Explainable AI. Machine Intelligence Research, 21(1), 29-45. https://doi.org/10.1007/s11633-023-1457-1</mixed-citation>
                    </ref>
                                    <ref id="ref10">
                        <label>10</label>
                        <mixed-citation publication-type="journal">Cannam, C., Landone, C., Sandler, M., &amp; Bello, J. P. (2006). The Sonic Visualiser: A Visualisation Platform for Semantic Descriptors from Musical Signals. Easing Access to Sound Archives. Easing Access to Sound Archives.</mixed-citation>
                    </ref>
                                    <ref id="ref11">
                        <label>11</label>
                        <mixed-citation publication-type="journal">Civit, M., Civit-Masot, J., Cuadrado, F., &amp; Escalona, M. J. (2022). A systematic review of artificial intelligence-based music generation: Scope, applications, and future trends. Expert Systems with Applications, 209, 118190. https://doi.org/10.1016/j.eswa.2022.118190</mixed-citation>
                    </ref>
                                    <ref id="ref12">
                        <label>12</label>
                        <mixed-citation publication-type="journal">Collins, N. (2011). Live coding of Consequence. In Proceedings of the International Computer Music Conference (ICMC).</mixed-citation>
                    </ref>
                                    <ref id="ref13">
                        <label>13</label>
                        <mixed-citation publication-type="journal">Correya, A. A., Marcos Fernández, J., Joglar-Ongay, L., Alonso Jiménez, P., Serra, X., &amp; Bogdanov, D. (2021). Audio and music analysis on the web using Essentia.js. Transactions of the International Society for Music Information Retrieval. https://doi.org/10.5334/tismir.111</mixed-citation>
                    </ref>
                                    <ref id="ref14">
                        <label>14</label>
                        <mixed-citation publication-type="journal">David E. Rumelhart, G. E. (1986). Learning representations by back-propagating errors. Nature, 323, s. 533-536. doi:https://doi.org/10.1038/323533a0</mixed-citation>
                    </ref>
                                    <ref id="ref15">
                        <label>15</label>
                        <mixed-citation publication-type="journal">Fışkın, Ü. (2024). Examination of Ulvi Cemal Erkin’s Piano Concerto with GTTM. Journal for the Interdisciplinary Art and Education, 5(1), Article 1. https://doi.org/10.5281/zenodo.10866655</mixed-citation>
                    </ref>
                                    <ref id="ref16">
                        <label>16</label>
                        <mixed-citation publication-type="journal">Friconnet, G. (2023). A k-means clustering and histogram-based colorimetric analysis of metal album artworks: The colour palette of metal music. İçinde Metal Music Studies (C. 9, Sayı 1, ss. 77-100). Intellect. https://doi.org/10.1386/mms_00095_1</mixed-citation>
                    </ref>
                                    <ref id="ref17">
                        <label>17</label>
                        <mixed-citation publication-type="journal">Harris, T. (t.y.). Folk RNN. Geliş tarihi 29 Mart 2024, gönderen https://folkrnn.org/</mixed-citation>
                    </ref>
                                    <ref id="ref18">
                        <label>18</label>
                        <mixed-citation publication-type="journal">Hash—AI-powered Music Notation Tool. (t.y.). Geliş tarihi 29 Mart 2024, gönderen https://hash-music.com</mixed-citation>
                    </ref>
                                    <ref id="ref19">
                        <label>19</label>
                        <mixed-citation publication-type="journal">Hewlett, W. B., &amp; Selfridge-Field, E. (Ed.). (2001). The Virtual Score, Volume 12: Representation, Retrieval, Restoration. The MIT Press. https://doi.org/10.7551/mitpress/2058.001.0001</mixed-citation>
                    </ref>
                                    <ref id="ref20">
                        <label>20</label>
                        <mixed-citation publication-type="journal">İmseytoğlu, D., &amp; Yıldız, S. (2012). Yenidoğan Yoğun Bakım Ünitelerinde Müzik Terapi. İÜFN Hem. Derg, 20(2), 160-165.</mixed-citation>
                    </ref>
                                    <ref id="ref21">
                        <label>21</label>
                        <mixed-citation publication-type="journal">Good, M. (2001). MusicXML for notation and analysis. The virtual score: representation, retrieval, restoration, 12(113–124), 160.</mixed-citation>
                    </ref>
                                    <ref id="ref22">
                        <label>22</label>
                        <mixed-citation publication-type="journal">Goodfellow, I., Bengio, Y., Courville, A. (2016). Deep Learning. The MIT Press.</mixed-citation>
                    </ref>
                                    <ref id="ref23">
                        <label>23</label>
                        <mixed-citation publication-type="journal">Jakubowski, K., Eerola, T., Alborno, P., Volpe, G., Camurri, A., &amp; Clayton, M. (2017). Extracting Coarse Body Movements from Video in Music Performance: A Comparison of Automated Computer Vision Techniques with Motion Capture Data. Frontiers in Digital Humanities, 4. https://doi.org/10.3389/fdigh.2017.00009</mixed-citation>
                    </ref>
                                    <ref id="ref24">
                        <label>24</label>
                        <mixed-citation publication-type="journal">Newell, A., &amp; Simon, H. A. (1956). The Logic Theory Machine A Complex Information Processing System. in IRE Transactions on Information Theory, 2(3), s. 61-79. doi:10.1109/TIT.1956.1056797</mixed-citation>
                    </ref>
                                    <ref id="ref25">
                        <label>25</label>
                        <mixed-citation publication-type="journal">Kaliakatsos-Papakostas, M., Floros, A., &amp; Vrahatis, M. N. (2020). Artificial intelligence methods for music generation: A review and future perspectives. İçinde Nature-Inspired Computation and Swarm Intelligence (ss. 217-245). Elsevier. https://doi.org/10.1016/B978-0-12-819714-1.00024-5</mixed-citation>
                    </ref>
                                    <ref id="ref26">
                        <label>26</label>
                        <mixed-citation publication-type="journal">Kaplan, T. (2024). Probabilistic Models of Rhythmic Expectation &amp; Synchronisation [Doktora Tezi, QMUL]. https://qmro.qmul.ac.uk/xmlui/handle/123456789/94727</mixed-citation>
                    </ref>
                                    <ref id="ref27">
                        <label>27</label>
                        <mixed-citation publication-type="journal">Karaosmanoğlu, M. K. (2012). A Turkish makam music symbolic database for music information retrieval: SymbTr. Proceedings of 13th International Society for Music Information Retrieval Conference (ISMIR). 223-228. Porto, Portugal. http://repositori.upf.edu/handle/10230/25700</mixed-citation>
                    </ref>
                                    <ref id="ref28">
                        <label>28</label>
                        <mixed-citation publication-type="journal">Karaosmanoğlu, M. K. ve Taşçı, F. (2014). Türk Musikisi İçin Symbtr Sembolik Derlemi Üzerinde Otomatik Ezgi Analiz. Porte Akademik: Müzik ve Dans Araştırmaları Dergisi. Müzikte Kuram. Sayı.10. 98-115.</mixed-citation>
                    </ref>
                                    <ref id="ref29">
                        <label>29</label>
                        <mixed-citation publication-type="journal">Kong, Q., Li, B., Chen, J., &amp; Wang, Y. (2022). GiantMIDI-Piano: A large-scale MIDI dataset for classical piano music (arXiv:2010.07061; Version 3). arXiv. https://doi.org/10.48550/arXiv.2010.07061</mixed-citation>
                    </ref>
                                    <ref id="ref30">
                        <label>30</label>
                        <mixed-citation publication-type="journal">Kruspe, A. (2024). More than words: Advancements and challenges in speech recognition for singing (arXiv:2403.09298). arXiv. https://doi.org/10.48550/arXiv.2403.09298</mixed-citation>
                    </ref>
                                    <ref id="ref31">
                        <label>31</label>
                        <mixed-citation publication-type="journal">Kwon, H.-J., Kim, M.-J., Baek, J.-W., &amp; Chung, K. (2022). Voice Frequency Synthesis using VAW-GAN based Amplitude Scaling for Emotion Transformation. KSII Transactions on Internet and Information Systems (TIIS), 16(2), 713-725. https://doi.org/10.3837/tiis.2022.02.018</mixed-citation>
                    </ref>
                                    <ref id="ref32">
                        <label>32</label>
                        <mixed-citation publication-type="journal">MIDI 1.0 – MIDI.org. (t.y.). Erişilme tarihi 29 Mart 2024, adres https://midi.org/midi-1-0 Mitchell, T. M. (1997). Machine Learning. McGraw-Hill.</mixed-citation>
                    </ref>
                                    <ref id="ref33">
                        <label>33</label>
                        <mixed-citation publication-type="journal">Music Ai. (t.y.). Erişilme tarihi 29 Mart 2024, adres https://meetyourai.github.io/MusicAI/</mixed-citation>
                    </ref>
                                    <ref id="ref34">
                        <label>34</label>
                        <mixed-citation publication-type="journal">Neutron 4. (t.y.). Erişilme tarihi 29 Mart 2024, adres https://www.izotope.com/en/products/neutron.html</mixed-citation>
                    </ref>
                                    <ref id="ref35">
                        <label>35</label>
                        <mixed-citation publication-type="journal">Quinlan, J. (1986, March). Induction of decision trees. Centre for Advanced Computing Sciences, New South Wales Institute of Technology, Sydney. Boston: Kluwer Academic Publishers. doi:https://doi.org/10.1007/BF00116251</mixed-citation>
                    </ref>
                                    <ref id="ref36">
                        <label>36</label>
                        <mixed-citation publication-type="journal">Pan, S. J., &amp; Yang, Q. (2010). A Survey on Transfer Learning. IEEE Transactions on Knowledge and Data Engineering, 22(10), 1345-1359.</mixed-citation>
                    </ref>
                                    <ref id="ref37">
                        <label>37</label>
                        <mixed-citation publication-type="journal">Parlak, İ. H. (2021). Derin öğrenme teknikleri kullanılarak Türk makam müziği bestelenmesi [Doktora Tezi, Dokuz Eylül Üniversitesi]. https://tez.yok.gov.tr/UlusalTezMerkezi/tezSorguSonucYeni.jsp</mixed-citation>
                    </ref>
                                    <ref id="ref38">
                        <label>38</label>
                        <mixed-citation publication-type="journal">Parlak, İ. H., Çebi Y., Işıkhan C. &amp; Birant D. (2021). Deep Learning for Turkish Makam Music Composition. Turkish Journal of Electrical Engineering and Computer Sciences, 29(7), pp.3107-3118. doi:10.3906/elk-2101-44  pausetv (Direktör).</mixed-citation>
                    </ref>
                                    <ref id="ref39">
                        <label>39</label>
                        <mixed-citation publication-type="journal">(2024). Cem Karaca galaya hologram olarak katıldı. https://www.youtube.com/watch?v=xq8lw6bZCmU</mixed-citation>
                    </ref>
                                    <ref id="ref40">
                        <label>40</label>
                        <mixed-citation publication-type="journal">RuMind Music Meditation App. (t.y.). Erişilme tarihi 23 Mayıs 2024, adres https://www.rumindapp.com/</mixed-citation>
                    </ref>
                                    <ref id="ref41">
                        <label>41</label>
                        <mixed-citation publication-type="journal">Semantic Scholar | About Us. (t.y.). Erişilme tarihi 23 Mayıs 2024, adres https://www.semanticscholar.org/about</mixed-citation>
                    </ref>
                                    <ref id="ref42">
                        <label>42</label>
                        <mixed-citation publication-type="journal">Wang, A. L.-C. (2003). An Industrial Strength Audio Search Algorithm. Proceedings of the 4th International Society for Music Information Retrieval Conference (ISMIR 2003), Baltimore, Maryland (USA), 26-30 October 2003, 7–13. https://doi.org/10.1109/IITAW.2009.110</mixed-citation>
                    </ref>
                                    <ref id="ref43">
                        <label>43</label>
                        <mixed-citation publication-type="journal">Suno AI. (t.y.). Erişilme tarihi 29 Mart 2024, adres https://www.suno.ai/</mixed-citation>
                    </ref>
                                    <ref id="ref44">
                        <label>44</label>
                        <mixed-citation publication-type="journal">Raffel, C. (2016). Learning-Based Methods for Comparing Sequences, with Applications to Audio-to-MIDI Alignment and Matching. PhD Thesis.</mixed-citation>
                    </ref>
                                    <ref id="ref45">
                        <label>45</label>
                        <mixed-citation publication-type="journal">Weiss, S. M., &amp; Indurkhya, N. (1995, December 1). Rule-based Machine Learning Methods for Functional Prediction. Journal of Artificial Intelligence Research, 3, s. 383-403. doi:https://doi.org/10.1613/jair.199</mixed-citation>
                    </ref>
                                    <ref id="ref46">
                        <label>46</label>
                        <mixed-citation publication-type="journal">Wen, Z., Chen, A., Zhou, G., Yi, J., &amp; Peng, W. (2024). Parallel attention of representation global time–frequency correlation for music genre classification. Multimedia Tools and Applications, 83(4), 10211-10231. https://doi.org/10.1007/s11042-023-16024-2</mixed-citation>
                    </ref>
                                    <ref id="ref47">
                        <label>47</label>
                        <mixed-citation publication-type="journal">Widmer, G. (2003). Discovering simple rules in complex data: A meta-learning algorithm and some surprising musical discoveries. Artificial Intelligence, 146(2), s. 129-148. doi:https://doi.org/10.1016/S0004-3702(03)00016-X</mixed-citation>
                    </ref>
                                    <ref id="ref48">
                        <label>48</label>
                        <mixed-citation publication-type="journal">Wijaya, N. N., Setiadi, D. R. I. M., &amp; Muslikh, A. R. (2024). Music-Genre Classification using Bidirectional Long Short-Term Memory and Mel-Frequency Cepstral Coefficients. Journal of Computing Theories and Applications, 2(1), Article 1. https://doi.org/10.62411/jcta.9655</mixed-citation>
                    </ref>
                                    <ref id="ref49">
                        <label>49</label>
                        <mixed-citation publication-type="journal">Thickstun, J., Harchaoui, Z., &amp; Kakade, S. (2017). Learning Features of Music from Scratch (arXiv:1611.09827). arXiv. http://arxiv.org/abs/1611.09827</mixed-citation>
                    </ref>
                                    <ref id="ref50">
                        <label>50</label>
                        <mixed-citation publication-type="journal">Tzanetakis, G., &amp; Cook, P. (2002). Musical genre classification of audio signals. IEEE Transactions on Speech and Audio Processing, 10(5), 293-302.</mixed-citation>
                    </ref>
                                    <ref id="ref51">
                        <label>51</label>
                        <mixed-citation publication-type="journal">Yıldız, Ş., Güvener, D., &amp; Güray, C. (2024). Adakale Türkülerinin Yeniden Yapılandırılması için bir Algoritmik Kompozisyon Yöntemi Arayışı. Sahne ve Müzik Eğitim - Araştırma e-Dergisi, 10(18), 80-100.</mixed-citation>
                    </ref>
                                    <ref id="ref52">
                        <label>52</label>
                        <mixed-citation publication-type="journal">Yücel, İ. E. (2022). MIXPREP: Machine Learning-Based Multitrack Mix Preparation Assistant [Doktora Tezi]. İstanbul Teknik  Üniversitesi.</mixed-citation>
                    </ref>
                            </ref-list>
                    </back>
    </article>
