<?xml version="1.0" encoding="UTF-8"?>
<!DOCTYPE article PUBLIC "-//NLM//DTD JATS (Z39.96) Journal Publishing DTD v1.4 20241031//EN"
        "https://jats.nlm.nih.gov/publishing/1.4/JATS-journalpublishing1-4.dtd">
<article  article-type="research-article"        dtd-version="1.4">
            <front>

                <journal-meta>
                                                                <journal-id journal-id-type="publisher-id">konjes</journal-id>
            <journal-title-group>
                                                                                    <journal-title>Konya Journal of Engineering Sciences</journal-title>
            </journal-title-group>
                                        <issn pub-type="epub">2667-8055</issn>
                                                                                            <publisher>
                    <publisher-name>Konya Technical University</publisher-name>
                </publisher>
                    </journal-meta>
                <article-meta>
                                        <article-id pub-id-type="doi">10.36306/konjes.1649691</article-id>
                                                                <article-categories>
                                            <subj-group  xml:lang="en">
                                                            <subject>Biomedical Engineering (Other)</subject>
                                                            <subject>Electrical Engineering (Other)</subject>
                                                            <subject>Electronics, Sensors and Digital Hardware (Other)</subject>
                                                    </subj-group>
                                            <subj-group  xml:lang="tr">
                                                            <subject>Biyomedikal Mühendisliği (Diğer)</subject>
                                                            <subject>Elektrik Mühendisliği (Diğer)</subject>
                                                            <subject>Elektronik, Sensörler ve Dijital Donanım (Diğer)</subject>
                                                    </subj-group>
                                    </article-categories>
                                                                                                                                                        <title-group>
                                                                                                                                                            <article-title>DEVELOPMENT OF A TERNARY LEVELS EMOTION CLASSIFICATION MODEL UTILIZING ELECTROENCEPHALOGRAPHY DATA SET</article-title>
                                                                                                    </title-group>
            
                                                    <contrib-group content-type="authors">
                                                                        <contrib contrib-type="author">
                                                                    <contrib-id contrib-id-type="orcid">https://orcid.org/0000-0003-4074-2503</contrib-id>
                                                                <name>
                                    <surname>Okumuş</surname>
                                    <given-names>Hatice</given-names>
                                </name>
                                                                    <aff>KARADENIZ TECHNICAL UNIVERSITY</aff>
                                                            </contrib>
                                                    <contrib contrib-type="author">
                                                                    <contrib-id contrib-id-type="orcid">https://orcid.org/0000-0002-5371-7238</contrib-id>
                                                                <name>
                                    <surname>Ergün</surname>
                                    <given-names>Ebru</given-names>
                                </name>
                                                                    <aff>RECEP TAYYIP ERDOGAN UNIVERSITY</aff>
                                                            </contrib>
                                                                                </contrib-group>
                        
                                        <pub-date pub-type="pub" iso-8601-date="2025-06-01">
                    <day>01</day>
                    <month>06</month>
                    <year>2025</year>
                </pub-date>
                                        <volume>13</volume>
                                        <issue>2</issue>
                                        <fpage>607</fpage>
                                        <lpage>623</lpage>
                        
                        <history>
                                    <date date-type="received" iso-8601-date="2025-03-02">
                        <day>02</day>
                        <month>03</month>
                        <year>2025</year>
                    </date>
                                                    <date date-type="accepted" iso-8601-date="2025-05-23">
                        <day>23</day>
                        <month>05</month>
                        <year>2025</year>
                    </date>
                            </history>
                                        <permissions>
                    <copyright-statement>Copyright © 2025, Konya Journal of Engineering Sciences</copyright-statement>
                    <copyright-year>2025</copyright-year>
                    <copyright-holder>Konya Journal of Engineering Sciences</copyright-holder>
                </permissions>
            
                                                                                                                        <abstract><p>Electroencephalogram (EEG)-based emotion recognition has gained increasing attention due to its potential in objectively assessing affective states. However, many existing studies rely on limited datasets and focus on binary classification or narrow feature sets, limiting the granularity and generalizability of their findings. To address these challenges, this study explores a ternary classification framework for both valence and arousal dimensions—dividing each into low, medium, and high levels—to capture a broader spectrum of emotional responses. EEG recordings from ten randomly selected participants in the DEAP dataset were used. Each 60-second EEG segment was divided into six non-overlapping windows of 10 seconds to preserve temporal stability and extract reliable features. The Hilbert Transform was applied to compute instantaneous amplitude and phase information, enabling the detection of subtle variations in emotional states. These features were then classified using a feed-forward neural network. The proposed approach achieved impressive classification accuracies of 99.13% for arousal and 99.50% for valence, demonstrating its effectiveness in recognizing multi-level emotional states. By moving beyond binary labels and leveraging time-frequency domain features, this study contributes to the development of more refined and responsive emotion recognition systems. These findings offer promising insights for real-world applications in affective computing, mental health monitoring, and adaptive human-computer interaction, where precise emotion modeling plays a critical role.</p></abstract>
                                                            
            
                                                                                        <kwd-group>
                                                    <kwd>Electroencephalography</kwd>
                                                    <kwd>Emotion</kwd>
                                                    <kwd>Feature Extraction</kwd>
                                                    <kwd>Hilbert Transform</kwd>
                                                    <kwd>Multi-Classification</kwd>
                                                    <kwd>Signal Processing</kwd>
                                            </kwd-group>
                            
                                                                                                                                                    </article-meta>
    </front>
    <back>
                            <ref-list>
                                    <ref id="ref1">
                        <label>1</label>
                        <mixed-citation publication-type="journal">M. G. Huddar, S. S. Sannakki, and V. S. Rajpurohit, &quot;Attention-based multi-modal sentiment analysis and emotion detection in conversation using RNN,&quot; Int. J. Interact. Multimedia Artif. Intell, vol. 6, no. 6, June 2021, doi: 10.9781/ijimai.2020.07.004.</mixed-citation>
                    </ref>
                                    <ref id="ref2">
                        <label>2</label>
                        <mixed-citation publication-type="journal">W. Rahmouni, G. Bachir, and M. Aillerie, &quot;A new control strategy for harmonic reduction in photovoltaic inverters inspired by the autonomous nervous system,&quot; J. Electr. Eng., vol. 73, no. 5, pp. 310–317, September 2022, doi: 10.2478/jee-2022-0041.</mixed-citation>
                    </ref>
                                    <ref id="ref3">
                        <label>3</label>
                        <mixed-citation publication-type="journal">L. George and H. Hadi, &quot;User identification and verification from a pair of simultaneous EEG channels using transform-based features,&quot; Int. J. Interact. Multimedia Artif. Intell., vol. 5, no. 5,pp. 54-62,  June 2019, doi: 10.9781/ijimai.2018.12.008.</mixed-citation>
                    </ref>
                                    <ref id="ref4">
                        <label>4</label>
                        <mixed-citation publication-type="journal">J. Wang and M. Wang, &quot;Review of the emotional feature extraction and classification using EEG signals,&quot; Cogn. Robot., vol. 1, pp. 29–40, April 2021, doi: 10.1016/j.cogr.2021.04.001.</mixed-citation>
                    </ref>
                                    <ref id="ref5">
                        <label>5</label>
                        <mixed-citation publication-type="journal">W. L. Zheng and B. L. Lu, &quot;Investigating critical frequency bands and channels for EEG-based emotion recognition with deep neural networks,&quot; IEEE Trans. Auton. Ment. Dev., vol. 7, no. 3, pp. 162–175, May 2015, doi: 10.1109/TAMD.2015.2431497.</mixed-citation>
                    </ref>
                                    <ref id="ref6">
                        <label>6</label>
                        <mixed-citation publication-type="journal">R. N. Duan, J. Y. Zhu, and B. L. Lu, &quot;Differential entropy feature for EEG-based emotion classification,&quot; in Proc. 6th Int. IEEE/EMBS Conf. Neural Eng. (NER), San Diego, CA, USA, November 2013, pp. 81–84, doi: 10.1109/NER.2013.6695889.</mixed-citation>
                    </ref>
                                    <ref id="ref7">
                        <label>7</label>
                        <mixed-citation publication-type="journal">J. Li, Z. Zhang, and H. He, &quot;Hierarchical convolutional neural networks for EEG-based emotion recognition,&quot; Cogn. Comput., vol. 10, pp. 368–380, April 2018, doi: 10.1007/s12559-017-9533-x.</mixed-citation>
                    </ref>
                                    <ref id="ref8">
                        <label>8</label>
                        <mixed-citation publication-type="journal">M. A. Asghar et al., &quot;EEG-based multi-modal emotion recognition using bag of deep features: An optimal feature selection approach,&quot; Sensors, vol. 19, no. 23, p. 5218, November 2019, doi: 10.3390/s19235218.</mixed-citation>
                    </ref>
                                    <ref id="ref9">
                        <label>9</label>
                        <mixed-citation publication-type="journal">K. H. Cheah, H. Nisar, V. V. Yap, C. Y. Lee, and G. R. Sinha, &quot;Optimizing residual networks and VGG for classification of EEG signals: Identifying ideal channels for emotion recognition,&quot; J. Healthcare Eng., vol. 1, pp. 1–10, March 2021, doi: 10.1155/2021/5599615.</mixed-citation>
                    </ref>
                                    <ref id="ref10">
                        <label>10</label>
                        <mixed-citation publication-type="journal">G. Xiao et al., &quot;4D attention-based neural network for EEG emotion recognition,&quot; Cogn. Neurodynamics, vol. 16, pp. 1–14, January 2022, doi: 10.1007/s11571-021-09751-5.</mixed-citation>
                    </ref>
                                    <ref id="ref11">
                        <label>11</label>
                        <mixed-citation publication-type="journal">M. Jin, H. Chen, Z. Li, and J. Li, &quot;EEG-based emotion recognition using graph convolutional network with learnable electrode relations,&quot; in Proc. 43rd Annu. Int. Conf. IEEE Eng. Med. Biol. Soc. (EMBC), Mexico, November 2021, pp. 5953–5957, doi: 10.1109/EMBC46164.2021.9630062.</mixed-citation>
                    </ref>
                                    <ref id="ref12">
                        <label>12</label>
                        <mixed-citation publication-type="journal">S. Katsigiannis and N. Ramzan, &quot;DREAMER: A database for emotion recognition through EEG and ECG signals from wireless low-cost off-the-shelf devices,&quot; IEEE J. Biomed. Health Inform., vol. 22, no. 1, pp. 98–107, March 2017, doi: 10.1109/JBHI.2017.2688239.</mixed-citation>
                    </ref>
                                    <ref id="ref13">
                        <label>13</label>
                        <mixed-citation publication-type="journal">S. Koelstra et al., &quot;DEAP: A database for emotion analysis using physiological signals,&quot; IEEE Trans. Affect. Comput., vol. 3, no. 1, pp. 18–31, December 2011, doi: 10.1109/T-AFFC.2011.15.</mixed-citation>
                    </ref>
                                    <ref id="ref14">
                        <label>14</label>
                        <mixed-citation publication-type="journal">T. Song, W. Zheng, P. Song, and Z. Cui, &quot;EEG emotion recognition using dynamical graph convolutional neural networks,&quot; IEEE Trans. Affect. Comput., vol. 11, no. 3, pp. 532–541, March 2018, doi: 10.1109/TAFFC.2018.2817622.</mixed-citation>
                    </ref>
                                    <ref id="ref15">
                        <label>15</label>
                        <mixed-citation publication-type="journal">T. Zhang, X. Wang, X. Xu, and C. P. Chen, &quot;GCB-Net: Graph convolutional broad network and its application in emotion recognition,&quot; IEEE Trans. Affect. Comput., vol. 13, no. 1, pp. 379–388, August 2019, doi: 10.1109/TAFFC.2019.2937768.</mixed-citation>
                    </ref>
                                    <ref id="ref16">
                        <label>16</label>
                        <mixed-citation publication-type="journal">R. Li et al., &quot;SSTD: A novel spatio-temporal demographic network for EEG-based emotion recognition,&quot; IEEE Trans. Comput. Soc. Syst., vol. 10, no. 1, pp. 376–387, January 2022, doi: 10.1109/TCSS.2022.3188891.</mixed-citation>
                    </ref>
                                    <ref id="ref17">
                        <label>17</label>
                        <mixed-citation publication-type="journal">K. Lin, L. Zhang, J. Cai, J. Sun, W. Cui, and G. Liu, &quot;DSE-Mixer: A pure multilayer perceptron network for emotion recognition from EEG feature maps,&quot; J. Neurosci. Methods, vol. 401, January 2024, doi: 10.1016/j.jneumeth.2023.110008.</mixed-citation>
                    </ref>
                                    <ref id="ref18">
                        <label>18</label>
                        <mixed-citation publication-type="journal">Z. Gao, Y. Li, Y. Yang, X. Wang, N. Dong, and H. D. Chiang, &quot;A GPSO-optimized convolutional neural networks for EEG-based emotion recognition,&quot; Neurocomputing, vol. 380, pp. 225–235, March 2020, doi: 10.1016/j.neucom.2019.10.096.</mixed-citation>
                    </ref>
                                    <ref id="ref19">
                        <label>19</label>
                        <mixed-citation publication-type="journal">H. Chao and L. Dong, &quot;Emotion recognition using three-dimensional feature and convolutional neural network from multichannel EEG signals,&quot; IEEE Sensors J., vol. 21, no. 2, pp. 2024–2034, September 2020, doi: 10.1109/JSEN.2020.3020828.</mixed-citation>
                    </ref>
                                    <ref id="ref20">
                        <label>20</label>
                        <mixed-citation publication-type="journal">K. Martín-Chinea, J. Ortega, J. F. Gómez-González, E. Pereda, J. Toledo, &amp; L. Acosta, “Effect of time windows in LSTM networks for EEG-based BCIs. Cognitive Neurodynamics”, vol. 17, no.2, 385-398, April 2023. doi: 10.1007/s11571-022-09832-z.</mixed-citation>
                    </ref>
                                    <ref id="ref21">
                        <label>21</label>
                        <mixed-citation publication-type="journal">S. Zhou, B. Chen, Y. Zhang, H. Liu, Y. Xiao, and X. Pan, &quot;A feature extraction method based on feature fusion and its application in the text-driven failure diagnosis field,&quot; Int. J. Interact. Multimedia Artif. Intell., vol. 6, no. 4, pp. 121-130, December 2020, doi: 10.9781/ijimai.2020.11.006.</mixed-citation>
                    </ref>
                                    <ref id="ref22">
                        <label>22</label>
                        <mixed-citation publication-type="journal">E. Ergün and O. Aydemir, &quot;A Hybrid BCI Using Singular Value Decomposition Values of the Fast Walsh–Hadamard Transform Coefficients,&quot; IEEE Trans. Cogn. Dev. Syst., vol. 15, no. 2, pp. 454–463, October 2020, doi: 10.1109/TCDS.2020.3028785.</mixed-citation>
                    </ref>
                                    <ref id="ref23">
                        <label>23</label>
                        <mixed-citation publication-type="journal">H. M. Emara et al., &quot;Hilbert transform and statistical analysis for channel selection and epileptic seizure prediction,&quot; Wireless Pers. Commun., vol. 116, pp. 3371–3395, January 2021, doi: 10.1007/s11277-020-07857-3.</mixed-citation>
                    </ref>
                                    <ref id="ref24">
                        <label>24</label>
                        <mixed-citation publication-type="journal">E. Ergün, &quot;Artificial Intelligence Approaches for Accurate Assessment of Insulator Cleanliness in High-Voltage Electrical Systems,&quot; Electr. Eng., pp. 1–16, August 2024, doi: 10.1007/s00202-024-02691-.</mixed-citation>
                    </ref>
                                    <ref id="ref25">
                        <label>25</label>
                        <mixed-citation publication-type="journal">E. Yavuz, and Ö. Aydemir, “Classification of EEG based BCI signals imagined hand closing and opening”. In IEEE 40th International Conference on Telecommunications and Signal Processing (TSP), pp. 425-428, July 2017, doi: 10.1109/TSP.2017.8076020.</mixed-citation>
                    </ref>
                                    <ref id="ref26">
                        <label>26</label>
                        <mixed-citation publication-type="journal">D. Svozil, V. Kvasnicka, and J. Pospichal, &quot;Introduction to multi-layer feed-forward neural networks,&quot; Chemom. Intell. Lab. Syst., vol. 39, no. 1, pp. 43–62, November 1997, doi: 10.1016/S0169-7439(97)00061-0.</mixed-citation>
                    </ref>
                                    <ref id="ref27">
                        <label>27</label>
                        <mixed-citation publication-type="journal">H. Choubey and A. Pandey, &quot;A combination of statistical parameters for the detection of epilepsy and EEG classification using ANN and KNN classifier,&quot; Signal Image Video Process., vol. 15, no. 3, pp. 475–483, April 2021, doi: 10.1007/s11760-020-01767-4.</mixed-citation>
                    </ref>
                                    <ref id="ref28">
                        <label>28</label>
                        <mixed-citation publication-type="journal">E. Ergün, “Deep learning-based multiclass classification for citrus anomaly detection in Agriculture”. Signal Image Video Process, vol. 18, pp. 8077–8088, July 2024. doi: 10.1007/s11760-024-03452-2.</mixed-citation>
                    </ref>
                                    <ref id="ref29">
                        <label>29</label>
                        <mixed-citation publication-type="journal">E. Yavuz, and Ö. Aydemir,  “Olfaction recognition by EEG analysis using wavelet transform features”. In IEEE International Symposium on Innovations in Intelligent Systems and Applications (INISTA), pp. 1-4, August 2016, doi: 10.1109/INISTA.2016.7571827.</mixed-citation>
                    </ref>
                                    <ref id="ref30">
                        <label>30</label>
                        <mixed-citation publication-type="journal">G. Mary, S. Chitti, R. B. Vallabhaneni, and N. Renuka, &quot;EEG Signal Classification Automation using Novel Modified Random Forest Approach,&quot; J. Sci. Ind. Res., vol. 82, no. 1, pp. 101–108, January 2023, doi: 10.56042/jsir.v82i1.70213.</mixed-citation>
                    </ref>
                                    <ref id="ref31">
                        <label>31</label>
                        <mixed-citation publication-type="journal">A. Sakalle, P. Tomar, H. Bhardwaj, D. Acharya, and A. Bhardwaj, &quot;A LSTM based deep learning network for recognizing emotions using wireless brainwave driven system,&quot; Expert Syst. Appl., vol. 173, no.1, p. 114516, July 2021, doi: 10.1016/j.eswa.2020.114516.</mixed-citation>
                    </ref>
                                    <ref id="ref32">
                        <label>32</label>
                        <mixed-citation publication-type="journal">S. Bagherzadeh, K. Maghooli, A. Shalbaf, and A. Maghsoudi, &quot;Emotion recognition using continuous wavelet transform and ensemble of convolutional neural networks through transfer learning from electroencephalogram signal,&quot; Front. Biomed. Technol., vol. 10, no. 1, pp. 47–56, January 2023, doi: 10.18502/fbt.v10i1.11512.</mixed-citation>
                    </ref>
                                    <ref id="ref33">
                        <label>33</label>
                        <mixed-citation publication-type="journal">Y. Luo, C. Wu, and C. Lv, &quot;Cascaded Convolutional Recurrent Neural Networks for EEG Emotion Recognition Based on Temporal–Frequency–Spatial Features,&quot; Appl. Sci., vol. 13, no. 11, p. 6761, June 2023, doi: 10.3390/app13116761.</mixed-citation>
                    </ref>
                                    <ref id="ref34">
                        <label>34</label>
                        <mixed-citation publication-type="journal">J. Kim, J. Oh, and T. Y. Heo, &quot;Acoustic classification of mosquitoes using convolutional neural networks combined with activity circadian rhythm information,&quot; Int. J. Interact. Multimedia Artif. Intell., vol. 7, no. 2, pp. 59-65, December 2021, doi: 10.9781/ijimai.2021.08.009.</mixed-citation>
                    </ref>
                                    <ref id="ref35">
                        <label>35</label>
                        <mixed-citation publication-type="journal">V. Gupta, M. D. Chopda, and R. B. Pachori, &quot;Cross-subject emotion recognition using flexible analytic wavelet transform from EEG signals,&quot; IEEE Sensors J., vol. 19, no. 6, pp. 2266–2274, March 2019, doi: 10.1109/JSEN.2018.2883497.</mixed-citation>
                    </ref>
                                    <ref id="ref36">
                        <label>36</label>
                        <mixed-citation publication-type="journal">H. Mei and X. Xu, &quot;EEG-based emotion classification using convolutional neural network,&quot; in Proc. Int. Conf. Security, Pattern Anal., Cybern. (SPAC), Shenzhen, China, December 2017, pp. 130–135, doi: 10.1109/SPAC.2017.8304301.</mixed-citation>
                    </ref>
                                    <ref id="ref37">
                        <label>37</label>
                        <mixed-citation publication-type="journal">W. L. Zheng, J. Y. Zhu, and B. L. Lu, &quot;Identifying stable patterns over time for emotion recognition from EEG,&quot; IEEE Trans. Affect. Comput., vol. 10, no. 3, pp. 417–429, June 2017, doi: 10.1109/TAFFC.2017.2712143.</mixed-citation>
                    </ref>
                                    <ref id="ref38">
                        <label>38</label>
                        <mixed-citation publication-type="journal">M. Zangeneh Soroush, K. Maghooli, S. K. Setarehdan, and A. M. Nasrabadi, &quot;A novel EEG-based approach to classify emotions through phase space dynamics,&quot; Signal Image Video Process., vol. 13, pp. 1149–1156, March 2019, doi: 10.1007/s11760-019-01455-y.</mixed-citation>
                    </ref>
                                    <ref id="ref39">
                        <label>39</label>
                        <mixed-citation publication-type="journal">Y. H. Kwon, S. B. Shin, and S. D. Kim, &quot;Electroencephalography based fusion two-dimensional (2D)-convolution neural networks (CNN) model for emotion recognition system,&quot; Sensors, vol. 18, no. 5, p. 1383, April 2018, doi: 10.3390/s18051383.</mixed-citation>
                    </ref>
                            </ref-list>
                    </back>
    </article>
