<?xml version="1.0" encoding="UTF-8"?>
<!DOCTYPE article PUBLIC "-//NLM//DTD JATS (Z39.96) Journal Publishing DTD v1.4 20241031//EN"
        "https://jats.nlm.nih.gov/publishing/1.4/JATS-journalpublishing1-4.dtd">
<article  article-type="research-article"        dtd-version="1.4">
            <front>

                <journal-meta>
                                                                <journal-id>lisans</journal-id>
            <journal-title-group>
                                                                                    <journal-title>Ana Dili Eğitimi Dergisi</journal-title>
            </journal-title-group>
                                        <issn pub-type="epub">2147-6020</issn>
                                                                                            <publisher>
                    <publisher-name>Mehmet KURUDAYIOĞLU</publisher-name>
                </publisher>
                    </journal-meta>
                <article-meta>
                                        <article-id pub-id-type="doi">10.16916/aded.1212049</article-id>
                                                                <article-categories>
                                            <subj-group  xml:lang="en">
                                                            <subject>Other Fields of Education</subject>
                                                    </subj-group>
                                            <subj-group  xml:lang="tr">
                                                            <subject>Alan Eğitimleri</subject>
                                                    </subj-group>
                                    </article-categories>
                                                                                                                                                        <title-group>
                                                                                                                        <trans-title-group xml:lang="en">
                                    <trans-title>Investigation of Item Properties Affecting the Difficulty Index of PISA 2015 Reading Literacy Items</trans-title>
                                </trans-title-group>
                                                                                                                                                                                                <article-title>PISA 2015 Okuma Becerisi Maddelerinin Güçlük İndeksini Etkileyen Madde Özelliklerinin İncelenmesi</article-title>
                                                                                                    </title-group>
            
                                                    <contrib-group content-type="authors">
                                                                        <contrib contrib-type="author">
                                                                    <contrib-id contrib-id-type="orcid">https://orcid.org/0000-0002-9526-6156</contrib-id>
                                                                <name>
                                    <surname>Demirkol</surname>
                                    <given-names>Sinem</given-names>
                                </name>
                                                            </contrib>
                                                    <contrib contrib-type="author">
                                                                    <contrib-id contrib-id-type="orcid">https://orcid.org/0000-0002-7301-0096</contrib-id>
                                                                <name>
                                    <surname>Ayvallı Karagöz</surname>
                                    <given-names>Merve</given-names>
                                </name>
                                                                    <aff>AKDENİZ ÜNİVERSİTESİ</aff>
                                                            </contrib>
                                                                                </contrib-group>
                        
                                        <pub-date pub-type="pub" iso-8601-date="2023-07-23">
                    <day>23</day>
                    <month>07</month>
                    <year>2023</year>
                </pub-date>
                                        <volume>11</volume>
                                        <issue>3</issue>
                                        <fpage>567</fpage>
                                        <lpage>579</lpage>
                        
                        <history>
                                    <date date-type="received" iso-8601-date="2022-11-30">
                        <day>30</day>
                        <month>11</month>
                        <year>2022</year>
                    </date>
                                                    <date date-type="accepted" iso-8601-date="2023-07-20">
                        <day>20</day>
                        <month>07</month>
                        <year>2023</year>
                    </date>
                            </history>
                                        <permissions>
                    <copyright-statement>Copyright © 2013, Ana Dili Eğitimi Dergisi</copyright-statement>
                    <copyright-year>2013</copyright-year>
                    <copyright-holder>Ana Dili Eğitimi Dergisi</copyright-holder>
                </permissions>
            
                                                                                                <trans-abstract xml:lang="en">
                            <p>The aim of this research is to investigate the item properties that affect the parameters of the reading literacy items. For this purpose, item format, cognitive domain and the effects of the interaction of these two variables on item difficulty are examined. The study group of the research consists of 2418 students who responded to the reading subtest in the PISA 2015 Turkey sample. The analyzes of the study are carried out with Explanatory IRT models, which is a multi-level method. According to the results, open-response items are significantly more difficult than multiple-choice items, and the items in the integrate cognitive domain are significantly more difficult than the items in the access and evaluate domain. In addition, constructed-response items are more suitable for measuring meta-cognitive domains, while selected-items are better for measuring achievements in sub-cognitive domains.</p></trans-abstract>
                                                                                                                                    <abstract><p>Bu çalışmanın amacı okuma becerisi maddelerinin güçlük indeksini etkileyen madde özelliklerini belirlemektir. Bu amaç doğrultusunda madde formatı, madde bilişsel alan düzeyi ve bu iki değişkene ait etkileşimin madde güçlüğü üzerindeki etkileri incelenmiştir. Araştırmanın çalışma grubunu PISA 2015 Türkiye uygulamasında okuma becerisi alt testine yanıt veren 2418 öğrenci oluşturmaktadır. Çalışmanın analizleri çok seviyeli bir yöntem olan Açıklayıcı MTK modelleri ile yürütülmüştür. Elde edilen sonuçlar açık uçlu maddelerin çoktan seçmeli maddelere göre, anlama ve yorumlama bilişsel alanında yer alan maddelerin ise bilgi ve değerlendirme basamağında yer alan maddelere göre anlamlı derecede daha zor olduğunu göstermektedir. Madde formatı ve madde bilişsel alan kesişimi incelendiğinde ise, bilişsel alanı anlama ve yorumlama olan maddelerinin açık uçlu sorulmasının maddeleri kolaylaştıracağı, bilgi basamağında yer alan maddelerin ise açık uçlu sorulmasının maddeleri zorlaştıracağı saptanmıştır.</p></abstract>
                                                            
            
                                                                                        <kwd-group xml:lang="tr">
                                                    <kwd>Madde formatı</kwd>
                                                    <kwd>bilişsel alan</kwd>
                                                    <kwd>açıklayıcı madde tepki kuramı</kwd>
                                                    <kwd>PISA 2015</kwd>
                                            </kwd-group>
                            
                                                <kwd-group xml:lang="en">
                                                    <kwd>Item format</kwd>
                                                    <kwd>cognitive domain</kwd>
                                                    <kwd>explanatory IRT</kwd>
                                                    <kwd>PISA 2015</kwd>
                                            </kwd-group>
                                                                                                                                        </article-meta>
    </front>
    <back>
                            <ref-list>
                                    <ref id="ref1">
                        <label>1</label>
                        <mixed-citation publication-type="journal">Ackerman, T. A., &amp; Smith, P. L. (1988). A comparison of the information provided by essay, multiple-choice, and free-response writing tests. Applied Psychological Measurement, 12(2), 117-128.</mixed-citation>
                    </ref>
                                    <ref id="ref2">
                        <label>2</label>
                        <mixed-citation publication-type="journal">Anderson, L., Krathwohl, D., Airasian, P., Cruikshank, K., Mayer, R., Pintrich, P., et al. (2000). A taxonomy for learning, teaching, and assessing: A revision of bloom&#039;s taxonomy of educational objectives. Abridged Edition: Allyn &amp; Bacon.</mixed-citation>
                    </ref>
                                    <ref id="ref3">
                        <label>3</label>
                        <mixed-citation publication-type="journal">Bacon, D. R. (2003). Assessing learning outcomes: A comparison of multiple-choice and short-answer questions in a marketing context. Journal of Marketing Education, 25(1), 31-36.</mixed-citation>
                    </ref>
                                    <ref id="ref4">
                        <label>4</label>
                        <mixed-citation publication-type="journal">Badger, E., &amp; Thomas, B. (1991). Open-ended questions in reading. Practical Assessment, Research, and Evaluation, 3(1), 4.</mixed-citation>
                    </ref>
                                    <ref id="ref5">
                        <label>5</label>
                        <mixed-citation publication-type="journal">Bates, D., Maechler, M., Bokler, B., &amp; Walker, S. (2014). Fitting linear mixed-effects models using lme4. Journal of Statistical Software, 67(1), 1-48.</mixed-citation>
                    </ref>
                                    <ref id="ref6">
                        <label>6</label>
                        <mixed-citation publication-type="journal">Becker, W. E., &amp; Johnston, C. (1999). The relationship between multiple choice and essay response questions in assessing economics understanding. Economic Record, 75(4), 348-357.</mixed-citation>
                    </ref>
                                    <ref id="ref7">
                        <label>7</label>
                        <mixed-citation publication-type="journal">Beller, M., &amp; Gafni, N. (2000). Can item format (multiple choice vs. open-ended) account for gender differences in mathematics achievement? Sex Roles, 42(1), 1-21.</mixed-citation>
                    </ref>
                                    <ref id="ref8">
                        <label>8</label>
                        <mixed-citation publication-type="journal">Bennett, R. E., Ward, W. C., Rock, D. A., &amp; LaHart, C. (1990). Toward a framework for constructed-response items.</mixed-citation>
                    </ref>
                                    <ref id="ref9">
                        <label>9</label>
                        <mixed-citation publication-type="journal">Bennett, R. E., Rock, D. A., Braun, H. I., Frye, D., Spohrer, J. C., &amp; Soloway, E. (1990). The relationship of expert-system scored constrained free-response items to multiple-choice and open-ended items. Applied Psychological Measurement, 14(2), 151-162.</mixed-citation>
                    </ref>
                                    <ref id="ref10">
                        <label>10</label>
                        <mixed-citation publication-type="journal">Ben-Simon, A., Budescu, D. V., &amp; Nevo, B. (1997). A comparative study of measures of partial knowledge in multiple-choice tests. Applied Psychological Measurement, 21(1), 65-88.</mixed-citation>
                    </ref>
                                    <ref id="ref11">
                        <label>11</label>
                        <mixed-citation publication-type="journal">Bible, L., Simkin, M. G., &amp; Kuechler, W. L. (2008). Using multiple-choice tests to evaluate students&#039; understanding of accounting. Accounting Education: An International Journal, 17(S1), S55-S68.</mixed-citation>
                    </ref>
                                    <ref id="ref12">
                        <label>12</label>
                        <mixed-citation publication-type="journal">Birgili, B. (2014). Open ended questions as an alternative to multiple choice: Dilemma in Turkish examination system (Master&#039;s thesis). Middle East Technical University Institute of Social Sciences, Ankara.</mixed-citation>
                    </ref>
                                    <ref id="ref13">
                        <label>13</label>
                        <mixed-citation publication-type="journal">Bloom BS, Krathwohl DR, &amp; Masia BB (1956). Taxonomy of educational objectives: The classification of educational goals. New York: McKay.</mixed-citation>
                    </ref>
                                    <ref id="ref14">
                        <label>14</label>
                        <mixed-citation publication-type="journal">Brown, G. A., Bull, J., &amp; Pendlebury, M. (2013). Assessing student learning in higher education. Routledge.</mixed-citation>
                    </ref>
                                    <ref id="ref15">
                        <label>15</label>
                        <mixed-citation publication-type="journal">Bush, M. (2001). A multiple choice test that rewards partial knowledge. Journal of Further and Higher Education, 25(2), 157-163.</mixed-citation>
                    </ref>
                                    <ref id="ref16">
                        <label>16</label>
                        <mixed-citation publication-type="journal">Coe, R., Waring, M., Hedges, L., &amp; Day Ashley, L. (Eds.) (2021). Research methods and methodologies in education. SAGE Publications. https://us.sagepub.com/en-us/nam/research-methods-and-methodologies-in-education/book271175#description</mixed-citation>
                    </ref>
                                    <ref id="ref17">
                        <label>17</label>
                        <mixed-citation publication-type="journal">Crocker, L. &amp; Algina, J. (1986). Introduction to classical and modern test theory. Toronto: Holt, RineHart, and Winston Inc.</mixed-citation>
                    </ref>
                                    <ref id="ref18">
                        <label>18</label>
                        <mixed-citation publication-type="journal">Cruickshank, D. L., Bainer, D. L., &amp; Metcalf, K. K (1995). The act of teaching. New York: McGraw-Hill.</mixed-citation>
                    </ref>
                                    <ref id="ref19">
                        <label>19</label>
                        <mixed-citation publication-type="journal">De Boeck, P., &amp; Wilson, M. (2004). Explanatory item response models: a generalized linear and nonlinear approach. New York, NY: Springer Press.</mixed-citation>
                    </ref>
                                    <ref id="ref20">
                        <label>20</label>
                        <mixed-citation publication-type="journal">De Boeck, P. (2008). Random item IRT models. Psychometrika, 73(4), 533-559.</mixed-citation>
                    </ref>
                                    <ref id="ref21">
                        <label>21</label>
                        <mixed-citation publication-type="journal">Demir, E. (2010). Uluslararası öğrenci değerlendirme programı (PISA) bilişsel alan testlerinde yer alan soru tiplerine göre Türkiye’de öğrenci başarıları (Yayımlanmamış yüksek lisans tezi). Hacettepe Üniversitesi Sosyal Bilimler Enstitüsü, Ankara.</mixed-citation>
                    </ref>
                                    <ref id="ref22">
                        <label>22</label>
                        <mixed-citation publication-type="journal">Dufresne, R. J., Leonard, W. J., &amp; Gerace, W. J. (2002). Marking sense of students&#039; answers to multiple-choice questions. The Physics Teacher, 40(3), 174-180.</mixed-citation>
                    </ref>
                                    <ref id="ref23">
                        <label>23</label>
                        <mixed-citation publication-type="journal">Fischer, G. H. (1973). The linear logistic test model as an instrument in educational research. Acta Psychologica, 37(6), 359–374.</mixed-citation>
                    </ref>
                                    <ref id="ref24">
                        <label>24</label>
                        <mixed-citation publication-type="journal">Fulcher, G., &amp; Davidson, F. (2007). Language testing and assessment. London and New York: Routledge.</mixed-citation>
                    </ref>
                                    <ref id="ref25">
                        <label>25</label>
                        <mixed-citation publication-type="journal">Gardner, R.C., Tremblay, P.F. &amp; Masgoret, A.M. (1997). Towards a Full Model of Second Language Learning: An Empirical Investigation. The Modern Language Journal, 81(3), 344-362.</mixed-citation>
                    </ref>
                                    <ref id="ref26">
                        <label>26</label>
                        <mixed-citation publication-type="journal">Geer, J. G. (1988). What do open-ended questions measure? Public Opinion Quarterly, 52(3), 365–367.</mixed-citation>
                    </ref>
                                    <ref id="ref27">
                        <label>27</label>
                        <mixed-citation publication-type="journal">Hancock, G. R. (1994). Cognitive complexity and the comparability of multiple-choice and constructed-response test formats. The Journal of Experimental Education, 62(2), 143-157.</mixed-citation>
                    </ref>
                                    <ref id="ref28">
                        <label>28</label>
                        <mixed-citation publication-type="journal">Haynie, W. (1994). Effect of Multiple – Choice &amp; short answer test on delayed retention learning. Journal of Technology Education, 6(1). 32-44</mixed-citation>
                    </ref>
                                    <ref id="ref29">
                        <label>29</label>
                        <mixed-citation publication-type="journal">Hmelo-Silver, C. E. (2004). Problem-based learning: What and how do students learn? Educational Psychology Review, 16(3), 235-266.</mixed-citation>
                    </ref>
                                    <ref id="ref30">
                        <label>30</label>
                        <mixed-citation publication-type="journal">Hurd, A. W. (1932). Comparisons of short answer and multiple choice tests covering identical subject content. The Journal of Educational Research, 26(1), 28-30.</mixed-citation>
                    </ref>
                                    <ref id="ref31">
                        <label>31</label>
                        <mixed-citation publication-type="journal">Jennings, S., &amp; Bush, M. (2006). A comparison of conventional and liberal (free-choice) multiple-choice tests. Practical Assessment, Research, and Evaluation, 11(1), 8.</mixed-citation>
                    </ref>
                                    <ref id="ref32">
                        <label>32</label>
                        <mixed-citation publication-type="journal">Kufahi, T.(2003). Measurement &amp; evaluation in special education. Amman: Dar Almasira.</mixed-citation>
                    </ref>
                                    <ref id="ref33">
                        <label>33</label>
                        <mixed-citation publication-type="journal">Lee, H.-S., Liu, O. L. ve Linn, M. C. (2011). Validating measurement of knowledge integration in science using multiple-choice and explanation items. Applied Measurement in Education, 24(2), 115–136.</mixed-citation>
                    </ref>
                                    <ref id="ref34">
                        <label>34</label>
                        <mixed-citation publication-type="journal">Lord, F. (1980). Applications of item response theory to practical testing problems. Hillsdale, NJ: Erlbaum.</mixed-citation>
                    </ref>
                                    <ref id="ref35">
                        <label>35</label>
                        <mixed-citation publication-type="journal">Lukhele, R., Thissen, D., &amp; Wainer, H. (1994). On the relative value of multiple-choice, constructed response, and examinee-selected items on two achievement tests. Journal of Educational Measurement, 31(3), 234–250.</mixed-citation>
                    </ref>
                                    <ref id="ref36">
                        <label>36</label>
                        <mixed-citation publication-type="journal">Martinez, M. E. (1999). Cognition and the question of test item format. Educational Psychologist, 34(4), 207-218.</mixed-citation>
                    </ref>
                                    <ref id="ref37">
                        <label>37</label>
                        <mixed-citation publication-type="journal">Melovitz Vasan, C. A., DeFouw, D. O., Holland, B. K., &amp; Vasan, N. S. (2018). Analysis of testing with multiple choice versus open‐ended questions: Outcome‐based observations in an anatomy course. Anatomical Sciences Education, 11(3), 254-261.</mixed-citation>
                    </ref>
                                    <ref id="ref38">
                        <label>38</label>
                        <mixed-citation publication-type="journal">Organisation for Economic Co-operation and Development [OECD]. (2017a). PISA 2015 technical report. Paris, France: OECD. Retrieved from https://www.oecd.org/pisa/data/2015-technical-report/.</mixed-citation>
                    </ref>
                                    <ref id="ref39">
                        <label>39</label>
                        <mixed-citation publication-type="journal">Organisation for Economic Co-operation and development [OECD]. (2017b). PISA 2015 technical report. Paris, France: OECD. Retrieved from https://www.oecd.org/pisa/data/2015-technical-report/.</mixed-citation>
                    </ref>
                                    <ref id="ref40">
                        <label>40</label>
                        <mixed-citation publication-type="journal">Ormell, C. P. (1974). Bloom&#039;s taxonomy and the objectives of education. Educational Research, 17, 3-18.</mixed-citation>
                    </ref>
                                    <ref id="ref41">
                        <label>41</label>
                        <mixed-citation publication-type="journal">Osterlind, S. J. (1998). Constructing test items: Multiple-choice, constructed-response, performance, and other formats. Dordrecht, Netherlands: Kluwer Academic.</mixed-citation>
                    </ref>
                                    <ref id="ref42">
                        <label>42</label>
                        <mixed-citation publication-type="journal">Robbins, A. (1995). İçindeki devi uyandır. (Çev. B. Çorakçı Dişbudak). İstanbul: İnkılap Yayınevi.</mixed-citation>
                    </ref>
                                    <ref id="ref43">
                        <label>43</label>
                        <mixed-citation publication-type="journal">Ruch, G. M., &amp; Stoddard, G. D. (1925). Comparative reliabilities of five types of objective examinations. Journal of Educational Psychology, 16(2), 89.</mixed-citation>
                    </ref>
                                    <ref id="ref44">
                        <label>44</label>
                        <mixed-citation publication-type="journal">Pepple, D. J., Young, L. E., &amp; Carroll, R. G. (2010). A comparison of student performance in multiple-choice and long essay questions in the MBBS stage I physiology examination at the University of the West Indies (Mona Campus). American Journal of Physiology - Advances in Physiology Education, 34(2), 86–89.</mixed-citation>
                    </ref>
                                    <ref id="ref45">
                        <label>45</label>
                        <mixed-citation publication-type="journal">Phipps, S. D., &amp; Brackbill, M. L. (2009). Relationship between assessment item format and item performance characteristics. American Journal of Pharmaceutical Education, 73(8).</mixed-citation>
                    </ref>
                                    <ref id="ref46">
                        <label>46</label>
                        <mixed-citation publication-type="journal">Pollack, J. M., Rock, D. A., &amp; Jenkins, F. (1992). Advantages and disadvantages of constructed-response item formats in large- scale surveys. Paper presented at the Annual Meeting of the American Educational Research Association, San Francisco.</mixed-citation>
                    </ref>
                                    <ref id="ref47">
                        <label>47</label>
                        <mixed-citation publication-type="journal">Powell, J. L., &amp; Gillespie, C. (1990). Assessment: all tests are not created equally. Paper presented at the Annual Meeting of the American Reading Forum, Sarasota.</mixed-citation>
                    </ref>
                                    <ref id="ref48">
                        <label>48</label>
                        <mixed-citation publication-type="journal">Traub, R. E., &amp; Fisher, C. W. (1997). On the equivalence of constructed-response and multiple-choice tests. Applied Psychological Measurement, 1(3), 355-369.</mixed-citation>
                    </ref>
                                    <ref id="ref49">
                        <label>49</label>
                        <mixed-citation publication-type="journal">Van den Bergh, H. (1990). On the construct validity of multiple-choice items for reading comprehension. Applied Psychological Measurement, 14(1), 1-12.</mixed-citation>
                    </ref>
                                    <ref id="ref50">
                        <label>50</label>
                        <mixed-citation publication-type="journal">Ventouras, E., Triantis, D., Tsiakas, P., &amp; Stergiopoulos, C. (2010). Comparison of examination methods based on multiple-choice questions and constructed-response questions using personal computers. Computers &amp; Education, 54(2), 455-461.</mixed-citation>
                    </ref>
                                    <ref id="ref51">
                        <label>51</label>
                        <mixed-citation publication-type="journal">Wainer, H., &amp; Thissen, D. (1993). Combining multiple-choice and constructed-response test scores: Toward a Marxist theory of test construction. Applied Measurement in Education, 6(2), 103-118.</mixed-citation>
                    </ref>
                                    <ref id="ref52">
                        <label>52</label>
                        <mixed-citation publication-type="journal">Walstad, W. B., &amp; Becker, W. E. (1994). Achievement differences on multiple-choice and essay tests in economics. The American Economic Review, 84(2), 193–196.</mixed-citation>
                    </ref>
                                    <ref id="ref53">
                        <label>53</label>
                        <mixed-citation publication-type="journal">Walstad, W. B. (1998). Multiple choice tests for the economics course. In W. B. Walstad &amp; P. Saunder (Eds.). In teaching undergraduate economics: A handbook for instructors (pp. 287-304), New York: McGraw- Hill.</mixed-citation>
                    </ref>
                                    <ref id="ref54">
                        <label>54</label>
                        <mixed-citation publication-type="journal">Zeidner, M. (1987). Essay versus multiple-choice type classroom exams: the student’s perspective. The Journal of Educational Research, 80(6), 352-358.</mixed-citation>
                    </ref>
                            </ref-list>
                    </back>
    </article>
