@article{doi:10.5116/ijme.52c6.d7ef,
  author   = {Cyr, P. R. and Smith, K. A. and Broyles, I. L. and Holt, C. T.},
  title    = {Developing, evaluating and validating a scoring rubric for written case reports},
  journal  = {Int J Med Educ},
  volume   = {5},
  pages    = {18--23},
  year     = {2014},
  doi      = {10.5116/ijme.52c6.d7ef},
  url      = {http://www.ijme.net/archive/5/developing-evaluating-and-validating-a-scoring-rubric/},
  eprint   = {http://www.ijme.net/archive/5/developing-evaluating-and-validating-a-scoring-rubric.pdf},
  abstract = {Objectives: The purpose of this study was to evaluate Family Medicine Clerkship students' writing skills using an anchored scoring rubric. In this study, we report on the assessment of a current scoring rubric (SR) used to grade written case description papers (CDP) for medical students, describe the development of a revised SR with examination of scoring consistency among faculty raters, and report on feedback from students regarding SR revisions and written CDP. Methods: Five faculty members scored a total of eighty-three written CDP using both the Original SR (OSR) and the Revised SR1 (RSR1) during the 2009-2010 academic year. Results: Overall, increased faculty inter-rater reliability was obtained using the RSR1. Additionally, a subset analysis revealed that the five faculty using the Revised SR2 (RSR2) had a high measure of inter-rater reliability on their scoring of this subset of papers, as measured by the intra-class correlation coefficient (ICC = 0.93, p < 0.001). Conclusions: Findings from this research have implications for medical education by highlighting the importance of the assessment and development of reliable evaluation tools for medical student writing projects.},
}