This paper describes the Generic Automated Marking Environment (GAME) and provides a detailed analysis of its performance in assessing student programming projects and exercises. GAME has been designed to automatically assess programming assignments written in a variety of languages based on the "structure" of the source code and the correctness of the program's output. Currently, the system is able to mark programs written in Java, C++ and the C language. To use the system, instructors are required to provide a simple "marking schema" for each given assessment item, which includes pertinent information such as the location of files and the model solution. In this research, GAME has been tested on a number of student programming exercises and assignments and its performance has been compared against that of a human marker. An in-depth statistical analysis of the comparison is presented, providing encouraging results and directions for employing GAME as a tool for teaching and learning.
(private-note)The generic design of the system makes it a versatile tool for marking different types of assessment items. Overall, the results obtained from the experiments conducted suggest that the GAME system is very promising for use in a real-world teaching environment. It can handle submissions in different programming languages, which comprise single or multiple source files. GAME also has the ability to accept and implement different marking strategies. Potentially, the advantages of employing an automated marking system like GAME are evident in terms of student-centered learning (ample and timely feedback) and objective, transparent mark allocation.
%0 Journal Article
%1 citeulike:8332473
%A Blumenstein, M.
%A Green, S.
%A Fogelman, S.
%A Nguyen, A.
%A Muthukkumarasamy, V.
%D 2008
%J Computers & Education
%K era-a-journal, grading-marking-students, homework, programming, teaching-aids-devices
%N 4
%P 1203--1216
%R 10.1016/j.compedu.2006.11.006
%T Performance analysis of GAME: A generic automated marking environment
%U http://dx.doi.org/10.1016/j.compedu.2006.11.006
%V 50
%X This paper describes the Generic Automated Marking Environment (GAME) and provides a detailed analysis of its performance in assessing student programming projects and exercises. GAME has been designed to automatically assess programming assignments written in a variety of languages based on the "structure" of the source code and the correctness of the program's output. Currently, the system is able to mark programs written in Java, C++ and the C language. To use the system, instructors are required to provide a simple "marking schema" for each given assessment item, which includes pertinent information such as the location of files and the model solution. In this research, GAME has been tested on a number of student programming exercises and assignments and its performance has been compared against that of a human marker. An in-depth statistical analysis of the comparison is presented, providing encouraging results and directions for employing GAME as a tool for teaching and learning.
@article{citeulike:8332473,
  abstract             = {This paper describes the Generic Automated Marking Environment (GAME) and provides a detailed analysis of its performance in assessing student programming projects and exercises. GAME has been designed to automatically assess programming assignments written in a variety of languages based on the ``structure'' of the source code and the correctness of the program's output. Currently, the system is able to mark programs written in Java, C++ and the C language. To use the system, instructors are required to provide a simple ``marking schema'' for each given assessment item, which includes pertinent information such as the location of files and the model solution. In this research, GAME has been tested on a number of student programming exercises and assignments and its performance has been compared against that of a human marker. An in-depth statistical analysis of the comparison is presented, providing encouraging results and directions for employing GAME as a tool for teaching and learning.},
  added-at             = {2011-07-27T01:37:06.000+0200},
  author               = {Blumenstein, M. and Green, S. and Fogelman, S. and Nguyen, A. and Muthukkumarasamy, V.},
  biburl               = {https://www.bibsonomy.org/bibtex/2723a3261933c3e291f445bfbd8f9e188/rubrics},
  citeulike-article-id = {8332473},
  citeulike-linkout-0  = {http://dx.doi.org/10.1016/j.compedu.2006.11.006},
  comment              = {(private-note)The generic design of the system makes it a versatile tool for marking different types of assessment items. Overall, the results obtained from the experiments conducted suggest that the GAME system is very promising for use in a real-world teaching environment. It can handle submissions in different programming languages, which comprise single or multiple source files. GAME also has the ability to accept and implement different marking strategies. Potentially, the advantages of employing an automated marking system like GAME are evident in terms of student-centered learning (ample and timely feedback) and objective, transparent mark allocation.},
  doi                  = {10.1016/j.compedu.2006.11.006},
  interhash            = {4e53ba6eb06c8b2d114ebf21696234e0},
  intrahash            = {723a3261933c3e291f445bfbd8f9e188},
  journal              = {Computers \& Education},
  keywords             = {era-a-journal, grading-marking-students, homework, programming, teaching-aids-devices},
  month                = may,
  number               = {4},
  pages                = {1203--1216},
  posted-at            = {2010-11-30 07:25:40},
  priority             = {4},
  timestamp            = {2011-07-27T01:37:25.000+0200},
  title                = {Performance analysis of {GAME}: A generic automated marking environment},
  url                  = {https://doi.org/10.1016/j.compedu.2006.11.006},
  volume               = {50},
  year                 = {2008},
}