marmoset-papers.bib

@INPROCEEDINGS{spacco-etx2004,
  AUTHOR = {Jaime Spacco and David Hovemeyer and William Pugh},
  TITLE = {An Eclipse-Based Course Project Snapshot and Submission System},
  BOOKTITLE = {3rd Eclipse Technology Exchange Workshop (eTX)},
  ADDRESS = {Vancouver, BC},
  MONTH = {October~24},
  YEAR = {2004},
  PDF = {http://www.cs.umd.edu/~jspacco/marmoset/papers/spacco-etx2004.pdf},
  ABSTRACT = {
Much research has been done on techniques to teach students
how to program.  However, it is usually difficult to quantify
exactly how students work.
Instructors typically only see students' work when
they submit their projects or come to office hours.
Another common problem in introductory programming courses
is that student code is only subjected to rigorous
testing once it has been submitted.
Both of these problems can be viewed as a lack of feedback
between students and instructors.

We have built an Eclipse plugin to address this lack
of feedback.  The plugin has two main functions.
First, it captures, to a central CVS repository,
the complete state of a student's project every time
he or she saves, adds, or removes a file.
This produces a fine-grained history of the evolution of
each student's project.
Second, the plugin allows the student to submit his or her
project to a central server.  The submit server
automatically compiles and performs limited testing of the student's submission,
providing feedback on how close the project
is to fulfilling the project requirements.

Our goal is to provide instructors and researchers
with far more detailed information about how students learn and work,
and provide feedback to students that will help them
focus on achieving the goals of the projects we assign.
}
}
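
A minimal sketch of the snapshot mechanism described above: an Eclipse
IResourceChangeListener that reacts to workspace changes and records each
affected file.  The listener and registration call use the real Eclipse
resources API; the commitToRepository helper is hypothetical, standing in
for whatever CVS client the plugin actually uses.

  import org.eclipse.core.resources.IResource;
  import org.eclipse.core.resources.IResourceChangeEvent;
  import org.eclipse.core.resources.IResourceChangeListener;
  import org.eclipse.core.resources.IResourceDelta;
  import org.eclipse.core.resources.ResourcesPlugin;

  public class SnapshotListener implements IResourceChangeListener {

      public void resourceChanged(IResourceChangeEvent event) {
          IResourceDelta delta = event.getDelta();
          if (delta == null) {
              return;
          }
          try {
              // Visit every resource in the delta and snapshot files
              // that were added, removed, or changed (i.e. saved).
              delta.accept(d -> {
                  int kind = d.getKind();
                  if (kind == IResourceDelta.ADDED
                          || kind == IResourceDelta.REMOVED
                          || kind == IResourceDelta.CHANGED) {
                      commitToRepository(d.getResource());
                  }
                  return true; // keep visiting children
              });
          } catch (Exception e) {
              // A failed snapshot must never interrupt the student's work.
          }
      }

      // Hypothetical helper: commit one resource to the central CVS
      // repository; the real plugin's transport is not shown here.
      private void commitToRepository(IResource r) {
          System.out.println("snapshot: " + r.getFullPath());
      }

      public static void register() {
          ResourcesPlugin.getWorkspace().addResourceChangeListener(
                  new SnapshotListener(), IResourceChangeEvent.POST_CHANGE);
      }
  }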

@INPROCEEDINGS{spacco-msr2005,
  AUTHOR = {Jaime Spacco and Jaymie Strecker and David Hovemeyer and William Pugh},
  TITLE = {Software Repository Mining with {Marmoset}:
           An Automated Programming Project Snapshot and
           Testing System},
  BOOKTITLE = {Proceedings of the Mining Software Repositories Workshop (MSR 2005)},
  YEAR = {2005},
  MONTH = {May},
  ADDRESS = {St. Louis, Missouri, USA},
  PDF = {http://www.cs.umd.edu/~jspacco/marmoset/papers/spacco-msr2005.pdf},
  ABSTRACT = {
  Most computer science educators hold strong opinions about the
``right'' approach to teaching introductory level programming.
Unfortunately, we have comparatively little hard evidence about the
effectiveness of these various approaches because we generally lack
the infrastructure to obtain sufficiently detailed data about novices'
programming habits.

To gain insight into students' programming habits, we developed
Marmoset, a project snapshot and submission system.  Like existing
project submission systems, Marmoset allows students to submit
versions of their projects to a central server, which automatically
tests them and records the results.  Unlike existing systems, Marmoset
also collects fine-grained code snapshots as students work on
projects: each time a student saves her work, it is automatically
committed to a CVS repository.

We believe the data collected by Marmoset will be a rich source of
insight about learning to program and software evolution in general.
To validate the effectiveness of our tool, we performed an experiment
which found a statistically significant correlation between warnings
reported by a static analysis tool and failed unit tests.

To make fine-grained code evolution data more useful, we present a
data schema which allows a variety of useful queries to be more easily
formulated and answered.

  }
}
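
The paper's schema itself is not reproduced in the abstract; the sketch
below models one plausible shape for it.  All table and column names are
assumptions, and the query merely illustrates the kind of question that
fine-grained snapshot data makes easy to ask.  The demo assumes an H2
JDBC driver on the classpath.

  import java.sql.Connection;
  import java.sql.DriverManager;
  import java.sql.PreparedStatement;
  import java.sql.ResultSet;

  public class SnapshotQueries {

      // Assumed schema, loosely modeled on the paper's description:
      // each saved snapshot is a row, and each (snapshot, test) pair
      // records a pass/fail outcome.
      static final String SNAPSHOTS =
          "CREATE TABLE snapshots ("
        + "  snapshot_id INTEGER PRIMARY KEY,"
        + "  student_id  INTEGER NOT NULL,"
        + "  project_id  INTEGER NOT NULL,"
        + "  commit_time TIMESTAMP NOT NULL)";
      static final String OUTCOMES =
          "CREATE TABLE test_outcomes ("
        + "  snapshot_id INTEGER REFERENCES snapshots,"
        + "  test_name   VARCHAR(64) NOT NULL,"
        + "  passed      BOOLEAN NOT NULL)";

      // Example query: for one project, when did each student first
      // pass a given test?  With per-save snapshots this is a simple
      // aggregate instead of guesswork over explicit submissions.
      static final String FIRST_PASS =
          "SELECT s.student_id, MIN(s.commit_time) AS first_pass "
        + "FROM snapshots s JOIN test_outcomes t "
        + "  ON s.snapshot_id = t.snapshot_id "
        + "WHERE s.project_id = ? AND t.test_name = ? AND t.passed "
        + "GROUP BY s.student_id";

      public static void main(String[] args) throws Exception {
          Connection c = DriverManager.getConnection("jdbc:h2:mem:demo");
          c.createStatement().execute(SNAPSHOTS);
          c.createStatement().execute(OUTCOMES);
          PreparedStatement q = c.prepareStatement(FIRST_PASS);
          q.setInt(1, 1);
          q.setString(2, "testAddItem");
          ResultSet rs = q.executeQuery(); // empty here, but well-formed
          while (rs.next()) {
              System.out.println(rs.getInt(1) + " " + rs.getTimestamp(2));
          }
      }
  }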

@INPROCEEDINGS{hovemeyer-paste2005,
  EDITOR = {Michael D. Ernst and Thomas Jensen},
  AUTHOR = {David Hovemeyer and Jaime Spacco and William Pugh},
  TITLE = {Evaluating and Tuning a Static Analysis to Find Null Pointer Bugs},
  BOOKTITLE = {PASTE '05: Proceedings of the 6th ACM SIGPLAN-SIGSOFT Workshop on Program Analysis for Software Tools and Engineering},
  PUBLISHER = {ACM},
  ADDRESS = {Lisbon, Portugal},
  MONTH = {September~5--6},
  YEAR = {2005},
  PDF = {http://www.cs.umd.edu/~jspacco/marmoset/papers/hovemeyer-paste2005.pdf},
  ABSTRACT = {
Using static analysis to detect memory access errors, such as null
pointer dereferences, is not a new problem.  However, much of the
previous work has used rather sophisticated analysis techniques in
order to detect such errors.

In this paper we show that simple analysis techniques can be used to
identify many such software defects, both in production code and in
student code. In order to make our analysis both simple and effective,
we use a non-standard analysis which is neither complete nor
sound. However, we find that it is effective at finding an interesting
class of software defects.

We describe the basic analysis we perform, as well as the additional
errors we can detect using techniques such as annotations and
inter-procedural analysis.

In studies of both production software and student projects, we find
false positive rates of around 20\% or less.  In the student code
base, we find that our static analysis techniques are able to pinpoint
50\% to 80\% of the defects leading to a null pointer exception at
runtime.
}
}
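
For a flavor of the defect class involved, the fragment below shows the
two complementary patterns that a simple, intraprocedural null-pointer
analysis can flag.  It is an illustrative example written for this
bibliography, not output from or code in the authors' tool.

  public class NullExample {

      // A simple forward dataflow analysis can prove that on the
      // branch where 'name' is null, the dereference below must fail.
      static int length(String name) {
          if (name == null) {
              System.out.println("no name given");
          }
          // Reached on both branches, including name == null:
          // guaranteed NullPointerException on that path.
          return name.length();
      }

      // The converse pattern: the comparison is redundant because
      // 'tag' was already dereferenced, so it cannot be null here
      // (the method would have thrown before reaching the check).
      static String label(String tag) {
          String upper = tag.toUpperCase();
          if (tag != null) {         // always true at this point
              return upper;
          }
          return "";
      }
  }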

@INPROCEEDINGS{spacco-iticse2006,
  AUTHOR = {Jaime Spacco and David Hovemeyer and William Pugh and Jeff Hollingsworth and Nelson Padua-Perez and Fawzi Emad},
  TITLE = {Experiences with {Marmoset}: Designing and Using an Advanced Submission and Testing System for Programming Courses},
  PDF = {papers/spacco-iticse2006.pdf},
  BOOKTITLE = {ITiCSE '06: Proceedings of the 11th annual conference on Innovation and technology in computer science education},
  LOCATION = {Bologna, Italy},
  DATE = {June 26--28},
  ISBN = {1-59593-055-8},
  PUBLISHER = {ACM Press},
  YEAR = {2006},
  ABSTRACT = {
Two important questions regarding automated submission and testing
systems are: What kind of feedback should we give students as they
work on their programming assignments, and how can we study in more
detail the programming assignment development process of novices?

To address the issue of feedback, Marmoset provides students with
limited access to the results of the instructor's private test cases
using a novel token-based incentive system.  This encourages students
both to start their work early and to think critically about their
work.  In addition, because students submit their work early,
instructors and TAs can monitor all students' progress on test cases.
This allows instructors to identify challenging test cases
early and either update the project specification or spend additional
time in lecture or lab sessions covering difficult material.

To study and better understand the development process of students,
Marmoset can be configured to transparently capture snapshots to a
central repository every time students save their files.  These
detailed development histories offer a unique, detailed perspective of
each student's progress on a programming assignment, from the first
line of code written and saved all the way through the final edit
before the final submission.  This type of data has proven extremely
valuable for various uses, from mining new bug patterns to evaluating
existing bug-finding tools.

In this paper, we describe our initial experiences using Marmoset in
several introductory computer science courses,
from the perspectives of both instructors and students.  We also describe some initial
research results from analyzing the student snapshot database.
}
}
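
A minimal sketch of how a token-based incentive scheme like the one
described might meter access to the instructor's private test results.
The pool size and regeneration period are assumed parameters, not
necessarily the values Marmoset uses.

  import java.util.ArrayDeque;
  import java.util.Deque;

  public class ReleaseTokens {

      private final int poolSize;      // tokens a student may hold
      private final long regenMillis;  // time for a spent token to return
      private final Deque<Long> spentAt = new ArrayDeque<>();

      public ReleaseTokens(int poolSize, long regenMillis) {
          this.poolSize = poolSize;
          this.regenMillis = regenMillis;
      }

      // Tokens regenerate implicitly: a spend older than regenMillis
      // no longer counts against the pool.
      public synchronized int available(long now) {
          while (!spentAt.isEmpty()
                  && now - spentAt.peekFirst() >= regenMillis) {
              spentAt.removeFirst();
          }
          return poolSize - spentAt.size();
      }

      // Returns true if the student may see the private test results,
      // consuming one token; false means waiting for regeneration.
      public synchronized boolean spend(long now) {
          if (available(now) <= 0) {
              return false;
          }
          spentAt.addLast(now);
          return true;
      }
  }

For example, new ReleaseTokens(3, 24L * 60 * 60 * 1000) would grant three
tokens, each returning a day after it is spent; pruning old spends inside
available() makes regeneration a side effect of checking the balance.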

@TECHREPORT{cs-tr-4769,
  AUTHOR = {Jaime Spacco and David Hovemeyer and William Pugh and Jeff Hollingsworth and Nelson Padua-Perez and Fawzi Emad},
  TITLE = {Experiences with {Marmoset}},
  NUMBER = {CS-TR-4769},
  INSTITUTION = {University of Maryland, College Park},
  PDF = {papers/cs-tr-4769.pdf},
  YEAR = {2006},
  ABSTRACT = {
Many project submission and testing systems have been developed.
These systems can be beneficial for both students and instructors:
students benefit from having automatic feedback on which parts of
their projects work correctly and which parts still need work, while
instructors benefit from real-time feedback on the progress of
individual students and the class as a whole.

A limitation of traditional project submission and testing systems is
that they only track the project versions that students explicitly
submit; what students are doing between submissions remains shrouded
in mystery.  Based on experience, we know that many students who hit
a stumbling block resort to unproductive trial-and-error programming.
As instructors, we would like to know what these stumbling blocks are.

To help understand how students work, we developed Marmoset, a
project submission, testing, and snapshot system.  The system has two
novel features.  First, it employs a token-based incentive system to
encourage students to start work early and to think critically about
their work.  Second, Marmoset can be configured to use CVS to
transparently capture a project snapshot every time students save a
file.  The detailed development history thus captured offers a
fine-grained view of each student's progress.

In this paper, we describe initial experiences with Marmoset, from
the perspectives of both instructors and students.  We also describe
some initial research results from analyzing the student snapshot
database.
}
}

@INPROCEEDINGS{spacco06aaai,
  AUTHOR = {Jaime Spacco and Titus Winters and Tom Payne},
  TITLE = {Inferring Use Cases from Unit Testing},
  BOOKTITLE = {AAAI Workshop on Educational Data Mining},
  YEAR = {2006},
  MONTH = {July},
  LOCATION = {Boston, MA, USA},
  PUBLISHER = {AAAI Press},
  ADDRESS = {Menlo Park, CA, USA},
  PDF = {papers/WS0606SpaccoJ.pdf},
  ABSTRACT = {
We present techniques for analyzing score matrices of unit test
outcomes from snapshots of CS2 student code throughout the development
cycle.  This analysis includes a technique for estimating the number
of fundamentally different features in the unit tests, as well as a
survey of which algorithms can best match human intuition when
grouping tests into related clusters.  Unlike previous investigations
into topic clustering of score matrices, we successfully identify
algorithms that perform with good accuracy on this task.  We also
discuss the data gathered by the Marmoset system, which has been used
to collect over 100,000 snapshots of student programs and associated test
results.
}
}
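
As a rough illustration of the score-matrix analysis described above,
the sketch below groups unit tests whose pass/fail columns agree on
nearly every snapshot.  The agreement measure, threshold, and greedy
single-link merging are assumptions made for illustration, not the
clustering algorithms the paper actually surveys.

  import java.util.ArrayList;
  import java.util.List;

  public class TestClusters {

      // Fraction of snapshots on which two tests had the same outcome.
      static double agreement(boolean[] a, boolean[] b) {
          int same = 0;
          for (int i = 0; i < a.length; i++) {
              if (a[i] == b[i]) same++;
          }
          return (double) same / a.length;
      }

      // Greedy single-link grouping: a test joins a cluster if it
      // agrees with any member above the threshold.  A deliberate
      // simplification of real topic-clustering algorithms.
      static List<List<Integer>> cluster(boolean[][] m, double threshold) {
          List<List<Integer>> clusters = new ArrayList<>();
          for (int t = 0; t < m.length; t++) {
              List<Integer> home = null;
              for (List<Integer> c : clusters) {
                  for (int member : c) {
                      if (agreement(m[t], m[member]) >= threshold) {
                          home = c;
                          break;
                      }
                  }
                  if (home != null) break;
              }
              if (home == null) {
                  home = new ArrayList<>();
                  clusters.add(home);
              }
              home.add(t);
          }
          return clusters;
      }

      public static void main(String[] args) {
          // rows = tests, columns = snapshots; true = test passed
          boolean[][] m = {
              { true,  true,  false, true  },  // test 0
              { true,  true,  false, true  },  // test 1: agrees with 0
              { false, false, true,  false },  // test 2: its own feature
          };
          System.out.println(cluster(m, 0.9));  // [[0, 1], [2]]
      }
  }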

