@article{PerscheidSiegmundTaeumeletal.2017,
  author = {Perscheid, Michael and Siegmund, Benjamin and Taeumel, Marcel and Hirschfeld, Robert},
  title = {Studying the advancement in debugging practice of professional software developers},
  journal = {Software Quality Journal},
  volume = {25},
  publisher = {Springer},
  address = {Dordrecht},
  issn = {0963-9314},
  doi = {10.1007/s11219-015-9294-2},
  pages = {83--110},
  year = {2017},
  abstract = {In 1997, Henry Lieberman stated that debugging is the dirty little secret of computer science. Since then, several promising debugging technologies have been developed, such as back-in-time debuggers and automatic fault localization methods. However, the most recent study of the state of the art in debugging is more than 15 years old, so it is not clear whether these new approaches have been applied in practice. For that reason, we investigate the current state of debugging in a comprehensive study. First, we review the available literature and learn about current approaches and study results. Second, we observe several professional developers while debugging and interview them about their experiences. Third, we create a questionnaire that serves as the basis for a larger online debugging survey. Based on these results, we present new insights into debugging practice that help to suggest new directions for future research.},
  language = {en}
}

@article{LevyBrunnerKelleretal.2019,
  author = {Levy, Jessica and Brunner, Martin and Keller, Ulrich and Fischbach, Antoine},
  title = {Methodological issues in value-added modeling: an international review from 26 countries},
  journal = {Educational Assessment, Evaluation and Accountability},
  volume = {31},
  number = {3},
  publisher = {Springer},
  address = {Heidelberg},
  issn = {1874-8597},
  doi = {10.1007/s11092-019-09303-w},
  pages = {257--287},
  year = {2019},
  abstract = {Value-added (VA) modeling can be used to quantify teacher and school effectiveness by estimating the effect of pedagogical actions on students' achievement. It is gaining increasing importance in educational evaluation, teacher accountability, and high-stakes decisions. We analyzed 370 empirical studies on VA modeling, focusing on modeling and methodological issues to identify key factors for improvement. The studies stemmed from 26 countries (68\% from the USA). Most studies applied linear regression or multilevel models. Most studies (i.e., 85\%) included prior achievement as a covariate, but only 2\% included noncognitive predictors of achievement (e.g., personality or affective student variables). Fifty-five percent of the studies did not apply statistical adjustments (e.g., shrinkage) to increase the precision of effectiveness estimates, and 88\% included no model diagnostics. We conclude that research on VA modeling can be significantly enhanced regarding the inclusion of covariates, model adjustment and diagnostics, and the clarity and transparency of reporting. What is the added value of attending a certain school or being taught by a certain teacher? To answer this question, the value-added (VA) model was developed. In this model, the actual achievement attained by students attending a certain school or being taught by a certain teacher is juxtaposed with the achievement that is expected for students with the same background characteristics (e.g., pretest scores).
To this end, the VA model can be used to compute a VA score for each school or teacher. If actual achievement is better than expected achievement, there is a positive effect (i.e., a positive VA score) of attending a certain school or being taught by a certain teacher. In other words, VA models have been developed to "make fair comparisons of the academic progress of pupils in different settings" (Tymms 1999, p. 27). Their aim is to operationalize teacher or school effectiveness objectively. Specifically, VA models are often used for accountability purposes and high-stakes decisions (e.g., to allocate financial or personnel resources to schools, or even to decide which teachers should be promoted or dismissed). Consequently, VA modeling is a highly political topic, especially in the USA, where many states have implemented VA or VA-based models for teacher evaluation (Amrein-Beardsley and Holloway 2017; Kurtz 2018). However, this use for high-stakes decisions is highly controversial, and researchers disagree about whether VA scores should be used for decision-making (Goldhaber 2015). For a more exhaustive discussion of the use of VA models for accountability purposes, see, for example, Scherrer (2011). Given the far-reaching impact of VA scores, it is surprising that there is a scarcity of systematic reviews of how VA scores are computed and evaluated and of how this research is reported. To address this gap, we review 370 empirical studies from 26 countries to rigorously examine several key issues in VA modeling: (a) the statistical model (e.g., linear regression, multilevel model) that is used, (b) the model diagnostics and reported statistical parameters that are used to evaluate the quality of the VA model, (c) the statistical adjustments that are made to overcome methodological challenges (e.g., measurement error in the outcome variables), and (d) the covariates (e.g., pretest scores, students' sociodemographic background) that are used when estimating expected achievement. All of this information is critical for meeting the transparency standards defined by the American Educational Research Association (AERA 2006). Transparency is vital for educational research in general, and especially for highly consequential research such as VA modeling. First, transparency matters for researchers: the clearer the description of the model, the easier it is to build on previous research and to replicate previous results. Second, because decisions based on VA scores affect teachers' lives and schools' futures, not only educational agents but also the general public should be able to comprehend how these scores are calculated, so as to allow for public scrutiny. Specifically, given that VA scores can have devastating consequences for teachers and for the students they teach, transparency is particularly important for evaluating the methodology chosen to compute VA models for a given purpose. Such evaluations are essential for answering the question of the extent to which the quality of VA scores justifies basing far-reaching accountability decisions on them.},
  language = {en}
}
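A minimal sketch of the VA computation summarized in the abstract above: expected achievement is estimated by regressing posttest scores on background covariates (here only a pretest score, via ordinary least squares), and each school's VA score is the mean difference between its students' actual and expected achievement. The toy dataset and the names pretest, posttest, and school are invented for illustration; the reviewed studies typically use multilevel models, richer covariate sets, and shrinkage adjustments, none of which are shown here. (BibTeX ignores text outside entries, so this note does not affect processing of the file.)

import numpy as np

# Invented toy data: one value per student, three schools of three students each.
pretest = np.array([48.0, 52.0, 55.0, 60.0, 62.0, 65.0, 70.0, 72.0, 75.0])
posttest = np.array([50.0, 55.0, 54.0, 66.0, 63.0, 70.0, 71.0, 77.0, 80.0])
school = np.array([0, 0, 0, 1, 1, 1, 2, 2, 2])

# Expected achievement: ordinary least squares of posttest on an intercept
# and the pretest covariate (the simplest adjustment the review describes).
X = np.column_stack([np.ones_like(pretest), pretest])
beta, _, _, _ = np.linalg.lstsq(X, posttest, rcond=None)
expected = X.dot(beta)

# VA score per school: mean residual (actual minus expected) of its students.
# A positive score means the school's students outperformed expectation.
residuals = posttest - expected
for s in np.unique(school):
    print("school", s, "VA =", round(float(residuals[school == s].mean()), 2))

In practice, a shrinkage adjustment (e.g., empirical Bayes) would additionally pull the scores of small schools toward zero to reduce sampling noise, one of the adjustments the review found to be frequently missing.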