@article{EngelmannVasishthEngbertetal.2013, author = {Engelmann, Felix and Vasishth, Shravan and Engbert, Ralf and Kliegl, Reinhold}, title = {A framework for modeling the interaction of syntactic processing and eye movement control}, series = {Topics in Cognitive Science}, volume = {5}, journal = {Topics in Cognitive Science}, number = {3}, publisher = {Wiley-Blackwell}, address = {Hoboken}, issn = {1756-8757}, doi = {10.1111/tops.12026}, pages = {452 -- 474}, year = {2013}, abstract = {We explore the interaction between oculomotor control and language comprehension on the sentence level using two well-tested computational accounts of parsing difficulty. Previous work (Boston, Hale, Vasishth, \& Kliegl, 2011) has shown that surprisal (Hale, 2001; Levy, 2008) and cue-based memory retrieval (Lewis \& Vasishth, 2005) are significant and complementary predictors of reading time in an eyetracking corpus. It remains an open question how the sentence processor interacts with oculomotor control. Using a simple linking hypothesis proposed in Reichle, Warren, and McConnell (2009), we integrated both measures with the eye movement model EMMA (Salvucci, 2001) inside the cognitive architecture ACT-R (Anderson et al., 2004). We built a reading model that could initiate short Time Out regressions (Mitchell, Shen, Green, \& Hodgson, 2008) that compensate for slow postlexical processing. This simple interaction enabled the model to predict the re-reading of words based on parsing difficulty. The model was evaluated in different configurations on the prediction of frequency effects on the Potsdam Sentence Corpus. The extension of EMMA with postlexical processing improved its predictions and reproduced re-reading rates and durations with a reasonable fit to the data. This demonstration, based on simple and independently motivated assumptions, serves as a foundational step toward a precise investigation of the interaction between high-level language processing and eye movement control.}, language = {en} } @article{McCurdyKentnerVasishth2013, author = {McCurdy, Kate and Kentner, Gerrit and Vasishth, Shravan}, title = {Implicit prosody and contextual bias in silent reading}, series = {Journal of Eye Movement Research}, volume = {6}, journal = {Journal of Eye Movement Research}, number = {2}, publisher = {International Group for Eye Movement Research}, address = {Bern}, issn = {1995-8692}, pages = {17}, year = {2013}, abstract = {Eye-movement research on implicit prosody has found effects of lexical stress on syntactic ambiguity resolution, suggesting that metrical well-formedness constraints interact with syntactic category assignment. Building on these findings, the present eyetracking study investigates whether contextual bias can modulate the effects of metrical structure on syntactic ambiguity resolution in silent reading. Contextual bias and potential stress-clash in the ambiguous region were crossed in a 2 x 2 design. Participants read biased context sentences followed by temporarily ambiguous test sentences. In the three-word ambiguous region, main effects of lexical stress were dominant, while early effects of context were absent. Potential stress clash yielded a significant increase in first-pass regressions and re-reading probability across the three words.
In the disambiguating region, the disambiguating word itself showed increased processing difficulty (lower skipping and increased re-reading probability) when the disambiguation engendered a stress clash configuration, while the word immediately following showed main effects of context in those same measures. Taken together, effects of lexical stress upon eye movements were swift and pervasive across first-pass and second-pass measures, while effects of context were relatively delayed. These results indicate a strong role for implicit meter in guiding parsing, one that appears insensitive to higher-level constraints. Our findings are problematic for two classes of models, the two-stage garden-path model and the constraint-based competition-integration model, but can be explained by a variation on the two-stage model, the unrestricted race model.}, language = {en} } @article{VasishthChenLietal.2013, author = {Vasishth, Shravan and Chen, Zhong and Li, Qiang and Guo, Gueilan}, title = {Processing Chinese relative clauses: evidence for the subject-relative advantage}, series = {PLoS ONE}, volume = {8}, journal = {PLoS ONE}, number = {10}, publisher = {PLoS}, address = {San Francisco}, issn = {1932-6203}, doi = {10.1371/journal.pone.0077006}, pages = {15}, year = {2013}, abstract = {A general fact about language is that subject relative clauses are easier to process than object relative clauses. Recently, several self-paced reading studies have presented surprising evidence that object relatives in Chinese are easier to process than subject relatives. We carried out three self-paced reading experiments that attempted to replicate these results. Two of our three studies found a subject-relative preference, and the third study found an object-relative advantage. Using a random-effects Bayesian meta-analysis of fifteen studies (including our own), we show that the overall current evidence for the subject-relative advantage is quite strong (approximate posterior probability of a subject-relative advantage given the data: 78-80\%). We argue that retrieval/integration-based accounts would have difficulty explaining all three experimental results. These findings are important because they narrow the theoretical space by limiting the role of an important class of explanation (retrieval/integration cost), at least for relative clause processing in Chinese.}, language = {en} } @article{vonderMalsburgVasishth2013, author = {von der Malsburg, Titus Raban and Vasishth, Shravan}, title = {Scanpaths reveal syntactic underspecification and reanalysis strategies}, series = {Language and Cognitive Processes}, volume = {28}, journal = {Language and Cognitive Processes}, number = {10}, publisher = {Wiley}, address = {Hove}, issn = {0169-0965}, doi = {10.1080/01690965.2012.728232}, pages = {1545 -- 1578}, year = {2013}, abstract = {What theories best characterise the parsing processes triggered upon encountering ambiguity, and what effects do these processes have on eye movement patterns in reading? The present eye-tracking study, which investigated processing of attachment ambiguities of an adjunct in Spanish, suggests that readers sometimes underspecify attachment to save memory resources, consistent with the good-enough account of parsing. Our results confirm a surprising prediction of the good-enough account: high-capacity readers commit to an attachment decision more often than low-capacity participants, leading to more errors and a greater need to reanalyse in garden-path sentences.
These results emerged only when we separated functionally different types of regressive eye movements using a scanpath analysis; conventional eye-tracking measures alone would have led to different conclusions. The scanpath analysis also showed that rereading was the dominant strategy for recovering from garden-pathing. Our results may also have broader implications for models of reading processes: reanalysis effects in eye movements occurred late, which suggests that the coupling of oculo-motor control and the parser may not be as tight as assumed in current computational models of eye movement control in reading.}, language = {en} } @article{BurchertHanneVasishth2013, author = {Burchert, Frank and Hanne, Sandra and Vasishth, Shravan}, title = {Sentence comprehension disorders in aphasia: the concept of chance performance revisited}, series = {Aphasiology: An International, Interdisciplinary Journal}, volume = {27}, journal = {Aphasiology: An International, Interdisciplinary Journal}, number = {1}, publisher = {Wiley}, address = {Hove}, issn = {0268-7038}, doi = {10.1080/02687038.2012.730603}, pages = {112 -- 125}, year = {2013}, abstract = {Background: In behavioural tests of sentence comprehension in aphasia, correct and incorrect responses are often randomly distributed. Such a pattern of chance performance is a typical trait of Broca's aphasia, but can be found in other aphasic syndromes as well. Many researchers have argued that chance behaviour is the result of a guessing strategy, which is adopted in the face of a syntactic breakdown in sentence processing. Aims: Capitalising on new evidence from recent studies investigating online sentence comprehension in aphasia using the visual world paradigm, the aim of this paper is to review the concept of chance performance as a reflection of a syntactic impairment in sentence processing and to re-examine the conventional interpretation of chance performance as a guessing behaviour. Main Contribution: Based on a review of recent evidence from visual world paradigm studies, we argue that the assumption of chance performance equalling guessing is not necessarily compatible with actual real-time parsing procedures in people with aphasia. We propose a reinterpretation of the concept of chance performance by assuming that there are two distinct processing mechanisms underlying sentence comprehension in aphasia. Correct responses are always the result of normal-like parsing mechanisms, even in those cases where the overall performance pattern is at chance. Incorrect responses, on the other hand, are the result of intermittent deficiencies of the parser. Hence, the random guessing behaviour that persons with aphasia often display does not necessarily reflect a syntactic breakdown in sentence comprehension and a random selection between alternatives. Instead it should be regarded as a result of temporarily deficient parsing procedures in otherwise normal-like comprehension routines. Conclusion: Our conclusion is that the consideration of behavioural offline data alone may not be sufficient to interpret performance in language tests and subsequently draw theoretical conclusions about language impairments.
Rather, it is important to call on additional data from online studies that look at language processing in real time in order to gain a comprehensive picture of the syntactic comprehension abilities of people with aphasia and possible underlying deficits.}, language = {en} } @misc{VasishthvonderMalsburgEngelmann2013, author = {Vasishth, Shravan and von der Malsburg, Titus Raban and Engelmann, Felix}, title = {What eye movements can tell us about sentence comprehension}, series = {Wiley Interdisciplinary Reviews: Cognitive Science}, volume = {4}, journal = {Wiley Interdisciplinary Reviews: Cognitive Science}, number = {2}, publisher = {Wiley}, address = {San Francisco}, issn = {1939-5078}, doi = {10.1002/wcs.1209}, pages = {125 -- 134}, year = {2013}, abstract = {Eye movement data have proven to be very useful for investigating human sentence processing. Eyetracking research has addressed a wide range of questions, such as recovery mechanisms following garden-pathing, the timing of processes driving comprehension, the role of anticipation and expectation in parsing, the role of semantic, pragmatic, and prosodic information, and so on. However, there are some limitations regarding the inferences that can be made on the basis of eye movements. One relates to the nontrivial interaction between parsing and the eye movement control system, which complicates the interpretation of eye movement data. Detailed computational models that integrate parsing with eye movement control theories have the potential to unpack the complexity of eye movement data and can therefore aid in the interpretation of eye movements. Another limitation is the difficulty of capturing spatiotemporal patterns in eye movements using the traditional word-based eyetracking measures. Recent research has demonstrated the relevance of these patterns and has shown how they can be analyzed. In this review, we focus on reading, and present examples demonstrating how eye movement data reveal what events unfold when the parser runs into difficulty, and how the parsing system interacts with eye movement control.}, language = {en} }