@article{HusainVasishthSrinivasan2015,
  author    = {Husain, Samar and Vasishth, Shravan and Srinivasan, Narayanan},
  title     = {Integration and Prediction Difficulty in {Hindi} Sentence Comprehension: Evidence from an Eye-Tracking Corpus},
  journal   = {Journal of Eye Movement Research},
  volume    = {8},
  number    = {2},
  pages     = {12},
  year      = {2015},
  publisher = {International Group for Eye Movement Research},
  address   = {Bern},
  issn      = {1995-8692},
  abstract  = {This is the first attempt at characterizing reading difficulty in Hindi using naturally occurring sentences. We created the Potsdam-Allahabad Hindi Eyetracking Corpus by recording eye-movement data from 30 participants at the University of Allahabad, India. The target stimuli were 153 sentences selected from the beta version of the Hindi-Urdu treebank. We find that word- or low-level predictors (syllable length, unigram and bigram frequency) affect first-pass reading times, regression path duration, total reading time, and outgoing saccade length. An increase in syllable length results in longer fixations, and an increase in word unigram and bigram frequency leads to shorter fixations. Longer syllable length and higher frequency lead to longer outgoing saccades. We also find that two predictors of sentence comprehension difficulty, integration and storage cost, have an effect on reading difficulty. Integration cost (Gibson, 2000) was approximated by calculating the distance (in words) between a dependent and head; and storage cost (Gibson, 2000), which measures difficulty of maintaining predictions, was estimated by counting the number of predicted heads at each point in the sentence. We find that integration cost mainly affects outgoing saccade length, and storage cost affects total reading times and outgoing saccade length. Thus, word-level predictors have an effect in both early and late measures of reading time, while predictors of sentence comprehension difficulty tend to affect later measures. This is, to our knowledge, the first demonstration using eye-tracking that both integration and storage cost influence reading difficulty.},
  language  = {en},
}

@article{YangWangYanetal.2012,
  author    = {Yang, Jiongjiong and Wang, Aobing and Yan, Ming and Zhu, Zijian and Chen, Cheng and Wang, Yizhou},
  title     = {Distinct Processing for Pictures of Animals and Objects: Evidence from Eye Movements},
  journal   = {Emotion},
  volume    = {12},
  number    = {3},
  pages     = {540--551},
  year      = {2012},
  publisher = {American Psychological Association},
  address   = {Washington},
  issn      = {1528-3542},
  doi       = {10.1037/a0026848},
  abstract  = {Many studies have suggested that emotional stimuli orient and engage attention. There is also evidence that animate stimuli, such as those from humans and animals, cause attentional bias. However, categorical and emotional factors are usually mixed, and it is unclear to what extent human context influences attentional allocation. To address this issue, we tracked participants' eye movements while they viewed pictures with animals and inanimate images (i.e., category) as focal objects. These pictures had either negative or neutral emotional valence, and either human body parts or nonhuman parts were near the focal objects (i.e., context). The picture's valence, arousal, position, size, and most of the low-level visual features were matched across categories. The results showed that nonhuman animals were more likely to be attended to and to be attended to for longer times than inanimate objects. The same pattern held for the human contexts (vs. nonhuman contexts). The effects of emotional valence, category, and context interacted. Specifically, in images with a negative valence, focal animals and objects with human context had comparable numbers of gaze fixations and gaze duration. These results highlighted the attentional bias to animate parts of a picture and clarified that the effects of category, valence, and picture context interacted to influence attentional allocation.},
  language  = {en},
}

@article{LagoSloggettSchlueteretal.2017,
  author    = {Lago, Sol and Sloggett, Shayne and Schl{\"u}ter, Zoe and Chow, Wing Yee and Williams, Alexander and Lau, Ellen and Phillips, Colin},
  title     = {Coreference and Antecedent Representation Across Languages},
  journal   = {Journal of Experimental Psychology: Learning, Memory, and Cognition},
  volume    = {43},
  pages     = {795--817},
  year      = {2017},
  publisher = {American Psychological Association},
  address   = {Washington},
  issn      = {0278-7393},
  doi       = {10.1037/xlm0000343},
  language  = {en},
}