@article{YangWangYanetal.2012,
  author    = {Yang, Jiongjiong and Wang, Aobing and Yan, Ming and Zhu, Zijian and Chen, Cheng and Wang, Yizhou},
  title     = {Distinct processing for pictures of animals and objects: Evidence from eye movements},
  journal   = {Emotion: a new journal from the American Psychological Association},
  volume    = {12},
  number    = {3},
  publisher = {American Psychological Association},
  address   = {Washington},
  issn      = {1528-3542},
  doi       = {10.1037/a0026848},
  pages     = {540--551},
  year      = {2012},
  abstract  = {Many studies have suggested that emotional stimuli orient and engage attention. There is also evidence that animate stimuli, such as those from humans and animals, cause attentional bias. However, categorical and emotional factors are usually mixed, and it is unclear to what extent human context influences attentional allocation. To address this issue, we tracked participants' eye movements while they viewed pictures with animals and inanimate images (i.e., category) as focal objects. These pictures had either negative or neutral emotional valence, and either human body parts or nonhuman parts were near the focal objects (i.e., context). The picture's valence, arousal, position, size, and most of the low-level visual features were matched across categories. The results showed that nonhuman animals were more likely to be attended to and to be attended to for longer times than inanimate objects. The same pattern held for the human contexts (vs. nonhuman contexts). The effects of emotional valence, category, and context interacted. Specifically, in images with a negative valence, focal animals and objects with human context had comparable numbers of gaze fixations and gaze duration. These results highlighted the attentional bias to animate parts of a picture and clarified that the effects of category, valence, and picture context interacted to influence attentional allocation.},
  language  = {en},
}

@article{PassowWesterhausenWartenburgeretal.2012,
  author    = {Passow, Susanne and Westerhausen, Rene and Wartenburger, Isabell and Hugdahl, Kenneth and Heekeren, Hauke R. and Lindenberger, Ulman and Li, Shu-Chen},
  title     = {Human aging compromises attentional control of auditory perception},
  journal   = {Psychology and aging},
  volume    = {27},
  number    = {1},
  publisher = {American Psychological Association},
  address   = {Washington},
  issn      = {0882-7974},
  doi       = {10.1037/a0025667},
  pages     = {99--105},
  year      = {2012},
  abstract  = {Older adults often experience hearing difficulties in multitalker situations. Attentional control of auditory perception is crucial in situations where a plethora of auditory inputs compete for further processing. We combined an intensity-modulated dichotic listening paradigm with attentional manipulations to study adult age differences in the interplay between perceptual saliency and attentional control of auditory processing. When confronted with two competing sources of verbal auditory input, older adults modulated their attention less flexibly and were more driven by perceptual saliency than younger adults. These findings suggest that aging severely impairs the attentional regulation of auditory perception.},
  language  = {en},
}