@string{neuropsychologia = {Neuropsychologia : an international journal in behavioural and cognitive neuroscience}}

@article{SchoenebeckElsner2019,
  author    = {Sch{\"o}nebeck, Maria and Elsner, Birgit},
  title     = {{ERPs} reveal perceptual and conceptual processing in 14-month-olds' observation of complete and incomplete action end-states},
  journal   = neuropsychologia,
  volume    = {126},
  publisher = {Elsevier},
  address   = {Oxford},
  issn      = {0028-3932},
  doi       = {10.1016/j.neuropsychologia.2017.10.026},
  pages     = {102--112},
  year      = {2019},
  abstract  = {Infants in the second year of life not only detect the visible goals or end-states of other people's action, but they also seem to be able to infer others' underlying intentions. The present study used event-related potentials (ERPs) to investigate the biological basis of infants' processing of others' goal-directed actions, with special regard to the involvement of bottom-up perceptual and top-down conceptual processes. In an adaptation of the behavioral re-enactment procedure, 14-month-olds were first familiarized with either full demonstrations (FD), failed attempts (FA), or arbitrary (AA) object-directed actions. Next, ERPs were measured while all infants saw the same two pictures of the end-states of the full demonstration (complete end-state) and the failed attempt (incomplete end-state). In the time-windows related to perceptual processing (100-200 ms after stimulus onset) and to conceptual processing (300-700 ms), ERP negativity over frontal and central regions was higher for the complete than for the incomplete end-state in the FD and FA conditions. When comparing the FA and AA conditions, this pattern of results occurred only for the conceptual time domain. Moreover, beginning slow-wave activity (700-1000 ms) differed for the end-state pictures in the three conditions, suggesting differential encoding demands. Together, the electrophysiological data indicate that infants in the second year of life use bottom-up perceptual as well as top-down conceptual processing to give meaning to others' goal-directed actions.},
  language  = {en},
}

@article{DambacherDimigenBraunetal.2012,
  author    = {Dambacher, Michael and Dimigen, Olaf and Braun, Mario and Wille, Kristin and Jacobs, Arthur M. and Kliegl, Reinhold},
  title     = {Stimulus onset asynchrony and the timeline of word recognition: Event-related potentials during sentence reading},
  journal   = neuropsychologia,
  volume    = {50},
  number    = {8},
  publisher = {Elsevier},
  address   = {Oxford},
  issn      = {0028-3932},
  doi       = {10.1016/j.neuropsychologia.2012.04.011},
  pages     = {1852--1870},
  year      = {2012},
  abstract  = {Three ERP experiments examined the effect of word presentation rate (i.e., stimulus onset asynchrony, SOA) on the time course of word frequency and predictability effects in sentence reading. In Experiments 1 and 2, sentences were presented word-by-word in the screen center at an SOA of 700 and 490 ms, respectively. While these rates are typical for psycholinguistic ERP research, natural reading happens at a considerably faster pace. Accordingly, Experiment 3 employed a near-normal SOA of 280 ms, which approximated the rate of normal reading. Main results can be summarized as follows: (1) The onset latency of early frequency effects decreases gradually with increasing presentation rates. (2) An early interaction between top-down and bottom-up processing is observed only under a near-normal SOA. (3) N400 predictability effects occur later and are smaller at a near-normal (i.e., high) presentation rate than at the lower rates commonly used in ERP experiments. (4) ERP morphology is different at the shortest compared to longer SOAs. Together, the results point to a special role of a near-normal presentation rate for visual word recognition and therefore suggest that SOA should be taken into account in research of natural reading.},
  language  = {en},
}