@article{LuoYanYanetal.2016,
  author = {Luo, Yingyi and Yan, Ming and Yan, Shaorong and Zhou, Xiaolin and Inhoff, Albrecht W.},
  title = {Syllabic tone articulation influences the identification and use of words during Chinese sentence reading: Evidence from ERP and eye movement recordings},
  series = {Cognitive, Affective, \& Behavioral Neuroscience},
  volume = {16},
  journal = {Cognitive, Affective, \& Behavioral Neuroscience},
  publisher = {Springer},
  address = {New York},
  issn = {1530-7026},
  doi = {10.3758/s13415-015-0368-1},
  pages = {72 -- 92},
  year = {2016},
  abstract = {In two experiments, we examined the contribution of articulation-specific features to visual word recognition during the reading of Chinese. In spoken Standard Chinese, a syllable with a full tone can be tone-neutralized through sound weakening and pitch contour change, and there are two types of two-character compound words with respect to their articulation variation. One type requires articulation of a full tone for each constituent character, and the other requires a full- and a neutral-tone articulation for the first and second characters, respectively. Words of these two types with identical first characters were selected and embedded in sentences. Native speakers of Standard Chinese were recruited to read the sentences. In Experiment 1, the individual words of a sentence were presented serially at a fixed pace while event-related potentials were recorded. This resulted in less-negative N100 and anterior N250 amplitudes and in more-negative N400 amplitudes when targets contained a neutral tone. Complete sentences were visible in Experiment 2, and eye movements were recorded while participants read. Analyses of oculomotor activity revealed shorter viewing durations and fewer refixations on, and fewer regressive saccades to, target words when their second syllable was articulated with a neutral rather than a full tone. Together, the results indicate that readers represent articulation-specific word properties, that these representations are routinely activated early during the silent reading of Chinese sentences, and that the representations are also used during later stages of word processing.},
  language = {en}
}

@article{NixonvanRijMoketal.2016,
  author = {Nixon, Jessie S. and van Rij, Jacolien and Mok, Peggy and Baayen, Harald R. and Chen, Yiya},
  title = {The temporal dynamics of perceptual uncertainty: eye movement evidence from Cantonese segment and tone perception},
  series = {Journal of memory and language},
  volume = {90},
  journal = {Journal of memory and language},
  publisher = {Elsevier},
  address = {San Diego},
  issn = {0749-596X},
  doi = {10.1016/j.jml.2016.03.005},
  pages = {103 -- 125},
  year = {2016},
  abstract = {Two visual world eyetracking experiments investigated how acoustic cue value and statistical variance affect perceptual uncertainty during Cantonese consonant (Experiment 1) and tone perception (Experiment 2). Participants heard low- or high-variance acoustic stimuli. Euclidean distance of fixations from target and competitor pictures over time was analysed using Generalised Additive Mixed Modelling. Distance of fixations from target and competitor pictures varied as a function of acoustic cue, providing evidence for gradient, nonlinear sensitivity to cue values.
Moreover, cue value effects significantly interacted with statistical variance, indicating that the cue distribution directly affects perceptual uncertainty. Interestingly, the time course of effects differed between target distance and competitor distance models. The pattern of effects over time suggests a global strategy in response to the level of uncertainty: as uncertainty increases, verification looks increase accordingly. Low variance generally creates less uncertainty, but can lead to greater uncertainty in the face of unexpected speech tokens. (C) 2016 Elsevier Inc. All rights reserved.},
  language = {en}
}