@article{HolzgrefeLangWellmannPetroneetal.2016,
  author = {Holzgrefe-Lang, Julia and Wellmann, Caroline and Petrone, Caterina and R{\"a}ling, Romy and Truckenbrodt, Hubert and H{\"o}hle, Barbara and Wartenburger, Isabell},
  title = {How pitch change and final lengthening cue boundary perception in German: converging evidence from ERPs and prosodic judgements},
  series = {Language, cognition and neuroscience},
  volume = {31},
  journal = {Language, cognition and neuroscience},
  publisher = {Routledge},
  address = {Abingdon},
  issn = {2327-3798},
  doi = {10.1080/23273798.2016.1157195},
  pages = {904--920},
  year = {2016},
  abstract = {This study examines the role of pitch and final lengthening in German intonation phrase boundary (IPB) perception. Since a prosody-related event-related potential (ERP) component termed Closure Positive Shift reflects the processing of major prosodic boundaries, we combined ERP and behavioural measures (i.e. a prosodic judgement task) to systematically test the impact of sole and combined cue occurrences on IPB perception. In two experiments we investigated whether adult listeners perceived an IPB in acoustically manipulated speech material that contained none, one, or two of the prosodic boundary cues. Both ERP and behavioural results suggest that pitch and final lengthening cues have to occur in combination to trigger IPB perception. Hence, the combination of behavioural and electrophysiological measures provides a comprehensive insight into prosodic boundary cue perception in German and leads to an argument in favour of interrelated cues from the frequency (i.e. pitch change) and the time (i.e. final lengthening) domains.},
  language = {en}
}

@article{RoonGafos2016,
  author = {Roon, Kevin D. and Gafos, Adamantios I.},
  title = {Perceiving while producing: Modeling the dynamics of phonological planning},
  series = {Journal of memory and language},
  volume = {89},
  journal = {Journal of memory and language},
  publisher = {Elsevier},
  address = {San Diego},
  issn = {0749-596X},
  doi = {10.1016/j.jml.2016.01.005},
  pages = {222--243},
  year = {2016},
  abstract = {We offer a dynamical model of phonological planning that provides a formal instantiation of how the speech production and perception systems interact during online processing. The model is developed on the basis of evidence from an experimental task that requires concurrent use of both systems, the so-called response-distractor task, in which speakers hear distractor syllables while they are preparing to produce required responses. The model formalizes how ongoing response planning is affected by perception and accounts for a range of results reported across previous studies. It does so by explicitly addressing the setting of parameter values in representations. The key unit of the model is that of the dynamic field, a distribution of activation over the range of values associated with each representational parameter. The setting of parameter values takes place by the attainment of a stable distribution of activation over the entire field, stable in the sense that it persists even after the response cue in the above experiments has been removed. This and other properties of representations that have been taken as axiomatic in previous work are derived by the dynamics of the proposed model.},
  language = {en}
}

@article{NixonvanRijMoketal.2016,
  author = {Nixon, Jessie S. and van Rij, Jacolien and Mok, Peggy and Baayen, Harald R. and Chen, Yiya},
  title = {The temporal dynamics of perceptual uncertainty: eye movement evidence from Cantonese segment and tone perception},
  series = {Journal of memory and language},
  volume = {90},
  journal = {Journal of memory and language},
  publisher = {Elsevier},
  address = {San Diego},
  issn = {0749-596X},
  doi = {10.1016/j.jml.2016.03.005},
  pages = {103--125},
  year = {2016},
  abstract = {Two visual world eyetracking experiments investigated how acoustic cue value and statistical variance affect perceptual uncertainty during Cantonese consonant (Experiment 1) and tone perception (Experiment 2). Participants heard low- or high-variance acoustic stimuli. Euclidean distance of fixations from target and competitor pictures over time was analysed using Generalised Additive Mixed Modelling. Distance of fixations from target and competitor pictures varied as a function of acoustic cue, providing evidence for gradient, nonlinear sensitivity to cue values. Moreover, cue value effects significantly interacted with statistical variance, indicating that the cue distribution directly affects perceptual uncertainty. Interestingly, the time course of effects differed between target distance and competitor distance models. The pattern of effects over time suggests a global strategy in response to the level of uncertainty: as uncertainty increases, verification looks increase accordingly. Low variance generally creates less uncertainty, but can lead to greater uncertainty in the face of unexpected speech tokens.},
  language = {en}
}