% Bibliography database. Entries cleaned from a repository auto-export:
% removed `series` fields duplicating `journal`, fixed page-range dashes,
% fixed the von-particle in "van Lieshout", braced acronyms in titles,
% and repaired extraction garbling inside abstracts.

@article{OffredeJacobiReberniketal.2020,
  author    = {Offrede, Tom F. and Jacobi, Jidde and Rebernik, Teja and de Jong, Lisanne and Keulen, Stefanie and Veenstra, Pauline and Noiray, Aude and Wieling, Martijn},
  title     = {The Impact of Alcohol on {L1} versus {L2}},
  journal   = {Language and Speech},
  volume    = {64},
  number    = {3},
  publisher = {SAGE Publications},
  address   = {Thousand Oaks},
  issn      = {1756-6053},
  doi       = {10.1177/0023830920953169},
  pages     = {681--692},
  year      = {2020},
  abstract  = {Alcohol intoxication is known to affect many aspects of human behavior and cognition; one of such affected systems is articulation during speech production. Although much research has revealed that alcohol negatively impacts pronunciation in a first language (L1), there is only initial evidence suggesting a potential beneficial effect of inebriation on articulation in a non-native language (L2). The aim of this study was thus to compare the effect of alcohol consumption on pronunciation in an L1 and an L2. Participants who had ingested different amounts of alcohol provided speech samples in their L1 (Dutch) and L2 (English), and native speakers of each language subsequently rated the pronunciation of these samples on their intelligibility (for the L1) and accent nativelikeness (for the L2). These data were analyzed with generalized additive mixed modeling. Participants' blood alcohol concentration indeed negatively affected pronunciation in L1, but it produced no significant effect on the L2 accent ratings. The expected negative impact of alcohol on L1 articulation can be explained by reduction in fine motor control. We present two hypotheses to account for the absence of any effects of intoxication on L2 pronunciation: (1) there may be a reduction in L1 interference on L2 speech due to decreased motor control or (2) alcohol may produce a differential effect on each of the two linguistic subsystems.},
  language  = {en},
}

% NOTE(review): `pages = {8}` below looks like a page COUNT from the export,
% not a range or article number (JASA Express Letters uses article numbers) --
% verify against the publisher record before relying on it.
@article{MokariGafosWilliams2021,
  author    = {Mokari, Payam Ghaffarvand and Gafos, Adamantios I. and Williams, Daniel},
  title     = {Perceptuomotor Compatibility Effects in Vowels},
  journal   = {JASA Express Letters},
  volume    = {1},
  number    = {1},
  publisher = {American Institute of Physics},
  address   = {Melville},
  issn      = {2691-1191},
  doi       = {10.1121/10.0003039},
  pages     = {8},
  year      = {2021},
  abstract  = {In a cue-distractor task, speakers' response times (RTs) were found to speed up when they perceived a distractor syllable whose vowel was identical to the vowel in the syllable they were preparing to utter. At a more fine-grained level, subphonemic congruency between response and distractor-defined by higher number of shared phonological features or higher acoustic proximity-was also found to be predictive of RT modulations. Furthermore, the findings indicate that perception of vowel stimuli embedded in syllables gives rise to robust and more consistent perceptuomotor compatibility effects (compared to isolated vowels) across different response-distractor vowel pairs.},
  language  = {en},
}

% Author fix: export stored "Lieshout, Pascal H. H. M. van" -- the particle
% belongs before the surname in "von Last, First" form.
% NOTE(review): `pages = {4}` is likely a page count; the DOI suffix suggests
% article number 1238 -- confirm before changing.
@article{GafosLieshout2020,
  author    = {Gafos, Adamantios I. and van Lieshout, Pascal H. H. M.},
  title     = {Models and Theories of Speech Production},
  journal   = {Frontiers in Psychology},
  volume    = {11},
  publisher = {Frontiers Research Foundation},
  address   = {Lausanne},
  issn      = {1664-1078},
  doi       = {10.3389/fpsyg.2020.01238},
  pages     = {4},
  year      = {2020},
  language  = {en},
}

@article{BollAvetisyanBhataraHoehle2017,
  author    = {Boll-Avetisyan, Natalie and Bhatara, Anjali and H{\"o}hle, Barbara},
  title     = {Effects of Musicality on the Perception of Rhythmic Structure in Speech},
  journal   = {Laboratory Phonology},
  volume    = {8},
  number    = {1},
  publisher = {Ubiquity Press},
  address   = {London},
  issn      = {1868-6346},
  doi       = {10.5334/labphon.91},
  pages     = {16},
  year      = {2017},
  abstract  = {Language and music share many rhythmic properties, such as variations in intensity and duration leading to repeating patterns. Perception of rhythmic properties may rely on cognitive networks that are shared between the two domains. If so, then variability in speech rhythm perception may relate to individual differences in musicality. To examine this possibility, the present study focuses on rhythmic grouping, which is assumed to be guided by a domain-general principle, the Iambic/Trochaic law, stating that sounds alternating in intensity are grouped as strong-weak, and sounds alternating in duration are grouped as weak-strong. German listeners completed a grouping task: They heard streams of syllables alternating in intensity, duration, or neither, and had to indicate whether they perceived a strong-weak or weak-strong pattern. Moreover, their music perception abilities were measured, and they filled out a questionnaire reporting their productive musical experience. Results showed that better musical rhythm perception ability was associated with more consistent rhythmic grouping of speech, while melody perception ability and productive musical experience were not. This suggests shared cognitive procedures in the perception of rhythm in music and speech. Also, the results highlight the relevance of considering individual differences in musicality when aiming to explain variability in prosody perception.},
  language  = {en},
}