@phdthesis{Postler2006, author = {Postler, Jenny}, title = {Die neuronale Verarbeitung von Nomen und Verben}, url = {http://nbn-resolving.de/urn:nbn:de:kobv:517-opus-10214}, school = {Universit{\"a}t Potsdam}, year = {2006}, abstract = {For about two decades, the cognitive and neural processing of nouns and verbs has been a major research focus in neurolinguistics and neuropsychology. Intensive research efforts in recent years have yielded a range of findings, which, however, are largely inconsistent and contradictory. A frequently held assumption regarding the neural basis of noun and verb processing is the so-called anterior-posterior dissociation, according to which nouns are processed in temporal and verbs in frontal regions of the language-dominant left hemisphere. The present dissertation uses functional magnetic resonance imaging to investigate which cortical regions are involved in the retrieval of nouns and verbs during silent picture naming. In addition, the influence of age of acquisition on brain activation during picture naming is examined. The results of the study show that the retrieval of nouns and verbs elicits similar cortical activations in bilateral occipital as well as left frontal, temporal, and inferior parietal regions, with stronger activations for verbs observed in left frontal and bilateral temporal areas. This finding contradicts the assumption of an anterior-posterior dissociation. Instead, the observed activation patterns support the view that a shared network of anterior and posterior components underlies the processing of nouns and verbs during picture naming. The study further revealed that cortical activations during picture naming are modulated by age of acquisition, with activations for later-acquired words in the left inferior frontal lobe and in the basal temporal language area. The results are discussed and interpreted against the background of current cognitive and neuroanatomical models of language processing.}, subject = {Nomen}, language = {de} } @phdthesis{VenturaBort2020, author = {Ventura-Bort, Carlos}, title = {Temporo-spatial dynamics of the impact of emotional contexts on visual processing and memory}, doi = {10.25932/publishup-55023}, url = {http://nbn-resolving.de/urn:nbn:de:kobv:517-opus4-550236}, school = {Universit{\"a}t Potsdam}, pages = {208}, year = {2020}, abstract = {It has frequently been observed that single emotional events are not only more efficiently processed but also better remembered, forming longer-lasting memory traces than neutral material. However, when emotional information is perceived as part of a complex event, such as in the context of or in relation to other events and/or source details, the modulatory effects of emotion are less clear. The present work aims to investigate how emotional contextual source information modulates the initial encoding and subsequent long-term retrieval of associated neutral material (item memory) and contextual source details (contextual source memory). 
To do so, a two-task experiment was used, consisting of an incidental encoding task in which neutral objects were displayed over different contextual background scenes varying in emotional content (unpleasant, pleasant, and neutral), and a delayed retrieval task (1 week later) in which previously encoded objects and new ones were presented. In a series of studies, behavioral indices (Studies 2, 3, and 5), event-related potentials (ERPs; Studies 1-4), and functional magnetic resonance imaging (Study 5) were used to investigate whether emotional contexts can rapidly tune the visual processing of associated neutral information (Study 1) and modulate long-term item memory (Study 2), how different recognition memory processes (familiarity vs. recollection) contribute to these emotion effects on item and contextual source memory (Study 3), whether the emotional effects on item memory can also be observed during spontaneous retrieval (Study 4), and which brain regions underpin the modulatory effects of emotional contexts on item and contextual source memory (Study 5). In Study 1, it was observed that emotional contexts, by means of emotional associative learning, can rapidly alter the processing of associated neutral information. Neutral items associated with emotional contexts (i.e., emotional associates), compared to those associated with neutral contexts, showed enhanced perceptual and more elaborate processing after a single pairing, as indexed by larger amplitudes in the P100 and LPP components, respectively. Study 2 showed that emotional contexts produce longer-lasting memory effects, as evidenced by better item memory performance and larger ERP Old/New differences for emotional associates. In Study 3, a mnemonic differentiation was observed between item and contextual source memory, which was modulated by emotion: item memory was driven by familiarity, independently of the emotional content of the encoding context, whereas contextual source memory was driven by recollection and was better for emotional material. As in Study 2, enhancing effects of emotional contexts on item memory were observed in ERPs associated with recollection processes. Likewise, for contextual source memory, a pronounced recollection-related ERP enhancement was observed exclusively for emotional contexts. Study 4 showed that the long-term recollection enhancement of emotional contexts on item memory can be observed even when retrieval is not explicitly attempted, as measured with ERPs, suggesting that the memory-enhancing effects of emotion are not tied to the task demands during recognition but to the motivational relevance of the triggering event. In Study 5, it was observed that the enhancing effects of emotional contexts on item and contextual source memory involve stronger engagement of brain regions associated with memory recollection, including areas of the medial temporal lobe, posterior parietal cortex, and prefrontal cortex. Taken together, these findings suggest that emotional contexts rapidly modulate the initial processing of associated neutral information and the subsequent long-term item and contextual source memories. The enhanced memory effects of emotional contexts are strongly supported by recollection rather than familiarity processes and are triggered both when retrieval is explicitly attempted and when it occurs spontaneously. These results provide new insights into the modulatory role of emotional information on the visual processing and long-term recognition memory of complex events. 
The present findings are integrated into current theoretical models, and directions for future research are discussed.}, language = {en} } @phdthesis{Kuhnke2021, author = {Kuhnke, Philipp}, title = {The neural basis of conceptual knowledge retrieval}, doi = {10.25932/publishup-51441}, url = {http://nbn-resolving.de/urn:nbn:de:kobv:517-opus4-514414}, school = {Universit{\"a}t Potsdam}, pages = {133}, year = {2021}, abstract = {Conceptual knowledge about objects, people, and events in the world is central to human cognition, underlying core cognitive abilities such as object recognition and use, and word comprehension. Previous research indicates that concepts consist of perceptual and motor features represented in modality-specific perceptual-motor brain regions. In addition, cross-modal convergence zones integrate modality-specific features into more abstract conceptual representations. However, several questions remain open: First, to what extent does the retrieval of perceptual-motor features depend on the concurrent task? Second, how do modality-specific and cross-modal regions interact during conceptual knowledge retrieval? Third, which brain regions are causally relevant for conceptually-guided behavior? This thesis addresses these three key issues using functional magnetic resonance imaging (fMRI) and transcranial magnetic stimulation (TMS) in the healthy human brain. Study 1 - an fMRI activation study - tested to what extent the retrieval of sound and action features of concepts, and the resulting engagement of auditory and somatomotor brain regions, depend on the concurrent task. Forty healthy participants performed three different tasks - lexical decision, sound judgment, and action judgment - on words with a high or low association to sounds and actions. We found that modality-specific regions selectively respond to task-relevant features: Auditory regions selectively responded to sound features during sound judgments, and somatomotor regions selectively responded to action features during action judgments. Unexpectedly, several regions (e.g. the left posterior parietal cortex; PPC) exhibited a task-dependent response to both sound and action features. We propose that these regions are "multimodal", not "amodal", convergence zones that retain modality-specific information. Study 2 - an fMRI connectivity study - investigated the functional interaction between modality-specific and multimodal areas during conceptual knowledge retrieval. Using the fMRI data from Study 1, we asked (1) whether modality-specific and multimodal regions are functionally coupled during sound and action feature retrieval, (2) whether their coupling depends on the task, (3) whether information flows bottom-up, top-down, or bidirectionally, and (4) whether their coupling is behaviorally relevant. We found that functional coupling between multimodal and modality-specific areas is task-dependent, bidirectional, and relevant for conceptually-guided behavior. Left PPC acted as a connectivity "switchboard" that flexibly adapted its coupling to task-relevant modality-specific nodes. Hence, neuroimaging Studies 1 and 2 suggested a key role of left PPC as a multimodal convergence zone for conceptual knowledge. However, as neuroimaging is correlational, it remained unknown whether left PPC plays a causal role as a multimodal conceptual hub. Therefore, Study 3 - a TMS study - tested the causal relevance of left PPC for sound and action feature retrieval. 
We found that TMS over left PPC selectively impaired action judgments on low sound-low action words, as compared to sham stimulation. Computational simulations of the TMS-induced electrical field revealed that stronger stimulation of left PPC was associated with worse performance on action, but not sound, judgments. These results indicate that left PPC causally supports conceptual processing when action knowledge is task-relevant and cannot be compensated for by sound knowledge. Our findings suggest that left PPC is specialized for action knowledge, challenging the view of left PPC as a multimodal conceptual hub. Overall, our studies support "hybrid theories", which posit that conceptual processing involves both modality-specific perceptual-motor regions and cross-modal convergence zones. In our new model of the conceptual system, we propose that conceptual processing relies on a representational hierarchy from modality-specific to multimodal up to amodal brain regions. Crucially, this hierarchical system is flexible, with different regions and connections being engaged in a task-dependent fashion. Our model not only reconciles the seemingly opposing grounded cognition and amodal theories, but also incorporates the task dependency of conceptually-related brain activity and connectivity, thereby resolving several open issues concerning the neural basis of conceptual knowledge retrieval.}, language = {en} }