@article{VenturaBortDolcosWendtetal.2017,
  author   = {Ventura-Bort, Carlos and Dolcos, Florin and Wendt, Julia and Wirkner, Janine and Hamm, Alfons O. and Weymar, Mathias},
  title    = {Item and source memory for emotional associates is mediated by different retrieval processes},
  journal  = {Neuropsychologia : an international journal in behavioural and cognitive neuroscience},
  volume   = {145},
  publisher = {Elsevier},
  address  = {Oxford},
  issn     = {0028-3932},
  doi      = {10.1016/j.neuropsychologia.2017.12.015},
  pages    = {11},
  year     = {2017},
  abstract = {Recent event-related potential (ERP) data showed that neutral objects encoded in emotional background pictures were better remembered than objects encoded in neutral contexts, when recognition memory was tested one week later. In the present study, we investigated whether this long-term memory advantage for items is also associated with correct memory for contextual source details. Furthermore, we were interested in the possibly dissociable contribution of familiarity and recollection processes (using a Remember/Know procedure). The results revealed that item memory performance was mainly driven by the subjective experience of familiarity, irrespective of whether the objects were previously encoded in emotional or neutral contexts. Correct source memory for the associated background picture, however, was driven by recollection and enhanced when the content was emotional. In ERPs, correctly recognized old objects evoked frontal ERP Old/New effects (300-500 ms), irrespective of context category. As in our previous study (Ventura-Bort et al., 2016b), retrieval for objects from emotional contexts was associated with larger parietal Old/New differences (600-800 ms), indicating stronger involvement of recollection. Thus, the results suggest a stronger contribution of recollection-based retrieval to item and contextual background source memory for neutral information associated with an emotional event.},
  language = {en},
}

% NOTE(review): this @misc entry originally carried the key
% DolcosKatsumiMooreetal.2019, identical to the @article below — a
% duplicate-key error in BibTeX/Biber. Renamed with an "a" suffix; the
% canonical journal @article keeps the unsuffixed key.
@misc{DolcosKatsumiMooreetal.2019a,
  author   = {Dolcos, Florin and Katsumi, Yuta and Moore, Matthew and Berggren, Nick and de Gelder, Beatrice and Derakshan, Nazanin and Hamm, Alfons O. and Koster, Ernst H. W. and Ladouceur, Cecile D. and Okon-Singer, Hadas and Ventura-Bort, Carlos and Weymar, Mathias},
  title    = {Neural correlates of emotion-attention interactions},
  journal  = {Zweitver{\"o}ffentlichungen der Universit{\"a}t Potsdam : Humanwissenschaftliche Reihe},
  issn     = {1866-8364},
  doi      = {10.25932/publishup-51621},
  url      = {http://nbn-resolving.de/urn:nbn:de:kobv:517-opus4-516213},
  pages    = {45},
  year     = {2019},
  abstract = {Due to their ability to capture attention, emotional stimuli tend to benefit from enhanced perceptual processing, which can be helpful when such stimuli are task-relevant but hindering when they are task-irrelevant. Altered emotion-attention interactions have been associated with symptoms of affective disturbances, and emerging research focuses on improving emotion-attention interactions to prevent or treat affective disorders. In line with the Human Affectome Project's emphasis on linguistic components, we also analyzed the language used to describe attention-related aspects of emotion, and highlighted terms related to domains such as conscious awareness, motivational effects of attention, social attention, and emotion regulation. These terms were discussed within a broader review of available evidence regarding the neural correlates of (1) Emotion-Attention Interactions in Perception, (2) Emotion-Attention Interactions in Learning and Memory, (3) Individual Differences in Emotion-Attention Interactions, and (4) Training and Interventions to Optimize Emotion-Attention Interactions. This comprehensive approach enabled an integrative overview of the current knowledge regarding the mechanisms of emotion-attention interactions at multiple levels of analysis, and identification of emerging directions for future investigations.},
  language = {en},
}

@article{DolcosKatsumiMooreetal.2019,
  author   = {Dolcos, Florin and Katsumi, Yuta and Moore, Matthew and Berggren, Nick and de Gelder, Beatrice and Derakshan, Nazanin and Hamm, Alfons O. and Koster, Ernst H. W. and Ladouceur, Cecile D. and Okon-Singer, Hadas and Ventura-Bort, Carlos and Weymar, Mathias},
  title    = {Neural correlates of emotion-attention interactions},
  journal  = {Neuroscience and Biobehavioral Reviews},
  volume   = {108},
  publisher = {Elsevier},
  address  = {Oxford},
  issn     = {0149-7634},
  doi      = {10.1016/j.neubiorev.2019.08.017},
  pages    = {559--601},
  year     = {2019},
  abstract = {Due to their ability to capture attention, emotional stimuli tend to benefit from enhanced perceptual processing, which can be helpful when such stimuli are task-relevant but hindering when they are task-irrelevant. Altered emotion-attention interactions have been associated with symptoms of affective disturbances, and emerging research focuses on improving emotion-attention interactions to prevent or treat affective disorders. In line with the Human Affectome Project's emphasis on linguistic components, we also analyzed the language used to describe attention-related aspects of emotion, and highlighted terms related to domains such as conscious awareness, motivational effects of attention, social attention, and emotion regulation. These terms were discussed within a broader review of available evidence regarding the neural correlates of (1) Emotion-Attention Interactions in Perception, (2) Emotion-Attention Interactions in Learning and Memory, (3) Individual Differences in Emotion-Attention Interactions, and (4) Training and Interventions to Optimize Emotion-Attention Interactions. This comprehensive approach enabled an integrative overview of the current knowledge regarding the mechanisms of emotion-attention interactions at multiple levels of analysis, and identification of emerging directions for future investigations.},
  language = {en},
}