% Cleaned OPUS repository export (text outside @entries is ignored by BibTeX):
% - removed redundant `series` fields that duplicated `journal` verbatim,
% - fixed page-range hyphenation (-- instead of -),
% - brace-protected the ERP acronym against style-applied sentence casing,
% - reflowed to one field per line with a consistent field order.
% Citation keys and all data values are unchanged.

@article{WieserMoscovitch2015,
  author   = {Wieser, Matthias J. and Moscovitch, David A.},
  title    = {The effect of affective context on visuocortical processing of neutral faces in social anxiety -- An {ERP} study},
  journal  = {Frontiers in Psychology},
  volume   = {6},
  pages    = {1824},
  year     = {2015},
  doi      = {10.3389/fpsyg.2015.01824},
  url      = {http://nbn-resolving.de/urn:nbn:de:bvb:20-opus-125148},
  abstract = {It has been demonstrated that verbal context information alters the neural processing of ambiguous faces such as faces with no apparent facial expression. In social anxiety, neutral faces may be implicitly threatening for socially anxious individuals due to their ambiguous nature, but even more so if these neutral faces are put in self-referential negative contexts. Therefore, we measured event-related brain potentials (ERPs) in response to neutral faces which were preceded by affective verbal information (negative, neutral, positive). Participants with low social anxiety (LSA; n = 23) and high social anxiety (HSA; n = 21) were asked to watch and rate valence and arousal of the respective faces while continuous EEG was recorded. ERP analysis revealed that HSA showed elevated P100 amplitudes in response to faces, but reduced structural encoding of faces as indexed by reduced N170 amplitudes. In general, affective context led to an enhanced early posterior negativity (EPN) for negative compared to neutral facial expressions. Moreover, HSA compared to LSA showed enhanced late positive potentials (LPP) to negatively contextualized faces, whereas in LSA this effect was found for faces in positive contexts. Also, HSA rated faces in negative contexts as more negative compared to LSA. These results point at enhanced vigilance for neutral faces regardless of context in HSA, while structural encoding seems to be diminished (avoidance). Interestingly, later components of sustained processing (LPP) indicate that LSA show enhanced visuocortical processing for faces in positive contexts (happy bias), whereas this seems to be the case for negatively contextualized faces in HSA (threat bias). Finally, our results add further new evidence that top-down information in interaction with individual anxiety levels can influence early-stage aspects of visual perception.},
  language = {en},
}

@article{SchwarzWieserGerdesetal.2013,
  author   = {Schwarz, Katharina A. and Wieser, Matthias J. and Gerdes, Antje B. M. and M{\"u}hlberger, Andreas and Pauli, Paul},
  title    = {Why are you looking like that? How the context influences evaluation and processing of human faces},
  journal  = {Social Cognitive and Affective Neuroscience},
  volume   = {8},
  number   = {4},
  pages    = {438--445},
  year     = {2013},
  doi      = {10.1093/scan/nss013},
  url      = {http://nbn-resolving.de/urn:nbn:de:bvb:20-opus-132126},
  abstract = {Perception and evaluation of facial expressions are known to be heavily modulated by emotional features of contextual information. Such contextual effects, however, might also be driven by non-emotional aspects of contextual information, an interaction of emotional and non-emotional factors, and by the observers' inherent traits. Therefore, we sought to assess whether contextual information about self-reference in addition to information about valence influences the evaluation and neural processing of neutral faces. Furthermore, we investigated whether social anxiety moderates these effects. In the present functional magnetic resonance imaging (fMRI) study, participants viewed neutral facial expressions preceded by a contextual sentence conveying either positive or negative evaluations about the participant or about somebody else. Contextual influences were reflected in rating and fMRI measures, with strong effects of self-reference on brain activity in the medial prefrontal cortex and right fusiform gyrus. Additionally, social anxiety strongly affected the response to faces conveying negative, self-related evaluations as revealed by the participants' rating patterns and brain activity in cortical midline structures and regions of interest in the left and right middle frontal gyrus. These results suggest that face perception and processing are highly individual processes influenced by emotional and non-emotional aspects of contextual information and further modulated by individual personality traits.},
  language = {en},
}

@article{KiserGromerPaulietal.2022,
  author   = {Kiser, Dominik P. and Gromer, Daniel and Pauli, Paul and Hilger, Kirsten},
  title    = {A virtual reality social conditioned place preference paradigm for humans: Does trait social anxiety affect approach and avoidance of virtual agents?},
  journal  = {Frontiers in Virtual Reality},
  volume   = {3},
  year     = {2022},
  issn     = {2673-4192},
  doi      = {10.3389/frvir.2022.916575},
  url      = {http://nbn-resolving.de/urn:nbn:de:bvb:20-opus-293564},
  abstract = {Approach and avoidance of positive and negative social cues are fundamental to prevent isolation and ensure survival. High trait social anxiety is characterized by an avoidance of social situations and extensive avoidance is a risk factor for the development of social anxiety disorder (SAD). Therefore, experimental methods to assess social avoidance behavior in humans are essential. The social conditioned place preference (SCPP) paradigm is a well-established experimental paradigm in animal research that is used to objectively investigate social approach-avoidance mechanisms. We retranslated this paradigm for human research using virtual reality. To this end, 58 healthy adults were exposed to either a happy- or angry-looking virtual agent in a specific room, and the effects of this encounter on dwell time as well as evaluation of this room in a later test without an agent were examined. We did not observe a general SCPP effect on dwell time or ratings but discovered a moderation by trait social anxiety, in which participants with higher trait social anxiety spent less time in the room in which the angry agent was present before, suggesting that higher levels of trait social anxiety foster conditioned social avoidance. However, further studies are needed to verify this observation and substantiate an association with social anxiety disorder. We discussed the strengths, limitations, and technical implications of our paradigm for future investigations to more comprehensively understand the mechanisms involved in social anxiety and facilitate the development of new personalized treatment approaches by using virtual reality.},
  language = {en},
}

@article{HamannBankmannMoraMazaetal.2022,
  author   = {Hamann, Catharina S. and Bankmann, Julian and Mora Maza, Hanna and Kornhuber, Johannes and Zoicas, Iulia and Schmitt-B{\"o}hrer, Angelika},
  title    = {Social fear affects limbic system neuronal activity and gene expression},
  journal  = {International Journal of Molecular Sciences},
  volume   = {23},
  number   = {15},
  year     = {2022},
  issn     = {1422-0067},
  doi      = {10.3390/ijms23158228},
  url      = {http://nbn-resolving.de/urn:nbn:de:bvb:20-opus-284274},
  abstract = {Social anxiety disorder (SAD) is a highly prevalent and comorbid anxiety disorder with rather unclear underlying mechanisms. Here, we aimed to characterize neurobiological changes occurring in mice expressing symptoms of social fear and to identify possible therapeutic targets for SAD. Social fear was induced via social fear conditioning (SFC), a validated animal model of SAD. We assessed the expression levels of the immediate early genes (IEGs) cFos, Fosl2 and Arc as markers of neuronal activity and the expression levels of several genes of the GABAergic, serotoninergic, oxytocinergic, vasopressinergic and neuropeptide Y (NPY)-ergic systems in brain regions involved in social behavior or fear-related behavior in SFC+ and SFC- mice 2 h after exposure to a conspecific. SFC+ mice showed a decreased number and density of cFos-positive cells and decreased expression levels of IEGs in the dorsal hippocampus. SFC+ mice also showed alterations in the expression of NPY and serotonin system-related genes in the paraventricular nucleus of the hypothalamus, basolateral amygdala, septum and dorsal raphe nucleus, but not in the dorsal hippocampus. Our results describe neuronal alterations occurring during the expression of social fear and identify the NPY and serotonergic systems as possible targets in the treatment of SAD.},
  language = {en},
}

@article{GerdesWieserAlpers2014,
  author   = {Gerdes, Antje B. M. and Wieser, Matthias J. and Alpers, Georg W.},
  title    = {Emotional pictures and sounds: a review of multimodal interactions of emotion cues in multiple domains},
  journal  = {Frontiers in Psychology},
  volume   = {5},
  pages    = {1351},
  year     = {2014},
  issn     = {1664-1078},
  doi      = {10.3389/fpsyg.2014.01351},
  url      = {http://nbn-resolving.de/urn:nbn:de:bvb:20-opus-114548},
  abstract = {In everyday life, multiple sensory channels jointly trigger emotional experiences and one channel may alter processing in another channel. For example, seeing an emotional facial expression and hearing the voice's emotional tone will jointly create the emotional experience. This example, where auditory and visual input is related to social communication, has gained considerable attention by researchers. However, interactions of visual and auditory emotional information are not limited to social communication but can extend to much broader contexts including human, animal, and environmental cues. In this article, we review current research on audiovisual emotion processing beyond face-voice stimuli to develop a broader perspective on multimodal interactions in emotion processing. We argue that current concepts of multimodality should be extended in considering an ecologically valid variety of stimuli in audiovisual emotion processing. Therefore, we provide an overview of studies in which emotional sounds and interactions with complex pictures of scenes were investigated. In addition to behavioral studies, we focus on neuroimaging, electro- and peripher-physiological findings. Furthermore, we integrate these findings and identify similarities or differences. We conclude with suggestions for future research.},
  language = {en},
}