@article{MunozCentifantiStickleThomasetal.2021,
  author   = {Mu{\~n}oz Centifanti, Luna C. and Stickle, Timothy R. and Thomas, Jamila and Falc{\'o}n, Amanda and Thomson, Nicholas D. and Gamer, Matthias},
  title    = {Reflexive Gaze Shifts and Fear Recognition Deficits in Children with Callous-Unemotional Traits and Impulsivity/Conduct Problems},
  journal  = {Brain Sciences},
  volume   = {11},
  number   = {10},
  issn     = {2076-3425},
  doi      = {10.3390/brainsci11101342},
  url      = {http://nbn-resolving.de/urn:nbn:de:bvb:20-opus-248536},
  year     = {2021},
  abstract = {The ability to efficiently recognize the emotions on others' faces is something that most of us take for granted. Children with callous-unemotional (CU) traits and impulsivity/conduct problems (ICP), such as attention-deficit hyperactivity disorder, have been previously described as being ``fear blind''. This is also associated with looking less at the eye regions of fearful faces, which are highly diagnostic. Previous attempts to intervene into emotion recognition strategies have not had lasting effects on participants' fear recognition abilities. Here we present both (a) additional evidence that there is a two-part causal chain, from personality traits to face recognition strategies using the eyes, then from strategies to rates of recognizing fear in others; and (b) a pilot intervention that had persistent effects for weeks after the end of instruction. Further, the intervention led to more change in those with the highest CU traits. This both clarifies the specific mechanisms linking personality to emotion recognition and shows that the process is fundamentally malleable. It is possible that such training could promote empathy and reduce the rates of antisocial behavior in specific populations in the future.},
  language = {en}
}

@article{SchererEllgringDieckmannetal.2019,
  author   = {Scherer, Klaus R. and Ellgring, Heiner and Dieckmann, Anja and Unfried, Matthias and Mortillaro, Marcello},
  title    = {Dynamic Facial Expression of Emotion and Observer Inference},
  journal  = {Frontiers in Psychology},
  volume   = {10},
  number   = {508},
  issn     = {1664-1078},
  doi      = {10.3389/fpsyg.2019.00508},
  url      = {http://nbn-resolving.de/urn:nbn:de:bvb:20-opus-195853},
  year     = {2019},
  abstract = {Research on facial emotion expression has mostly focused on emotion recognition, assuming that a small number of discrete emotions is elicited and expressed via prototypical facial muscle configurations as captured in still photographs. These are expected to be recognized by observers, presumably via template matching. In contrast, appraisal theories of emotion propose a more dynamic approach, suggesting that specific elements of facial expressions are directly produced by the result of certain appraisals and predicting the facial patterns to be expected for certain appraisal configurations. This approach has recently been extended to emotion perception, claiming that observers first infer individual appraisals and only then make categorical emotion judgments based on the estimated appraisal patterns, using inference rules. Here, we report two related studies to empirically investigate the facial action unit configurations that are used by actors to convey specific emotions in short affect bursts and to examine to what extent observers can infer a person's emotions from the predicted facial expression configurations. The results show that (1) professional actors use many of the predicted facial action unit patterns to enact systematically specified appraisal outcomes in a realistic scenario setting, and (2) na{\"i}ve observers infer the respective emotions based on highly similar facial movement configurations with a degree of accuracy comparable to earlier research findings. Based on estimates of underlying appraisal criteria for the different emotions we conclude that the patterns of facial action units identified in this research correspond largely to prior predictions and encourage further research on appraisal-driven expression and inference.},
  language = {en}
}