@phdthesis{Anderson2011,
  author   = {Anderson, Christina},
  title    = {Idiosyncratic Facial Movement in Face Perception and Recognition},
  url      = {http://nbn-resolving.de/urn:nbn:de:bvb:20-opus-70355},
  school   = {Universit{\"a}t W{\"u}rzburg},
  year     = {2011},
  abstract = {It has been proposed that different features of a face provide a source of information for separate perceptual and cognitive processes. Properties of a face that remain rather stable over time, so-called invariant facial features, yield information about a face's identity, whereas changeable aspects of faces transmit information underlying social communication such as emotional expressions and speech movements. While processing of these different face properties was initially claimed to be independent, a growing body of evidence suggests that these sources of information can interact when people recognize familiar faces. This is the case because the way a face moves can contain patterns that are characteristic of that specific person, so-called idiosyncratic movements. As a face becomes familiar, these idiosyncratic movements are learned and hence also provide information serving face identification. While an abundance of experiments has addressed the independence of invariant and variable facial features in face recognition, little is known about the exact nature of the impact idiosyncratic facial movements have on face recognition. Gaining knowledge about the way facial motion contributes to face recognition is, however, important for a deeper understanding of the way the brain processes and recognizes faces. In this dissertation, three experiments are reported that investigate the impact the familiarity of changeable facial features has on processes of face recognition. Temporal aspects of the processing of familiar idiosyncratic facial motion were addressed in the first experiment via EEG, by investigating the influence familiar facial movement exerts on event-related potentials associated with face processing and face recognition. After being familiarized with a face and its idiosyncratic movement, participants viewed familiar or unfamiliar faces with familiar or unfamiliar facial movement while their brain potentials were recorded. Results showed that the familiarity of facial motion influenced later event-related potentials linked to memory processes involved in face recognition. The second experiment used fMRI to investigate the brain areas involved in processing familiar facial movement. Participants' BOLD signal was recorded while they viewed familiar and unfamiliar faces with familiar or unfamiliar idiosyncratic movement. It was found that activity in brain regions underlying the processing of face identity, such as the fusiform gyrus, was modulated by familiar facial movement. Together, these two experiments provide valuable information about the nature of the involvement of idiosyncratic facial movement in face recognition and have important implications for cognitive and neural models of face perception and recognition. The third experiment addressed the question of whether idiosyncratic facial movement could increase individuation in perceiving faces from a different ethnic group and hence reduce the impaired recognition of these other-race faces compared to own-race faces, a phenomenon known as the own-race bias. European participants viewed European and African faces, each animated with an idiosyncratic smile, while their attention was directed either to the form or to the motion of the face. Subsequently, recognition memory for these faces was tested. Results showed that the own-race bias was equally present in both attention conditions, indicating that idiosyncratic facial movement was not able to reduce the own-race bias. In combination, the experiments presented here provide further insight into the involvement of idiosyncratic facial motion in face recognition. It is necessary to consider the dynamic component of faces when investigating face recognition because static facial images cannot provide the full range of information that leads to recognition of a face. In order to reflect the full process of face recognition, cognitive and neural models of face perception and recognition need to integrate dynamic facial features as a source of information that contributes to the recognition of a face.},
  subject  = {Gesicht},
  language = {en}
}

@phdthesis{Homola2011,
  author   = {Homola, Gy{\"o}rgy {\'A}d{\'a}m},
  title    = {Functional and Microstructural MRI of the Human Brain Revealing a Cerebral Network Processing the Age of Faces},
  url      = {http://nbn-resolving.de/urn:nbn:de:bvb:20-opus-56740},
  school   = {Universit{\"a}t W{\"u}rzburg},
  year     = {2011},
  abstract = {Although age is one of the most salient and fundamental aspects of human faces, its processing in the brain has not yet been studied by any neuroimaging experiment. Automatic assessment of temporal changes across faces is a prerequisite to identifying persons over their life-span, and age per se is of biological and social relevance. Using a combination of evocative face morphs controlled for global optical flow and functional magnetic resonance imaging (fMRI), we segregate two areas that process changes of facial age in both hemispheres. These areas extend beyond the previously established face-sensitive network and are centered on the posterior inferior temporal sulcus (pITS) and the posterior angular gyrus (pANG), an evolutionarily new formation of the human brain. Using probabilistic tractography, and by calculating spatial cross-correlations as well as creating minimum intersection maps between activation and connectivity patterns, we demonstrate a hitherto unrecognized link between structure and function in the human brain on the basis of cognitive age processing. According to our results, implicit age processing involves the inferior temporal sulci and is, at the same time, closely tied to quantity decoding by the presumed neural systems devoted to magnitudes in the human parietal lobes. The ventral portion of Wernicke's largely forgotten perpendicular association fasciculus is shown not only to interconnect these two areas but also to relate to their activations, i.e. to transmit age-relevant information. In particular, post-hoc age-rating competence is shown to be associated with high response levels in the left angular gyrus. Cortical activation patterns related to changes of facial age differ from those previously elicited by other fixed and changeable face aspects such as gender (used for comparison), ethnicity, and identity, as well as eye gaze or facial expressions. We argue that this may be because individual changes of facial age occur ontogenetically, unlike the instant changes of gaze direction or expressive content in faces, which can be "mirrored" and require constant cognitive monitoring to follow. Discussing the ample evidence for distinct representations of quantitative age, as opposed to categorical gender varied over continuous androgyny levels, we suggest that particular face-sensitive regions interact with additional object-unselective quantification modules to obtain individual estimates of facial age.},
  subject  = {Gesicht},
  language = {en}
}