@article{LandmannBreilHuesteggeetal.2024,
  author   = {Landmann, Eva and Breil, Christina and Huestegge, Lynn and B{\"o}ckler, Anne},
  title    = {The semantics of gaze in person perception: a novel qualitative-quantitative approach},
  journal  = {Scientific Reports},
  volume   = {14},
  number   = {1},
  issn     = {2045-2322},
  doi      = {10.1038/s41598-024-51331-0},
  url      = {http://nbn-resolving.de/urn:nbn:de:bvb:20-opus-361413},
  year     = {2024},
  abstract = {Interpreting gaze behavior is essential in evaluating interaction partners, yet the 'semantics of gaze' in dynamic interactions are still poorly understood. We aimed to comprehensively investigate effects of gaze behavior patterns in different conversation contexts, using a two-step, qualitative-quantitative procedure. Participants watched video clips of single persons listening to autobiographic narrations by another (invisible) person. The listener's gaze behavior was manipulated in terms of gaze direction, frequency and direction of gaze shifts, and blink frequency; emotional context was manipulated through the valence of the narration (neutral/negative). In Experiment 1 (qualitative-exploratory), participants freely described which states and traits they attributed to the listener in each condition, allowing us to identify relevant aspects of person perception and to construct distinct rating scales that were implemented in Experiment 2 (quantitative-confirmatory). Results revealed systematic and differential meanings ascribed to the listener's gaze behavior. For example, rapid blinking and fast gaze shifts were rated more negatively (e.g., restless and unnatural) than slower gaze behavior; downward gaze was evaluated more favorably (e.g., empathetic) than other gaze aversion types, especially in the emotionally negative context. Overall, our study contributes to a more systematic understanding of flexible gaze semantics in social interaction.},
  language = {en},
}

@article{BreilHuesteggeBoeckler2022,
  author   = {Breil, Christina and Huestegge, Lynn and B{\"o}ckler, Anne},
  title    = {From eye to arrow: Attention capture by direct gaze requires more than just the eyes},
  journal  = {Attention, Perception \& Psychophysics},
  volume   = {84},
  number   = {1},
  issn     = {1943-393X},
  doi      = {10.3758/s13414-021-02382-2},
  url      = {http://nbn-resolving.de/urn:nbn:de:bvb:20-opus-273206},
  pages    = {64--75},
  year     = {2022},
  abstract = {Human attention is strongly attracted by direct gaze and sudden onset motion. The sudden direct-gaze effect refers to the processing advantage for targets appearing on peripheral faces that suddenly establish eye contact. Here, we investigate the necessity of social information for attention capture by (sudden onset) ostensive cues. Six experiments involving 204 participants applied (1) naturalistic faces, (2) arrows, (3) schematic eyes, (4) naturalistic eyes, or schematic facial configurations (5) without or (6) with head turn to an attention-capture paradigm. Trials started with two stimuli oriented towards the observer and two stimuli pointing into the periphery. Simultaneous to target presentation, one direct stimulus changed to averted and one averted stimulus changed to direct, yielding a 2 × 2 factorial design with direction and motion cues being absent or present. We replicated the (sudden) direct-gaze effect for photographic faces, but found no corresponding effects in Experiments 2-6. Hence, a holistic and socially meaningful facial context seems vital for attention capture by direct gaze. STATEMENT OF SIGNIFICANCE: The present study highlights the significance of context information for social attention. Our findings demonstrate that the direct-gaze effect, that is, the prioritization of direct gaze over averted gaze, critically relies on the presentation of a meaningful holistic and naturalistic facial context. This pattern of results is evidence in favor of early effects of surrounding social information on attention capture by direct gaze.},
  language = {en},
}

@article{KaethnerKueblerHalder2015,
  author   = {K{\"a}thner, Ivo and K{\"u}bler, Andrea and Halder, Sebastian},
  title    = {Rapid {P300} brain-computer interface communication with a head-mounted display},
  journal  = {Frontiers in Neuroscience},
  volume   = {9},
  number   = {207},
  doi      = {10.3389/fnins.2015.00207},
  url      = {http://nbn-resolving.de/urn:nbn:de:bvb:20-opus-148520},
  year     = {2015},
  abstract = {Visual ERP (P300) based brain-computer interfaces (BCIs) allow for fast and reliable spelling and are intended as a muscle-independent communication channel for people with severe paralysis. However, they require the presentation of visual stimuli in the field of view of the user. A head-mounted display could allow convenient presentation of visual stimuli in situations, where mounting a conventional monitor might be difficult or not feasible (e.g., at a patient's bedside). To explore if similar accuracies can be achieved with a virtual reality (VR) headset compared to a conventional flat screen monitor, we conducted an experiment with 18 healthy participants. We also evaluated it with a person in the locked-in state (LIS) to verify that usage of the headset is possible for a severely paralyzed person. Healthy participants performed online spelling with three different display methods. In one condition a 5 x 5 letter matrix was presented on a conventional 22 inch TFT monitor. Two configurations of the VR headset were tested. In the first (glasses A), the same 5 x 5 matrix filled the field of view of the user. In the second (glasses B), single letters of the matrix filled the field of view of the user. The participant in the LIS tested the VR headset on three different occasions (glasses A condition only). For healthy participants, average online spelling accuracies were 94\% (15.5 bits/min) using three flash sequences for spelling with the monitor and glasses A and 96\% (16.2 bits/min) with glasses B. In one session, the participant in the LIS reached an online spelling accuracy of 100\% (10 bits/min) using the glasses A condition. We also demonstrated that spelling with one flash sequence is possible with the VR headset for healthy users (mean: 32.1 bits/min, maximum reached by one user: 71.89 bits/min at 100\% accuracy). We conclude that the VR headset allows for rapid P300 BCI communication in healthy users and may be a suitable display option for severely paralyzed persons.},
  language = {en},
}