@article{BrevesDodel2021, author = {Breves, Priska and Dodel, Nicola}, title = {The influence of cybersickness and the media devices' mobility on the persuasive effects of 360° commercials}, series = {Multimedia Tools and Applications}, volume = {80}, journal = {Multimedia Tools and Applications}, number = {18}, issn = {1573-7721}, doi = {10.1007/s11042-021-11057-x}, url = {http://nbn-resolving.de/urn:nbn:de:bvb:20-opus-269194}, pages = {27299-27322}, year = {2021}, abstract = {With the rise of immersive media, advertisers have started to use 360° commercials to engage and persuade consumers. Two experiments were conducted to address research gaps and to validate the positive impact of 360° commercials in realistic settings. The first study (N = 62) compared the effects of 360° commercials using either a mobile cardboard head-mounted display (HMD) or a laptop. This experiment was conducted in the participants' living rooms and incorporated individual feelings of cybersickness as a moderator. The participants who experienced the 360° commercial with the HMD reported higher spatial presence and more positive product evaluations, but their purchase intentions increased only when their reported cybersickness was low. The second experiment (N = 197) was conducted online and analyzed the impact of 360° commercials that were experienced with mobile (smartphone/tablet) or static (laptop/desktop) devices instead of HMDs. The positive effects of omnidirectional videos were stronger when participants used mobile devices.}, language = {en} } @article{GlemarecLugrinBosseretal.2021, author = {Gl{\'e}marec, Yann and Lugrin, Jean-Luc and Bosser, Anne-Gwenn and Collins Jackson, Aryana and Buche, C{\'e}dric and Latoschik, Marc Erich}, title = {Indifferent or Enthusiastic? Virtual Audiences Animation and Perception in Virtual Reality}, series = {Frontiers in Virtual Reality}, volume = {2}, journal = {Frontiers in Virtual Reality}, doi = {10.3389/frvir.2021.666232}, url = {http://nbn-resolving.de/urn:nbn:de:bvb:20-opus-259328}, pages = {666232}, year = {2021}, abstract = {In this paper, we present a virtual audience simulation system for Virtual Reality (VR). The system implements an audience perception model controlling the nonverbal behaviors of virtual spectators, such as facial expressions or postures. Groups of virtual spectators are animated by a set of nonverbal behavior rules representing a particular audience attitude (e.g., indifferent or enthusiastic). Each rule specifies a nonverbal behavior category (posture, head movement, facial expression, or gaze direction) as well as three parameters: type, frequency, and proportion. In a first user study, we asked participants to pretend to be a speaker in VR and then to create sets of nonverbal behavior parameters to simulate different attitudes. Participants manipulated the nonverbal behaviors of a single virtual spectator to match specific levels of engagement and opinion toward them. In a second user study, we used these parameters to design different types of virtual audiences with our nonverbal behavior rules and evaluated how they were perceived. Our results demonstrate our system's ability to create virtual audiences with three distinct perceived attitudes: indifferent, critical, and enthusiastic.
The analysis of the results also led to a set of recommendations and guidelines regarding attitudes and expressions for the future design of audiences for VR therapy and training applications.}, language = {en} } @article{DoellingerWienrichLatoschik2021, author = {D{\"o}llinger, Nina and Wienrich, Carolin and Latoschik, Marc Erich}, title = {Challenges and opportunities of immersive technologies for mindfulness meditation: a systematic review}, series = {Frontiers in Virtual Reality}, volume = {2}, journal = {Frontiers in Virtual Reality}, doi = {10.3389/frvir.2021.644683}, url = {http://nbn-resolving.de/urn:nbn:de:bvb:20-opus-259047}, pages = {644683}, year = {2021}, abstract = {Mindfulness is considered an important factor of an individual's subjective well-being. Consequently, Human-Computer Interaction (HCI) has investigated approaches that strengthen mindfulness, i.e., by developing multimedia technologies to support mindfulness meditation. These approaches often use smartphones, tablets, or consumer-grade desktop systems to allow everyday usage in users' private lives or in the scope of organized therapies. Virtual, Augmented, and Mixed Reality (VR, AR, MR; in short: XR) significantly extend the design space for such approaches. XR covers a wide range of potential sensory stimulation, perceptive and cognitive manipulations, content presentation, interaction, and agency. These capabilities are linked to typical XR-specific perceptions that are conceptually closely related to mindfulness research, such as (virtual) presence and (virtual) embodiment. However, successfully exploiting XR to strengthen mindfulness requires a systematic analysis of the potential interrelations and influencing mechanisms between XR technology, its properties, factors, and phenomena, and existing models and theories of the construct of mindfulness. This article reports such a systematic analysis of XR-related research from HCI and the life sciences to determine the extent to which existing research frameworks on HCI and mindfulness can be applied to XR technologies, the potential of XR technologies to support mindfulness, and open research gaps. Fifty papers from the ACM Digital Library and the National Institutes of Health's National Library of Medicine (PubMed), with and without empirical efficacy evaluation, were included in our analysis. The results reveal that empirical research on XR-based mindfulness support currently focuses mainly on therapy and therapeutic outcomes. Furthermore, most of the currently investigated XR-supported mindfulness interactions are limited to vocally guided meditations within nature-inspired virtual environments. While an analysis of empirical research on those systems did not reveal differences in mindfulness compared to non-mediated mindfulness practices, various design proposals illustrate that XR has the potential to provide interactive and body-based innovations for mindfulness practice. We propose a structured approach for future work to specify and further explore the potential of XR as a mindfulness support.
The resulting framework provides design guidelines for XR-based mindfulness support based on the elements and psychological mechanisms of XR interactions.}, language = {en} } @article{WienrichDoellingerHein2021, author = {Wienrich, Carolin and D{\"o}llinger, Nina and Hein, Rebecca}, title = {Behavioral Framework of Immersive Technologies (BehaveFIT): How and why virtual reality can support behavioral change processes}, series = {Frontiers in Virtual Reality}, volume = {2}, journal = {Frontiers in Virtual Reality}, doi = {10.3389/frvir.2021.627194}, url = {http://nbn-resolving.de/urn:nbn:de:bvb:20-opus-258796}, year = {2021}, abstract = {The design and evaluation of assistive technologies to support behavior change processes have become an essential topic within the field of human-computer interaction research in general and the field of immersive intervention technologies in particular. The mechanisms and success of behavior change techniques and interventions are broadly investigated in the field of psychology. However, it is not always easy to adapt these psychological findings to the context of immersive technologies. The lack of a theoretical foundation also leads to a lack of explanation as to why and how immersive interventions support behavior change processes. The Behavioral Framework for Immersive Technologies (BehaveFIT) addresses this gap by 1) presenting an intelligible categorization and condensation of psychological barriers and immersive features, 2) suggesting a mapping that shows why and how immersive technologies can help to overcome these barriers, and 3) proposing a generic prediction path that enables a structured, theory-based approach to the development and evaluation of immersive interventions. These three steps explain how BehaveFIT can be used and include guiding questions for each step. Further, two use cases illustrate the usage of BehaveFIT. Thus, the present paper contributes guidance for the design and evaluation of immersive interventions, showing that immersive interventions can support behavior change processes and explaining and predicting why and how they can bridge the intention-behavior gap.}, language = {en} }