@article{GrobTritscherGruebeletal.2021,
  author   = {Grob, Robin and Tritscher, Clara and Gr{\"u}bel, Kornelia and Stigloher, Christian and Groh, Claudia and Fleischmann, Pauline N. and R{\"o}ssler, Wolfgang},
  title    = {Johnston's organ and its central projections in Cataglyphis desert ants},
  journal  = {Journal of Comparative Neurology},
  volume   = {529},
  number   = {8},
  pages    = {2138--2155},
  year     = {2021},
  doi      = {10.1002/cne.25077},
  url      = {http://nbn-resolving.de/urn:nbn:de:bvb:20-opus-225679},
  abstract = {The Johnston's organ (JO) in the insect antenna is a multisensory organ involved in several navigational tasks, including wind-compass orientation, flight control, graviception, and, possibly, magnetoreception. Here we investigate the three-dimensional anatomy of the JO and its neuronal projections into the brain of the desert ant Cataglyphis, a marvelous long-distance navigator. The JO of C. nodus workers consists of 40 scolopidia comprising three sensory neurons each. The number of scolopidia varies slightly between sexes (female/male) and castes (worker/queen). Individual scolopidia attach to the intersegmental membrane between the pedicel and flagellum of the antenna and line up in a ring-like organization. Three JO nerves project along the two antennal nerve branches into the brain. Anterograde double staining of the antennal afferents revealed that JO receptor neurons project to several distinct neuropils in the central brain. The T5 tract projects into the antennal mechanosensory and motor center (AMMC), while the T6 tract bypasses the AMMC via the saddle and forms collaterals terminating in the posterior slope (PS) (T6I), the ventral complex (T6II), and the ventrolateral protocerebrum (T6III). Double labeling of JO and ocellar afferents revealed that input from the JO and visual information from the ocelli converge in tight apposition in the PS. The general JO anatomy and its central projection patterns resemble those in honeybees and Drosophila. The multisensory nature of the JO, together with its projections to multisensory neuropils in the ant brain, likely serves the synchronization and calibration of different sensory modalities during the ontogeny of navigation in Cataglyphis.},
  language = {en}
}

@article{EhrenfeldHerbortButz2013,
  author   = {Ehrenfeld, Stephan and Herbort, Oliver and Butz, Martin V.},
  title    = {Modular neuron-based body estimation: maintaining consistency over different limbs, modalities, and frames of reference},
  journal  = {Frontiers in Computational Neuroscience},
  volume   = {7},
  number   = {148},
  year     = {2013},
  doi      = {10.3389/fncom.2013.00148},
  url      = {http://nbn-resolving.de/urn:nbn:de:bvb:20-opus-122253},
  abstract = {This paper addresses, from a modeling perspective, the question of how the brain maintains a probabilistic body state estimate over time. The neural Modular Modality Frame (nMMF) model simulates such a body state estimation process by continuously integrating redundant, multimodal body state information sources. The body state estimate itself is distributed over separate but bidirectionally interacting modules. nMMF compares the incoming sensory and present body state information across the interacting modules and fuses the information sources accordingly. At the same time, nMMF enforces body state estimation consistency across the modules.
nMMF is able to detect conflicting sensory information and consequently decreases the influence of implausible sensory sources on the fly. In contrast to the previously published Modular Modality Frame (MMF) model, nMMF offers a biologically plausible neural implementation based on distributed, probabilistic population codes. Besides its neural plausibility, the neural encoding has the advantage of enabling (a) additional probabilistic information flow across the separate body state estimation modules and (b) the representation of arbitrary probability distributions of a body state. The results show that the neural estimates can detect and decrease the impact of false sensory information, can propagate conflicting information across modules, and can improve overall estimation accuracy through additional module interactions. Even bodily illusions, such as the rubber hand illusion, can be simulated with nMMF. We conclude with an outlook on the potential of modeling human data and of invoking goal-directed behavioral control.},
  language = {en}
}

@article{GerdesWieserAlpers2014,
  author   = {Gerdes, Antje B. M. and Wieser, Matthias J. and Alpers, Georg W.},
  title    = {Emotional pictures and sounds: a review of multimodal interactions of emotion cues in multiple domains},
  journal  = {Frontiers in Psychology},
  volume   = {5},
  pages    = {1351},
  year     = {2014},
  issn     = {1664-1078},
  doi      = {10.3389/fpsyg.2014.01351},
  url      = {http://nbn-resolving.de/urn:nbn:de:bvb:20-opus-114548},
  abstract = {In everyday life, multiple sensory channels jointly trigger emotional experiences, and one channel may alter processing in another. For example, seeing an emotional facial expression and hearing the voice's emotional tone jointly create the emotional experience. This example, in which auditory and visual input relate to social communication, has gained considerable attention from researchers. However, interactions of visual and auditory emotional information are not limited to social communication but extend to much broader contexts, including human, animal, and environmental cues. In this article, we review current research on audiovisual emotion processing beyond face-voice stimuli in order to develop a broader perspective on multimodal interactions in emotion processing. We argue that current concepts of multimodality should be extended to consider an ecologically valid variety of stimuli in audiovisual emotion processing. To this end, we provide an overview of studies that investigated emotional sounds and their interactions with complex pictures of scenes. In addition to behavioral studies, we focus on neuroimaging as well as electro- and peripheral-physiological findings. Furthermore, we integrate these findings and identify similarities and differences. We conclude with suggestions for future research.},
  language = {en}
}