@article{KaufmannHerwegKuebler2014,
  author   = {Kaufmann, Tobias and Herweg, Andreas and K{\"u}bler, Andrea},
  title    = {Toward brain-computer interface based wheelchair control utilizing tactually-evoked event-related potentials},
  doi      = {10.1186/1743-0003-11-7},
  url      = {http://nbn-resolving.de/urn:nbn:de:bvb:20-opus-110042},
  year     = {2014},
  abstract = {Background: People with severe disabilities, e.g. due to neurodegenerative disease, depend on technology that allows for accurate wheelchair control. For those who cannot operate a wheelchair with a joystick, brain-computer interfaces (BCI) may offer a valuable option. Technology depending on visual or auditory input may not be feasible, as these modalities are dedicated to the processing of environmental stimuli (e.g. recognition of obstacles, ambient noise). Herein we thus validated the feasibility of a BCI based on tactually-evoked event-related potentials (ERP) for wheelchair control. Furthermore, we investigated the use of a dynamic stopping method to improve the speed of the tactile BCI system. Methods: The positions of four tactile stimulators represented navigation directions (left thigh: move left; right thigh: move right; abdomen: move forward; lower neck: move backward), and N = 15 participants delivered navigation commands by focusing their attention on the desired tactile stimulus in an oddball paradigm. Results: Participants navigated a virtual wheelchair through a building, and eleven participants successfully completed the task of reaching 4 checkpoints in the building. The virtual wheelchair was equipped with simulated shared-control sensors (collision avoidance), yet these sensors were rarely needed. Conclusion: We conclude that most participants achieved tactile ERP-BCI control sufficient to reliably operate a wheelchair and that dynamic stopping was of high value for tactile ERP classification. Finally, this paper discusses the feasibility of tactile ERPs for BCI-based wheelchair control.},
  language = {en}
}

@article{KleihHerwegKaufmannetal.2015,
  author   = {Kleih, Sonja C. and Herweg, Andreas and Kaufmann, Tobias and Staiger-S{\"a}lzer, Pit and Gerstner, Natascha and K{\"u}bler, Andrea},
  title    = {The WIN-speller: a new intuitive auditory brain-computer interface spelling application},
  series   = {Frontiers in Neuroscience},
  volume   = {9},
  journal  = {Frontiers in Neuroscience},
  doi      = {10.3389/fnins.2015.00346},
  url      = {http://nbn-resolving.de/urn:nbn:de:bvb:20-opus-125972},
  pages    = {346},
  year     = {2015},
  abstract = {The objective of this study was to test the usability of a new auditory Brain-Computer Interface (BCI) application for communication. We introduce a word-based, intuitive auditory spelling paradigm, the WIN-speller. In the WIN-speller, letters are grouped by words, such as the word KLANG representing the letters A, G, K, L, and N. Thereby, the decoding step between perceiving a code and translating it to the stimuli it represents becomes superfluous. We tested 11 healthy volunteers and four end-users with motor impairment in the copy spelling mode. Spelling was successful with an average accuracy of 84\% in the healthy sample. Three of the end-users communicated with average accuracies of 80\% or higher, while one user was not able to communicate reliably. Even though further evaluation is required, the WIN-speller represents a potential alternative for BCI-based communication in end-users.},
  language = {en}
}

@phdthesis{Herweg2016,
  author   = {Herweg, Andreas},
  title    = {Beyond the state of the art, towards intuitive and reliable non-visual Brain-Computer-Interfacing},
  url      = {http://nbn-resolving.de/urn:nbn:de:bvb:20-opus-133447},
  school   = {Universit{\"a}t W{\"u}rzburg},
  year     = {2016},
  abstract = {For the present work, three main goals were formulated: Goal 1: to design a tactile BCI used for mobility which is intuitive (G1.1), reliable and fast while being usable by participants aged 50 years and above. Goal 2: to design an auditory BCI used for communication which is intuitive and reliable. Goal 3: to examine the effects of training on tactile and auditory BCI performance. Three studies were performed to achieve these goals. In the first study, nine participants aged above 50 years performed a five-session training, after which eight participants were able to navigate a virtual wheelchair with a mean accuracy above 95\% and an ITR above 20 bits/min. In the second study, 15 participants, four of them end-users with motor impairment, were able to communicate meaningfully with high accuracies using an auditory BCI. In the third study, nine healthy and nine visually impaired participants (regarded as sensory experts for non-visual perception) performed tactile, auditory and visual (for healthy participants only) copy tasks. Participants with trained perception significantly outperformed control participants for tactile but not for auditory performance. The tactile performance of sensory experts was on a par with the visual performance of control participants. We were able to demonstrate the viability of intuitive gaze-independent tactile and auditory BCI. Our tactile BCI performed on levels similar to those of visual BCI, outperforming current tactile BCI protocols. Furthermore, we were able to demonstrate a significant beneficial effect of training on tactile BCI performance. Our results demonstrate previously untapped potential for tactile BCI and avenues for future research in the field of gaze-independent BCI.},
  subject  = {Gehirn-Computer-Schnittstelle},
  language = {en}
}