@article{KaufmannHerwegKuebler2014,
  author   = {Kaufmann, Tobias and Herweg, Andreas and K{\"u}bler, Andrea},
  title    = {Toward brain-computer interface based wheelchair control utilizing tactually-evoked event-related potentials},
  doi      = {10.1186/1743-0003-11-7},
  url      = {http://nbn-resolving.de/urn:nbn:de:bvb:20-opus-110042},
  year     = {2014},
  abstract = {Background: People with severe disabilities, e.g. due to neurodegenerative disease, depend on technology that allows for accurate wheelchair control. For those who cannot operate a wheelchair with a joystick, brain-computer interfaces (BCI) may offer a valuable option. Technology depending on visual or auditory input may not be feasible as these modalities are dedicated to processing of environmental stimuli (e.g. recognition of obstacles, ambient noise). Herein we thus validated the feasibility of a BCI based on tactually-evoked event-related potentials (ERP) for wheelchair control. Furthermore, we investigated use of a dynamic stopping method to improve speed of the tactile BCI system. Methods: Positions of four tactile stimulators represented navigation directions (left thigh: move left; right thigh: move right; abdomen: move forward; lower neck: move backward) and N = 15 participants delivered navigation commands by focusing their attention on the desired tactile stimulus in an oddball paradigm. Results: Participants navigated a virtual wheelchair through a building, and eleven participants successfully completed the task of reaching 4 checkpoints in the building. The virtual wheelchair was equipped with simulated shared-control sensors (collision avoidance), yet these sensors were rarely needed. Conclusion: We conclude that most participants achieved tactile ERP-BCI control sufficient to reliably operate a wheelchair and that dynamic stopping was of high value for tactile ERP classification. Finally, this paper discusses the feasibility of tactile ERPs for BCI-based wheelchair control.},
  language = {en}
}

@article{KleihHerwegKaufmannetal.2015,
  author   = {Kleih, Sonja C. and Herweg, Andreas and Kaufmann, Tobias and Staiger-S{\"a}lzer, Pit and Gerstner, Natascha and K{\"u}bler, Andrea},
  title    = {The WIN-speller: a new intuitive auditory brain-computer interface spelling application},
  journal  = {Frontiers in Neuroscience},
  volume   = {9},
  pages    = {346},
  doi      = {10.3389/fnins.2015.00346},
  url      = {http://nbn-resolving.de/urn:nbn:de:bvb:20-opus-125972},
  year     = {2015},
  abstract = {The objective of this study was to test the usability of a new auditory Brain-Computer Interface (BCI) application for communication. We introduce a word-based, intuitive auditory spelling paradigm, the WIN-speller. In the WIN-speller, letters are grouped by words, such as the word KLANG representing the letters A, G, K, L, and N. Thereby, the decoding step between perceiving a code and translating it to the stimuli it represents becomes superfluous. We tested 11 healthy volunteers and four end-users with motor impairment in the copy spelling mode. Spelling was successful with an average accuracy of 84\% in the healthy sample. Three of the end-users communicated with average accuracies of 80\% or higher, while one user was not able to communicate reliably. Even though further evaluation is required, the WIN-speller represents a potential alternative for BCI-based communication in end-users.},
  language = {en}
}