@phdthesis{Baunach2012, author = {Baunach, Marcel}, title = {Advances in Distributed Real-Time Sensor/Actuator Systems Operation - Operating Systems, Communication, and Application Design Concepts -}, url = {http://nbn-resolving.de/urn:nbn:de:bvb:20-opus-76489}, school = {Universit{\"a}t W{\"u}rzburg}, year = {2012}, abstract = {This work takes a close look at several quite different research areas related to the design of networked embedded sensor/actuator systems. The variety of the topics illustrates the potential complexity of current sensor network applications; especially when enriched with actuators for proactivity and environmental interaction. Besides their conception, development, installation and long-term operation, we'll mainly focus on more "low-level" aspects: Compositional hardware and software design, task cooperation and collaboration, memory management, and real-time operation will be addressed from a local node perspective. In contrast, inter-node synchronization, communication, as well as sensor data acquisition, aggregation, and fusion will be discussed from a rather global network view. The diversity in the concepts was intentionally accepted to finally facilitate the reliable implementation of truly complex systems. In particular, these should go beyond the usual "sense and transmit of sensor data", but show how powerful today's networked sensor/actuator systems can be despite their low computational performance and constrained hardware: If their resources are only coordinated efficiently!}, subject = {Eingebettetes System}, language = {en} } @article{BayerPruckner2023, author = {Bayer, Daniel and Pruckner, Marco}, title = {A digital twin of a local energy system based on real smart meter data}, series = {Energy Informatics}, volume = {6}, journal = {Energy Informatics}, doi = {10.1186/s42162-023-00263-6}, url = {http://nbn-resolving.de/urn:nbn:de:bvb:20-opus-357456}, year = {2023}, abstract = {The steadily increasing usage of smart meters generates a valuable amount of high-resolution data about the individual energy consumption and production of local energy systems. Private households install more and more photovoltaic systems, battery storage and big consumers like heat pumps. Thus, our vision is to augment these collected smart meter time series of a complete system (e.g., a city, town or complex institutions like airports) with the previously named components added via simulation. We, therefore, propose a novel digital twin of such an energy system based solely on a complete set of smart meter data including additional building data. Based on the additional geospatial data, the twin is intended to represent the addition of the abovementioned components as realistically as possible. Outputs of the twin can be used as a decision support for either system operators where to strengthen the system or for individual households where and how to install photovoltaic systems and batteries. Meanwhile, the first local energy system operators have had such smart meter data of almost all residential consumers for several years.
We acquire those of an exemplary operator and discuss a case study presenting some features of our digital twin and highlighting the value of the combination of smart meter and geospatial data.}, language = {en} } @article{BeckerCaminitiFiorellaetal.2013, author = {Becker, Martin and Caminiti, Saverio and Fiorella, Donato and Francis, Louise and Gravino, Pietro and Haklay, Mordechai (Muki) and Hotho, Andreas and Loreto, Vittorio and Mueller, Juergen and Ricchiuti, Ferdinando and Servedio, Vito D. P. and Sirbu, Alina and Tria, Francesca}, title = {Awareness and Learning in Participatory Noise Sensing}, series = {PLOS ONE}, volume = {8}, journal = {PLOS ONE}, number = {12}, issn = {1932-6203}, doi = {10.1371/journal.pone.0081638}, url = {http://nbn-resolving.de/urn:nbn:de:bvb:20-opus-127675}, pages = {e81638}, year = {2013}, abstract = {The development of ICT infrastructures has facilitated the emergence of new paradigms for looking at society and the environment over the last few years. Participatory environmental sensing, i.e. directly involving citizens in environmental monitoring, is one example, which is hoped to encourage learning and enhance awareness of environmental issues. In this paper, an analysis of the behaviour of individuals involved in noise sensing is presented. Citizens have been involved in noise measuring activities through the WideNoise smartphone application. This application has been designed to record both objective (noise samples) and subjective (opinions, feelings) data. The application has been open to be used freely by anyone and has been widely employed worldwide. In addition, several test cases have been organised in European countries. Based on the information submitted by users, an analysis of emerging awareness and learning is performed. The data show that changes in the way the environment is perceived after repeated usage of the application do appear. Specifically, users learn how to recognise different noise levels they are exposed to. Additionally, the subjective data collected indicate an increased user involvement in time and a categorisation effect between pleasant and less pleasant environments.}, language = {en} } @article{BencurovaShityakovSchaacketal.2022, author = {Bencurova, Elena and Shityakov, Sergey and Schaack, Dominik and Kaltdorf, Martin and Sarukhanyan, Edita and Hilgarth, Alexander and Rath, Christin and Montenegro, Sergio and Roth, G{\"u}nter and Lopez, Daniel and Dandekar, Thomas}, title = {Nanocellulose composites as smart devices with chassis, light-directed DNA Storage, engineered electronic properties, and chip integration}, series = {Frontiers in Bioengineering and Biotechnology}, volume = {10}, journal = {Frontiers in Bioengineering and Biotechnology}, issn = {2296-4185}, doi = {10.3389/fbioe.2022.869111}, url = {http://nbn-resolving.de/urn:nbn:de:bvb:20-opus-283033}, year = {2022}, abstract = {The rapid development of green and sustainable materials opens up new possibilities in the field of applied research. Such materials include nanocellulose composites that can integrate many components into composites and provide a good chassis for smart devices. In our study, we evaluate four approaches for turning a nanocellulose composite into an information storage or processing device: 1) nanocellulose can be a suitable carrier material and protect information stored in DNA.
2) Nucleotide-processing enzymes (polymerase and exonuclease) can be controlled by light after fusing them with light-gating domains; nucleotide substrate specificity can be changed by mutation or pH change (read-in and read-out of the information). 3) Semiconductors and electronic capabilities can be achieved: we show that nanocellulose is rendered electronic by iodine treatment replacing silicon including microstructures. Nanocellulose semiconductor properties are measured, and the resulting potential including single-electron transistors (SET) and their properties are modeled. Electric current can also be transported by DNA through G-quadruplex DNA molecules; these as well as classical silicon semiconductors can easily be integrated into the nanocellulose composite. 4) To elaborate upon miniaturization and integration for a smart nanocellulose chip device, we demonstrate pH-sensitive dyes in nanocellulose, nanopore creation, and kinase micropatterning on bacterial membranes as well as digital PCR micro-wells. Future application potential includes nano-3D printing and fast molecular processors (e.g., SETs) integrated with DNA storage and conventional electronics. This would also lead to environment-friendly nanocellulose chips for information processing as well as smart nanocellulose composites for biomedical applications and nano-factories.}, language = {en} } @phdthesis{Betz2005, author = {Betz, Christian}, title = {Scalable authoring of diagnostic case based training systems}, url = {http://nbn-resolving.de/urn:nbn:de:bvb:20-opus-17885}, school = {Universit{\"a}t W{\"u}rzburg}, year = {2005}, abstract = {Diagnostic Case Based Training Systems (D-CBT) provide learners with a means to learn and exercise knowledge in a realistic context. In medical education, D-CBT Systems present virtual patients to the learners who are asked to examine, diagnose and state therapies for these patients. Due to a number of conflicting and changing requirements, e.g. time for learning and authoring effort, several systems have been developed so far. These systems range from simple, easy-to-use presentation systems to highly complex knowledge based systems supporting explorative learning. This thesis presents an approach and tools to create D-CBT systems from existing sources (documents, e.g. dismissal records) using existing tools (word processors): Authors annotate and extend the documents to model the knowledge. A scalable knowledge representation is able to capture the content on multiple levels, from simple to highly structured knowledge. Thus, authoring of D-CBT systems requires fewer prerequisites and pre-knowledge and is faster than approaches using specialized authoring environments. Also, authors can iteratively add and structure more knowledge to adapt training cases to their learners' needs.
The thesis also discusses the application of the same approach to other domains, especially to knowledge acquisition for the Semantic Web.}, subject = {Computerunterst{\"u}tztes Lernen}, language = {en} } @phdthesis{Binder2006, author = {Binder, Andreas}, title = {Die stochastische Wissenschaft und zwei Teilsysteme eines Web-basierten Informations- und Anwendungssystems zu ihrer Etablierung}, url = {http://nbn-resolving.de/urn:nbn:de:bvb:20-opus-26146}, school = {Universit{\"a}t W{\"u}rzburg}, year = {2006}, abstract = {Das stochastische Denken, die Bernoullische Stochastik und dessen informationstechnologische Umsetzung, namens Stochastikon stellen die Grundlage f{\"u}r das Verst{\"a}ndnis und die erfolgreiche Nutzung einer stochastischen Wissenschaft dar. Im Rahmen dieser Arbeit erfolgt eine Kl{\"a}rung des Begriffs des stochastischen Denkens, eine anschauliche Darstellung der von Elart von Collani entwickelten Bernoullischen Stochastik und eine Beschreibung von Stochastikon. Dabei werden sowohl das Gesamtkonzept von Stochastikon, sowie die Ziele, Aufgaben und die Realisierung der beiden Teilsysteme namens Mentor und Encyclopedia vorgestellt. Das stochastische Denken erlaubt eine realit{\"a}tsnahe Sichtweise der Dinge, d.h. eine Sichtweise, die mit den menschlichen Beobachtungen und Erfahrungen im Einklang steht und somit die Unsicherheit {\"u}ber zuk{\"u}nftige Entwicklungen ber{\"u}cksichtigt. Der in diesem Kontext verwendete Begriff der Unsicherheit bezieht sich ausschließlich auf zuk{\"u}nftige Entwicklungen und {\"a}ußert sich in Variabilit{\"a}t. Quellen der Unsicherheit sind einerseits die menschliche Ignoranz und andererseits der Zufall. Unter Ignoranz wird hierbei die Unwissenheit des Menschen {\"u}ber die unbekannten, aber feststehenden Fakten verstanden, die die Anfangsbedingungen der zuk{\"u}nftigen Entwicklung repr{\"a}sentieren. Die Bernoullische Stochastik liefert ein Regelwerk und erm{\"o}glicht die Entwicklung eines quantitativen Modells zur Beschreibung der Unsicherheit und expliziter Einbeziehung der beiden Quellen Ignoranz und Zufall. Das Modell tr{\"a}gt den Namen Bernoulli-Raum und bildet die Grundlage f{\"u}r die Herleitung quantitativer Verfahren, um zuverl{\"a}ssige und genaue Aussagen sowohl {\"u}ber die nicht-existente zuf{\"a}llige Zukunft (Vorhersageverfahren), als auch {\"u}ber die unbekannte feststehende Vergangenheit (Messverfahren) zu treffen. Das Softwaresystem Stochastikon implementiert die Bernoullische Stochastik in Form einer Reihe autarker, miteinander kommunizierender Teilsysteme. Ziel des Teilsystems Encyclopedia ist die Bereitstellung und Bewertung stochastischen Wissens. Das Teilsystem Mentor dient der Unterst{\"u}tzung des Anwenders bei der Probleml{\"o}sungsfindung durch Identifikation eines richtigen Modells bzw. eines korrekten Bernoulli-Raums. Der L{\"o}sungsfindungsprozess selber enth{\"a}lt keinerlei Unsicherheit. Die ganze Unsicherheit steckt in der L{\"o}sung, d.h. im Bernoulli-Raum, der explizit die vorhandene Unwissenheit (Ignoranz) und den vorliegenden Zufall abdeckend enth{\"a}lt.}, subject = {Stochastik}, language = {de} } @phdthesis{Binzenhoefer2007, author = {Binzenh{\"o}fer, Andreas}, title = {Performance Analysis of Structured Overlay Networks}, doi = {10.25972/OPUS-2250}, url = {http://nbn-resolving.de/urn:nbn:de:bvb:20-opus-26291}, school = {Universit{\"a}t W{\"u}rzburg}, year = {2007}, abstract = {Overlay networks establish logical connections between users on top of the physical network.
While randomly connected overlay networks provide only a best effort service, a new generation of structured overlay systems based on Distributed Hash Tables (DHTs) was proposed by the research community. However, there is still a lack of understanding of the performance of such DHTs. Additionally, those architectures are highly distributed and therefore appear as a black box to the operator. Yet an operator does not want to lose control over his system and needs to be able to continuously observe and examine its current state at runtime. This work addresses both problems and shows how the solutions can be combined into a more self-organizing overlay concept. First, we evaluate the performance of structured overlay networks under different aspects and thereby illuminate to what extent such architectures are able to support carrier-grade applications. Second, to enable operators to monitor and understand their deployed system in more detail, we introduce both active as well as passive methods to gather information about the current state of the overlay network.}, subject = {Overlay-Netz}, language = {en} } @phdthesis{Bleier2023, author = {Bleier, Michael}, title = {Underwater Laser Scanning - Refractive Calibration, Self-calibration and Mapping for 3D Reconstruction}, isbn = {978-3-945459-45-4}, doi = {10.25972/OPUS-32269}, url = {http://nbn-resolving.de/urn:nbn:de:bvb:20-opus-322693}, school = {Universit{\"a}t W{\"u}rzburg}, year = {2023}, abstract = {There is great interest in affordable, precise and reliable metrology underwater: Archaeologists want to document artifacts in situ with high detail. In marine research, biologists require the tools to monitor coral growth and geologists need recordings to model sediment transport. Furthermore, for offshore construction projects, maintenance and inspection, millimeter-accurate measurements of defects and offshore structures are essential. While the process of digitizing individual objects and complete sites on land is well understood and standard methods, such as Structure from Motion or terrestrial laser scanning, are regularly applied, precise underwater surveying with high resolution is still a complex and difficult task. Applying optical scanning techniques in water is challenging due to reduced visibility caused by turbidity and light absorption. However, optical underwater scanners provide significant advantages in terms of achievable resolution and accuracy compared to acoustic systems. This thesis proposes an underwater laser scanning system and the algorithms for creating dense and accurate 3D scans in water. It is based on laser triangulation and the main optical components are an underwater camera and a cross-line laser projector. The prototype is configured with a motorized yaw axis for capturing scans from a tripod. Alternatively, it is mounted to a moving platform for mobile mapping. The main focus lies on the refractive calibration of the underwater camera and laser projector, the image processing and 3D reconstruction. For highest accuracy, the refraction at the individual media interfaces must be taken into account. This is addressed by an optimization-based calibration framework using a physical-geometric camera model derived from an analytical formulation of a ray-tracing projection model. In addition to scanning underwater structures, this work presents the 3D acquisition of semi-submerged structures and the correction of refraction effects.
As in-situ calibration in water is complex and time-consuming, the challenge of transferring an in-air scanner calibration to water without re-calibration is investigated, as well as self-calibration techniques for structured light. The system was successfully deployed in various configurations for both static scanning and mobile mapping. An evaluation of the calibration and 3D reconstruction using reference objects and a comparison of free-form surfaces in clear water demonstrate the high accuracy potential in the range of one millimeter to less than one centimeter, depending on the measurement distance. Mobile underwater mapping and motion compensation based on visual-inertial odometry are demonstrated using a new optical underwater scanner based on fringe projection. Continuous registration of individual scans allows the acquisition of 3D models from an underwater vehicle. RGB images captured in parallel are used to create 3D point clouds of underwater scenes in full color. 3D maps are useful to the operator during the remote control of underwater vehicles and provide the building blocks to enable offshore inspection and surveying tasks. The advancing automation of the measurement technology will allow non-experts to use it, significantly reduce acquisition time and increase accuracy, making underwater metrology more cost-effective.}, subject = {Selbstkalibrierung}, language = {en} } @phdthesis{Borrmann2018, author = {Borrmann, Dorit}, title = {Multi-modal 3D mapping - Combining 3D point clouds with thermal and color information}, isbn = {978-3-945459-20-1}, issn = {1868-7474}, doi = {10.25972/OPUS-15708}, url = {http://nbn-resolving.de/urn:nbn:de:bvb:20-opus-157085}, school = {Universit{\"a}t W{\"u}rzburg}, year = {2018}, abstract = {Imagine a technology that automatically creates a full 3D thermal model of an environment and detects temperature peaks in it. For better orientation in the model it is enhanced with color information. The current state of the art for analyzing temperature-related issues is thermal imaging. It is relevant not only for energy efficiency but also for securing important infrastructure such as power supplies and temperature regulation systems. Monitoring and analysis of the data for a large building is tedious as stable conditions need to be guaranteed for several hours and detailed notes about the pose and the environment conditions for each image must be taken. For some applications repeated measurements are necessary to monitor changes over time. The analysis of the scene is only possible through expertise and experience. This thesis proposes a robotic system that creates a full 3D model of the environment with color and thermal information by combining thermal imaging with the technology of terrestrial laser scanning. The addition of a color camera facilitates the interpretation of the data and allows for other application areas. The data from all sensors collected at different positions is joined in one common reference frame using calibration and scan matching. The first part of the thesis deals with 3D point cloud processing with the emphasis on accessing point cloud data efficiently, detecting planar structures in the data and registering multiple point clouds into one common coordinate system. The second part covers the autonomous exploration and data acquisition with a mobile robot with the objective to minimize the unseen area in 3D space.
Furthermore, the combination of the different modalities (color images, thermal images and point cloud data) through calibration is elaborated. The last part presents applications for the collected data. Among these are methods to detect the structure of building interiors for reconstruction purposes and subsequent detection and classification of windows. A system to project the gathered thermal information back into the scene is presented as well as methods to improve the color information and to join separately acquired point clouds and photo series. A full multi-modal 3D model contains all the relevant geometric information about the recorded scene and enables an expert to fully analyze it off-site. The technology clears the path for automatically detecting points of interest, thereby helping the expert to analyze the heat flow as well as localize and identify heat leaks. The concept is modular and neither limited to achieving energy efficiency nor restricted to the use in combination with a mobile platform. It also finds its application in fields such as archaeology and geology and can be extended by further sensors.}, subject = {Punktwolke}, language = {en} } @article{BrevesDodel2021, author = {Breves, Priska and Dodel, Nicola}, title = {The influence of cybersickness and the media devices' mobility on the persuasive effects of 360° commercials}, series = {Multimedia Tools and Applications}, volume = {80}, journal = {Multimedia Tools and Applications}, number = {18}, issn = {1573-7721}, doi = {10.1007/s11042-021-11057-x}, url = {http://nbn-resolving.de/urn:nbn:de:bvb:20-opus-269194}, pages = {27299-27322}, year = {2021}, abstract = {With the rise of immersive media, advertisers have started to use 360° commercials to engage and persuade consumers. Two experiments were conducted to address research gaps and to validate the positive impact of 360° commercials in realistic settings. The first study (N = 62) compared the effects of 360° commercials using either a mobile cardboard head-mounted display (HMD) or a laptop. This experiment was conducted in the participants' living rooms and incorporated individual feelings of cybersickness as a moderator. The participants who experienced the 360° commercial with the HMD reported higher spatial presence and product evaluation, but their purchase intentions were only increased when their reported cybersickness was low. The second experiment (N = 197) was conducted online and analyzed the impact of 360° commercials that were experienced with mobile (smartphone/tablet) or static (laptop/desktop) devices instead of HMDs.
The positive effects of omnidirectional videos were stronger when participants used mobile devices.}, language = {en} } @techreport{BrischKasslerVestinetal.2023, type = {Working Paper}, author = {Brisch, Fabian and Kassler, Andreas and Vestin, Jonathan and Pieska, Marcus and Amend, Markus}, title = {Accelerating Transport Layer Multipath Packet Scheduling for 5G-ATSSS}, series = {KuVS Fachgespr{\"a}ch - W{\"u}rzburg Workshop on Modeling, Analysis and Simulation of Next-Generation Communication Networks 2023 (WueWoWAS'23)}, journal = {KuVS Fachgespr{\"a}ch - W{\"u}rzburg Workshop on Modeling, Analysis and Simulation of Next-Generation Communication Networks 2023 (WueWoWAS'23)}, doi = {10.25972/OPUS-32205}, url = {http://nbn-resolving.de/urn:nbn:de:bvb:20-opus-322052}, pages = {4}, year = {2023}, abstract = {Utilizing multiple access networks such as 5G, 4G, and Wi-Fi simultaneously can lead to increased robustness, resiliency, and capacity for mobile users. However, transparently implementing packet distribution over multiple paths within the core of the network faces multiple challenges including scalability to a large number of customers, low latency, and high-capacity packet processing requirements. In this paper, we offload congestion-aware multipath packet scheduling to a smartNIC. However, such hardware acceleration faces multiple challenges due to programming language and platform limitations. We implement multipath schedulers of different complexity in P4 in order to cope with dynamically changing path capacities. Using testbed measurements, we show that our CMon scheduler, which monitors path congestion in the data plane and dynamically adjusts scheduling weights for the different paths based on path state information, can process packets at more than 3.5 Mpps with 25 μs latency.}, language = {en} } @article{BuchheimKellerKoetschanetal.2011, author = {Buchheim, Mark A. and Keller, Alexander and Koetschan, Christian and F{\"o}rster, Frank and Merget, Benjamin and Wolf, Matthias}, title = {Internal Transcribed Spacer 2 (nu ITS2 rRNA) Sequence-Structure Phylogenetics: Towards an Automated Reconstruction of the Green Algal Tree of Life}, series = {PLoS ONE}, volume = {6}, journal = {PLoS ONE}, number = {2}, doi = {10.1371/journal.pone.0016931}, url = {http://nbn-resolving.de/urn:nbn:de:bvb:20-opus-140866}, pages = {e16931}, year = {2011}, abstract = {Background: Chloroplast-encoded genes (matK and rbcL) have been formally proposed for use in DNA barcoding efforts targeting embryophytes. Extending such a protocol to chlorophytan green algae, though, is fraught with problems including non-homology (matK) and heterogeneity that prevents the creation of a universal PCR toolkit (rbcL). Some have advocated the use of the nuclear-encoded, internal transcribed spacer two (ITS2) as an alternative to the traditional chloroplast markers. However, the ITS2 is broadly perceived to be insufficiently conserved or to be confounded by introgression or biparental inheritance patterns, precluding its broad use in phylogenetic reconstruction or as a DNA barcode. A growing body of evidence has shown that simultaneous analysis of nucleotide data with secondary structure information can overcome at least some of the limitations of ITS2. The goal of this investigation was to assess the feasibility of an automated, sequence-structure approach for analysis of ITS2 data from a large sampling of phylum Chlorophyta.
Methodology/Principal Findings: Sequences and secondary structures from 591 chlorophycean, 741 trebouxiophycean and 938 ulvophycean algae, all obtained from the ITS2 Database, were aligned using a sequence structure-specific scoring matrix. Phylogenetic relationships were reconstructed by Profile Neighbor-Joining coupled with a sequence structure-specific, general time reversible substitution model. Results from analyses of the ITS2 data were robust at multiple nodes and showed considerable congruence with results from published phylogenetic analyses. Conclusions/Significance: Our observations on the power of automated, sequence-structure analyses of ITS2 to reconstruct phylum-level phylogenies of the green algae validate this approach to assessing diversity for large sets of chlorophytan taxa. Moreover, our results indicate that objections to the use of ITS2 for DNA barcoding should be weighed against the utility of an automated, data analysis approach with demonstrated power to reconstruct evolutionary patterns for highly divergent lineages.}, language = {en} } @article{BuchinBuchinByrkaetal.2012, author = {Buchin, Kevin and Buchin, Maike and Byrka, Jaroslaw and N{\"o}llenburg, Martin and Okamoto, Yoshio and Silveira, Rodrigo I. and Wolff, Alexander}, title = {Drawing (Complete) Binary Tanglegrams}, series = {Algorithmica}, volume = {62}, journal = {Algorithmica}, doi = {10.1007/s00453-010-9456-3}, url = {http://nbn-resolving.de/urn:nbn:de:bvb:20-opus-124622}, pages = {309-332}, year = {2012}, abstract = {A binary tanglegram is a drawing of a pair of rooted binary trees whose leaf sets are in one-to-one correspondence; matching leaves are connected by inter-tree edges. For applications, for example, in phylogenetics, it is essential that both trees are drawn without edge crossings and that the inter-tree edges have as few crossings as possible. It is known that finding a tanglegram with the minimum number of crossings is NP-hard and that the problem is fixed-parameter tractable with respect to that number. We prove that under the Unique Games Conjecture there is no constant-factor approximation for binary trees. We show that the problem is NP-hard even if both trees are complete binary trees. For this case we give an O(n^3)-time 2-approximation and a new, simple fixed-parameter algorithm. We show that the maximization version of the dual problem for binary trees can be reduced to a version of MaxCut for which the algorithm of Goemans and Williamson yields a 0.878-approximation.}, language = {en} } @phdthesis{Budig2018, author = {Budig, Benedikt}, title = {Extracting Spatial Information from Historical Maps: Algorithms and Interaction}, edition = {1. Auflage}, publisher = {W{\"u}rzburg University Press}, address = {W{\"u}rzburg}, isbn = {978-3-95826-092-4}, doi = {10.25972/WUP-978-3-95826-093-1}, url = {http://nbn-resolving.de/urn:nbn:de:bvb:20-opus-160955}, school = {W{\"u}rzburg University Press}, pages = {viii, 160}, year = {2018}, abstract = {Historical maps are fascinating documents and a valuable source of information for scientists of various disciplines. Many of these maps are available as scanned bitmap images, but in order to make them searchable in useful ways, a structured representation of the contained information is desirable. This book deals with the extraction of spatial information from historical maps. This cannot be expected to be solved fully automatically (since it involves difficult semantics), but is also too tedious to be done manually at scale.
The methodology used in this book combines the strengths of both computers and humans: it describes efficient algorithms to largely automate information extraction tasks and pairs these algorithms with smart user interactions to handle what is not understood by the algorithm. The effectiveness of this approach is shown for various kinds of spatial documents from the 16th to the early 20th century.}, subject = {Karte}, language = {en} } @article{BugaScholzKumaretal.2012, author = {Buga, Ana-Maria and Scholz, Claus J{\"u}rgen and Kumar, Senthil and Herndon, James G. and Alexandru, Dragos and Cojocaru, Gabriel Radu and Dandekar, Thomas and Popa-Wagner, Aurel}, title = {Identification of New Therapeutic Targets by Genome-Wide Analysis of Gene Expression in the Ipsilateral Cortex of Aged Rats after Stroke}, series = {PLoS One}, volume = {7}, journal = {PLoS One}, number = {12}, doi = {10.1371/journal.pone.0050985}, url = {http://nbn-resolving.de/urn:nbn:de:bvb:20-opus-130657}, pages = {e50985}, year = {2012}, abstract = {Background: Because most human stroke victims are elderly, studies of experimental stroke in the aged rather than the young rat model may be optimal for identifying clinically relevant cellular responses, as well as for pinpointing beneficial interventions. Methodology/Principal Findings: We employed the Affymetrix platform to analyze the whole-gene transcriptome following temporary ligation of the middle cerebral artery in aged and young rats. The correspondence, heat map, and dendrogram analyses independently suggest a differential, age-group-specific behaviour of major gene clusters after stroke. Overall, the pattern of gene expression strongly suggests that the response of the aged rat brain is qualitatively rather than quantitatively different from the young, i.e. the total number of regulated genes is comparable in the two age groups, but the aged rats had great difficulty in mounting a timely response to stroke. Our study indicates that four genes related to neuropathic syndrome, stress, anxiety disorders and depression (Acvr1c, Cort, Htr2b and Pnoc) may have an impaired response to stroke in aged rats. New therapeutic options in aged rats may also include Calcrl, Cyp11b1, Prcp, Cebpa, Cfd, Gpnmb, Fcgr2b, Fcgr3a, Tnfrsf26, Adam 17 and Mmp14. An unexpected target is the enzyme 3-hydroxy-3-methylglutaryl-Coenzyme A synthase 1 in aged rats, a key enzyme in the cholesterol synthesis pathway. Post-stroke axonal growth was compromised in both age groups. Conclusion/Significance: We suggest that a multi-stage, multimodal treatment in aged animals may be more likely to produce positive results. Such a therapeutic approach should be focused on tissue restoration but should also address other aspects of patient post-stroke therapy such as neuropathic syndrome, stress, anxiety disorders, depression, neurotransmission and blood pressure.}, language = {en} } @phdthesis{Busch2016, author = {Busch, Stephan}, title = {Robust, Flexible and Efficient Design for Miniature Satellite Systems}, isbn = {978-3-945459-10-2}, doi = {10.25972/OPUS-13652}, url = {http://nbn-resolving.de/urn:nbn:de:bvb:20-opus-136523}, school = {Universit{\"a}t W{\"u}rzburg}, year = {2016}, abstract = {Small satellites contribute significantly to the rapidly evolving innovation in space engineering, in particular in distributed space systems for global Earth observation and communication services.
Significant mass reduction by miniaturization, increased utilization of commercial high-tech components, and in particular standardization are the key drivers for modern miniature space technology. This thesis addresses key fields in research and development on miniature satellite technology regarding efficiency, flexibility, and robustness. Here, these challenges are addressed by the University of Wuerzburg's advanced pico-satellite bus, realizing a generic modular satellite architecture and standardized interfaces for all subsystems. The modular platform ensures reusability, scalability, and increased testability due to its flexible subsystem interface which allows efficient and compact integration of the entire satellite in a plug-and-play manner. Besides systematic design for testability, a high degree of operational robustness is achieved by the consistent implementation of redundancy of crucial subsystems. This is combined with efficient fault detection, isolation and recovery mechanisms. Thus, the UWE-3 platform, and in particular the on-board data handling system and the electrical power system, offers one of the most efficient pico-satellite architectures launched in recent years and provides a solid basis for future extensions. The in-orbit performance results of the pico-satellite UWE-3 are presented and summarize successful operations since its launch in 2013. Several software extensions and adaptations have been uploaded to UWE-3 increasing its capabilities. Thus, a very flexible platform for in-orbit software experiments and for evaluations of innovative concepts was provided and tested.}, subject = {Kleinsatellit}, language = {en} } @phdthesis{Boehler2005, author = {B{\"o}hler, Elmar}, title = {Algebraic closures in complexity theory}, url = {http://nbn-resolving.de/urn:nbn:de:bvb:20-opus-16106}, school = {Universit{\"a}t W{\"u}rzburg}, year = {2005}, abstract = {We use algebraic closures and structures which are derived from these in complexity theory. We classify problems with Boolean circuits and Boolean constraints according to their complexity. We transfer algebraic structures to structural complexity. We use the generation problem to classify important complexity classes.}, subject = {Komplexit{\"a}tstheorie}, language = {en} } @article{BoehlerCreignouGalotaetal.2012, author = {B{\"o}hler, Elmar and Creignou, Nadia and Galota, Matthias and Reith, Steffen and Schnoor, Henning and Vollmer, Heribert}, title = {Complexity Classifications for Different Equivalence and Audit Problems for Boolean Circuits}, series = {Logical Methods in Computer Science}, volume = {8}, journal = {Logical Methods in Computer Science}, number = {3:27}, doi = {10.2168/LMCS-8(3:27)2012}, url = {http://nbn-resolving.de/urn:nbn:de:bvb:20-opus-131121}, pages = {1 -- 25}, year = {2012}, abstract = {We study Boolean circuits as a representation of Boolean functions and consider different equivalence, audit, and enumeration problems. For a number of restricted sets of gate types (bases) we obtain efficient algorithms, while for all other gate types we show these problems are at least NP-hard.}, language = {en} } @article{CaliskanCrouchGiddinsetal.2022, author = {Caliskan, Aylin and Crouch, Samantha A. W.
and Giddins, Sara and Dandekar, Thomas and Dangwal, Seema}, title = {Progeria and aging — Omics based comparative analysis}, series = {Biomedicines}, volume = {10}, journal = {Biomedicines}, number = {10}, issn = {2227-9059}, doi = {10.3390/biomedicines10102440}, url = {http://nbn-resolving.de/urn:nbn:de:bvb:20-opus-289868}, year = {2022}, abstract = {Since ancient times aging has also been regarded as a disease, and humankind has always strived to extend the natural lifespan. Analyzing the genes involved in aging and disease allows for finding important indicators and biological markers for pathologies and possible therapeutic targets. An example of the use of omics technologies is the research regarding aging and the rare and fatal premature aging syndrome progeria (Hutchinson-Gilford progeria syndrome, HGPS). In our study, we focused on the in silico analysis of differentially expressed genes (DEGs) in progeria and aging, using a publicly available RNA-Seq dataset (GEO dataset GSE113957) and a variety of bioinformatics tools. Despite the GSE113957 RNA-Seq dataset being well-known and frequently analyzed, the RNA-Seq data shared by Fleischer et al. is far from exhausted and reusing and repurposing the data still reveals new insights. By analyzing the literature citing the use of the dataset and subsequently conducting a comparative analysis comparing the RNA-Seq data analyses of different subsets of the dataset (healthy children, nonagenarians and progeria patients), we identified several genes involved in both natural aging and progeria (KRT8, KRT18, ACKR4, CCL2, UCP2, ADAMTS15, ACTN4P1, WNT16, IGFBP2). Further analyzing these genes and the pathways involved indicated their possible roles in aging, suggesting the need for further in vitro and in vivo research. In this paper, we (1) compare "normal aging" (nonagenarians vs. healthy children) and progeria (HGPS patients vs. healthy children), (2) enlist genes possibly involved in both the natural aging process and progeria, including the first mention of IGFBP2 in progeria, (3) predict miRNAs and interactomes for WNT16 (hsa-mir-181a-5p), UCP2 (hsa-mir-26a-5p and hsa-mir-124-3p), and IGFBP2 (hsa-mir-124-3p, hsa-mir-126-3p, and hsa-mir-27b-3p), (4) demonstrate the compatibility of well-established R packages for RNA-Seq analysis for researchers interested but not yet familiar with this kind of analysis, and (5) present comparative proteomics analyses to show an association between our RNA-Seq data analyses and corresponding changes in protein expression.}, language = {en} } @article{CarolusWienrichToerkeetal.2021, author = {Carolus, Astrid and Wienrich, Carolin and T{\"o}rke, Anna and Friedel, Tobias and Schwietering, Christian and Sperzel, Mareike}, title = {'Alexa, I feel for you!' Observers' empathetic reactions towards a conversational agent}, series = {Frontiers in Computer Science}, volume = {3}, journal = {Frontiers in Computer Science}, doi = {10.3389/fcomp.2021.682982}, url = {http://nbn-resolving.de/urn:nbn:de:bvb:20-opus-258807}, year = {2021}, abstract = {Conversational agents and smart speakers have grown in popularity offering a variety of options for use, which are available through intuitive speech operation. In contrast to the standard dyad of a single user and a device, voice-controlled operations can be observed by further attendees resulting in new, more social usage scenarios. 
Referring to the concept of 'media equation' and to research on the idea of 'computers as social actors,' which describes the potential of technology to trigger emotional reactions in users, this paper examines the capacity of smart speakers to elicit empathy in observers of interactions. In a 2 × 2 online experiment, 140 participants watched a video of a man talking to an Amazon Echo either rudely or neutrally (factor 1), addressing it as 'Alexa' or 'Computer' (factor 2). Controlling for participants' trait empathy, the rude treatment results in participants' significantly higher ratings of empathy with the device, compared to the neutral treatment. The form of address had no significant effect. Results were independent of the participants' gender and usage experience, indicating a rather universal effect, which confirms the basic idea of the media equation. Implications for users, developers and researchers were discussed in the light of (future) omnipresent voice-based technology interaction scenarios.}, language = {en} }