@phdthesis{Zink2024,
  author   = {Zink, Johannes},
  title    = {Algorithms for Drawing Graphs and Polylines with Straight-Line Segments},
  school   = {Universit{\"a}t W{\"u}rzburg},
  year     = {2024},
  doi      = {10.25972/OPUS-35475},
  url      = {http://nbn-resolving.de/urn:nbn:de:bvb:20-opus-354756},
  abstract = {Graphs provide a key means to model relationships between entities. They consist of vertices representing the entities, and edges representing relationships between pairs of entities. To make people conceive the structure of a graph, it is almost inevitable to visualize the graph. We call such a visualization a graph drawing. Moreover, we have a straight-line graph drawing if each vertex is represented as a point (or a small geometric object, e.g., a rectangle) and each edge is represented as a line segment between its two vertices. A polyline is a very simple straight-line graph drawing, where the vertices form a sequence according to which the vertices are connected by edges. An example of a polyline in practice is a GPS trajectory. The underlying road network, in turn, can be modeled as a graph. This book addresses problems that arise when working with straight-line graph drawings and polylines. In particular, we study algorithms for recognizing certain graphs representable with line segments, for generating straight-line graph drawings, and for abstracting polylines. In the first part, we first examine, how and in which time we can decide whether a given graph is a stick graph, that is, whether its vertices can be represented as vertical and horizontal line segments on a diagonal line, which intersect if and only if there is an edge between them. We then consider the visual complexity of graphs. Specifically, we investigate, for certain classes of graphs, how many line segments are necessary for any straight-line graph drawing, and whether three (or more) different slopes of the line segments are sufficient to draw all edges. Last, we study the question, how to assign (ordered) colors to the vertices of a graph with both directed and undirected edges such that no neighboring vertices get the same color and colors are ascending along directed edges. Here, the special property of the considered graph is that the vertices can be represented as intervals that overlap if and only if there is an edge between them. The latter problem is motivated by an application in automated drawing of cable plans with vertical and horizontal line segments, which we cover in the second part. We describe an algorithm that gets the abstract description of a cable plan as input, and generates a drawing that takes into account the special properties of these cable plans, like plugs and groups of wires. We then experimentally evaluate the quality of the resulting drawings. In the third part, we study the problem of abstracting (or simplifying) a single polyline and a bundle of polylines. In this problem, the objective is to remove as many vertices as possible from the given polyline(s) while keeping each resulting polyline sufficiently similar to its original course (according to a given similarity measure).},
  subject  = {Graphenzeichnen},
  language = {en}
}

@article{TranGiaWechBleyetal.2015,
  author   = {Tran-Gia, Johannes and Wech, Tobias and Bley, Thorsten and K{\"o}stler, Herbert},
  title    = {Model-Based Acceleration of {Look-Locker} {T1} Mapping},
  series   = {PLoS One},
  journal  = {PLoS One},
  volume   = {10},
  number   = {4},
  pages    = {e0122611},
  year     = {2015},
  doi      = {10.1371/journal.pone.0122611},
  url      = {http://nbn-resolving.de/urn:nbn:de:bvb:20-opus-126436},
  abstract = {Mapping the longitudinal relaxation time \(T_1\) has widespread applications in clinical MRI as it promises a quantitative comparison of tissue properties across subjects and scanners. Due to the long scan times of conventional methods, however, the use of quantitative MRI in clinical routine is still very limited. In this work, an acceleration of Inversion-Recovery Look-Locker (IR-LL) \(T_1\) mapping is presented. A model-based algorithm is used to iteratively enforce an exponential relaxation model to a highly undersampled radially acquired IR-LL dataset obtained after the application of a single global inversion pulse. Using the proposed technique, a \(T_1\) map of a single slice with 1.6mm in-plane resolution and 4mm slice thickness can be reconstructed from data acquired in only 6s. A time-consuming segmented IR experiment was used as gold standard for \(T_1\) mapping in this work. In the subsequent validation study, the model-based reconstruction of a single-inversion IR-LL dataset exhibited a \(T_1\) difference of less than 2.6\% compared to the segmented IR-LL reference in a phantom consisting of vials with \(T_1\) values between 200ms and 3000ms. In vivo, the \(T_1\) difference was smaller than 5.5\% in WM and GM of seven healthy volunteers. Additionally, the \(T_1\) values are comparable to standard literature values. Despite the high acceleration, all model-based reconstructions were of a visual quality comparable to fully sampled references. Finally, the reproducibility of the \(T_1\) mapping method was demonstrated in repeated acquisitions. In conclusion, the presented approach represents a promising way for fast and accurate \(T_1\) mapping using radial IR-LL acquisitions without the need of any segmentation.},
  language = {en}
}

@article{RoelofsBlackburnLindahletal.2023,
  author   = {Roelofs, Freek and Blackburn, Lindy and Lindahl, Greg and Doeleman, Sheperd S. and Johnson, Michael D. and Arras, Philipp and Chatterjee, Koushik and Emami, Razieh and Fromm, Christian and Fuentes, Antonio and Knollm{\"u}ller, Jakob and Kosogorov, Nikita and M{\"u}ller, Hendrik and Patel, Nimesh and Raymond, Alexander and Tiede, Paul and Traianou, Efthalia and Vega, Justin},
  title    = {The {ngEHT} analysis challenges},
  series   = {Galaxies},
  journal  = {Galaxies},
  volume   = {11},
  number   = {1},
  year     = {2023},
  issn     = {2075-4434},
  doi      = {10.3390/galaxies11010012},
  url      = {http://nbn-resolving.de/urn:nbn:de:bvb:20-opus-304976},
  abstract = {The next-generation Event Horizon Telescope (ngEHT) will be a significant enhancement of the Event Horizon Telescope (EHT) array, with \(\sim\)10 new antennas and instrumental upgrades of existing antennas. The increased uv-coverage, sensitivity, and frequency coverage allow a wide range of new science opportunities to be explored. The ngEHT Analysis Challenges have been launched to inform the development of the ngEHT array design, science objectives, and analysis pathways. For each challenge, synthetic EHT and ngEHT datasets are generated from theoretical source models and released to the challenge participants, who analyze the datasets using image reconstruction and other methods. The submitted analysis results are evaluated with quantitative metrics. In this work, we report on the first two ngEHT Analysis Challenges. These have focused on static and dynamical models of M87* and Sgr A* and shown that high-quality movies of the extended jet structure of M87* and near-horizon hourly timescale variability of Sgr A* can be reconstructed by the reference ngEHT array in realistic observing conditions using current analysis algorithms. We identify areas where there is still room for improvement of these algorithms and analysis strategies. Other science cases and arrays will be explored in future challenges.},
  language = {en}
}

@article{McIlroyPassfieldHolmbergetal.2021,
  author   = {McIlroy, Benjamin and Passfield, Louis and Holmberg, Hans-Christer and Sperlich, Billy},
  title    = {Virtual training of endurance cycling - A summary of strengths, weaknesses, opportunities and threats},
  series   = {Frontiers in Sports and Active Living},
  journal  = {Frontiers in Sports and Active Living},
  volume   = {3},
  year     = {2021},
  doi      = {10.3389/fspor.2021.631101},
  url      = {http://nbn-resolving.de/urn:nbn:de:bvb:20-opus-258876},
  abstract = {Virtual online training has emerged as one of the top 20 worldwide fitness trends for 2021 and continues to develop rapidly. Although this allows the cycling community to engage in virtual training and competition, critical evaluation of virtual training platforms is limited. Here, we discuss the strengths, weaknesses, opportunities and threats associated with virtual training technology and cycling in an attempt to enhance awareness of such aspects. Strengths include immersive worlds, innovative drafting mechanics, and versatility. Weaknesses include questionable data accuracy, inadequate strength and reliability of power-speed algorithms. Opportunities exist for expanding strategic partnerships with major cycling races, brands, and sponsors and improving user experience with the addition of video capture and ``e-coaching.'' Threats are present in the form of cheating during competition, and a lack of uptake and acceptance by a broader community.},
  language = {en}
}

@article{ElsebergBorrmannNuechter2013,
  author   = {Elseberg, Jan and Borrmann, Dorit and N{\"u}chter, Andreas},
  title    = {Algorithmic Solutions for Computing Precise Maximum Likelihood {3D} Point Clouds from Mobile Laser Scanning Platforms},
  series   = {Remote Sensing},
  journal  = {Remote Sensing},
  volume   = {5},
  number   = {11},
  pages    = {5871--5906},
  year     = {2013},
  doi      = {10.3390/rs5115871},
  url      = {http://nbn-resolving.de/urn:nbn:de:bvb:20-opus-130478},
  abstract = {Mobile laser scanning puts high requirements on the accuracy of the positioning systems and the calibration of the measurement system. We present a novel algorithmic approach for calibration with the goal of improving the measurement accuracy of mobile laser scanners. We describe a general framework for calibrating mobile sensor platforms that estimates all configuration parameters for any arrangement of positioning sensors, including odometry. In addition, we present a novel semi-rigid Simultaneous Localization and Mapping (SLAM) algorithm that corrects the vehicle position at every point in time along its trajectory, while simultaneously improving the quality and precision of the entire acquired point cloud. Using this algorithm, the temporary failure of accurate external positioning systems or the lack thereof can be compensated for. We demonstrate the capabilities of the two newly proposed algorithms on a wide variety of datasets.},
  language = {en}
}