@phdthesis{Gorelashvili2019,
  author   = {Gorelashvili, Maximilian Georg},
  title    = {Investigation of megakaryopoiesis and the acute phase of ischemic stroke by advanced fluorescence microscopy},
  doi      = {10.25972/OPUS-18600},
  url      = {http://nbn-resolving.de/urn:nbn:de:bvb:20-opus-186002},
  school   = {Universit{\"a}t W{\"u}rzburg},
  year     = {2019},
  abstract = {In mammals, anucleate platelets circulate in the blood flow and are primarily responsible for maintaining functional hemostasis. Platelets are generated in the bone marrow (BM) by megakaryocytes (MKs), which mainly reside directly next to the BM sinusoids to release proplatelets into the blood. MKs originate from hematopoietic stem cells and are thought to migrate from the endosteal to the vascular niche during their maturation, a process, which is, despite being intensively investigated, still not fully understood. Long-term intravital two photon microscopy (2PM) of MKs and vasculature in murine bone marrow was performed and mean squared displacement analysis of cell migration was performed. The MKs exhibited no migration, but wobbling-like movement on time scales of 3 h. Directed cell migration always results in non-random spatial distribution. Thus, a computational modelling algorithm simulating random MK distribution using real 3D light-sheet fluorescence microscopy data sets was developed. Direct comparison of real and simulated random MK distributions showed, that MKs exhibit a strong bias to vessel-contact. However, this bias is not caused by cell migration, as non-vessel-associated MKs were randomly distributed in the intervascular space. Furthermore, simulation studies revealed that MKs strongly impair migration of other cells in the bone marrow by acting as large-sized obstacles. MKs are thought to migrate from the regions close to the endosteum towards the vasculature during their maturation process. MK distribution as a function of their localization relative to the endosteal regions of the bones was investigated by light sheet fluorescence microscopy (LSFM). The results show no bone-region dependent distribution of MKs. Taken together, the newly established methods and obtained results refute the model of MK migration during their maturation. Ischemia reperfusion (I/R) injury is a frequent complication of cerebral ischemic stroke, where brain tissue damage occurs despite successful recanalization. Platelets, endothelial cells and immune cells have been demonstrated to affect the progression of I/R injury in experimental mouse models 24 h after recanalization. However, the underlying Pathomechanisms, especially in the first hours after recanalization, are poorly understood. Here, LSFM, 2PM and complemental advanced image analysis workflows were established for investigation of platelets, the vasculature and neutrophils in ischemic brains. Quantitative analysis of thrombus formation in the ipsilateral and contralateral hemispheres at different time points revealed that platelet aggregate formation is minimal during the first 8 h after recanalization and occurs in both hemispheres. Considering that maximal tissue damage already is present at this time point, it can be concluded that infarct progression and neurological damage do not result from platelet aggregated formation. Furthermore, LSFM allowed to confirm neutrophil infiltration into the infarcted hemisphere and, here, the levels of endothelial cell marker PECAM1 were strongly reduced. However, further investigations must be carried out to clearly identify the role of neutrophils and the endothelial cells in I/R injury.},
  subject  = {Fluoreszenzmikroskopie},
  language = {en},
}

@phdthesis{Baier2018,
  author   = {Baier, Pablo A.},
  title    = {Simulator for Minimally Invasive Vascular Interventions: Hardware and Software},
  isbn     = {978-3-945459-22-5},
  doi      = {10.25972/OPUS-16119},
  url      = {http://nbn-resolving.de/urn:nbn:de:bvb:20-opus-161190},
  school   = {Universit{\"a}t W{\"u}rzburg},
  pages    = {118},
  year     = {2018},
  abstract = {A complete simulation system is proposed that can be used as an educational tool by physicians in training basic skills of Minimally Invasive Vascular Interventions. In the first part, a surface model is developed to assemble arteries having a planar segmentation. It is based on Sweep Surfaces and can be extended to T- and Y-like bifurcations. A continuous force vector field is described, representing the interaction between the catheter and the surface. The computation time of the force field is almost unaffected when the resolution of the artery is increased. The mechanical properties of arteries play an essential role in the study of the circulatory system dynamics, which has been becoming increasingly important in the treatment of cardiovascular diseases. In Virtual Reality Simulators, it is crucial to have a tissue model that responds in real time. In this work, the arteries are discretized by a two dimensional mesh and the nodes are connected by three kinds of linear springs. Three tissue layers (Intima, Media, Adventitia) are considered and, starting from the stretch-energy density, some of the elasticity tensor components are calculated. The physical model linearizes and homogenizes the material response, but it still contemplates the geometric nonlinearity. In general, if the arterial stretch varies by 1\% or less, then the agreement between the linear and nonlinear models is trustworthy. In the last part, the physical model of the wire proposed by Konings is improved. As a result, a simpler and more stable method is obtained to calculate the equilibrium configuration of the wire. In addition, a geometrical method is developed to perform relaxations. It is particularly useful when the wire is hindered in the physical method because of the boundary conditions. The physical and the geometrical methods are merged, resulting in efficient relaxations. Tests show that the shape of the virtual wire agrees with the experiment. The proposed algorithm allows real-time executions and the hardware to assemble the simulator has a low cost.},
  subject  = {Computersimulation},
  language = {en},
}

@phdthesis{Weber2016,
  author   = {Weber, Stefan},
  title    = {Simulation Studies on the New Small Wheel Shielding of the {ATLAS} Experiment and Design and Construction of a Test Facility for Gaseous Detectors},
  url      = {http://nbn-resolving.de/urn:nbn:de:bvb:20-opus-133084},
  school   = {Universit{\"a}t W{\"u}rzburg},
  year     = {2016},
  abstract = {In this thesis two main projects are presented, both aiming at the overall goal of particle detector development. In the first part of the thesis detailed shielding studies are discussed, focused on the shielding section of the planned New Small Wheel as part of the ATLAS detector upgrade. Those studies supported the discussions within the upgrade community and decisions made on the final design of the New Small Wheel. The second part of the thesis covers the design, construction and functional demonstration of a test facility for gaseous detectors at the University of W{\"u}rzburg. Additional studies on the trigger system of the facility are presented. Especially the precision and reliability of reference timing signals were investigated.},
  subject  = {Teilchendetektor},
  language = {en},
}

@unpublished{Reiss2012,
  author   = {Reiss, Harald},
  title    = {Physical time and existence of time holes in non-transparent media},
  url      = {http://nbn-resolving.de/urn:nbn:de:bvb:20-opus-67268},
  year     = {2012},
  note     = {Unpublished manuscript, deposited in the W{\"u}rzburg OPUS repository},
  abstract = {The analysis presented in this paper applies to experimental situations where observers or objects to be studied (both stationary, with respect to each other) are located in environments the optical thickness of which is strongly different. By their large optical thickness, non-transparent media are clearly distinguished from their transparent counterparts. Non-transparent media comprise thin metallic films, packed or fluidised beds, the Earth's crust, and even dark clouds and other cosmological objects. As a representative example, a non-transparent slab is subjected to transient disturbances, and a rigorous analysis is presented whether physical time reasonably could be constructed under such condition. The analysis incorporates mapping functions that correlate physical events, e, in non-transparent media, with their images, f(e), tentatively located on a standard physical time scale. The analysis demonstrates, however, that physical time, in its rigorous sense, does not exist under non-transparency conditions. A proof of this conclusion is attempted in three steps: i) the theorem "there is no time without space and events" is accepted, (ii) images f[e(s,t)] do not constitute a dense, uncountably infinite set, and (iii) sets of images that are not uncountably infinite do not create physical time but only time-like sequences. As a consequence, mapping f[e(s,t)] in non-transparent space does not create physical analogues to the mathematical structure of the ordered, dense half-set R+ of real numbers, and reverse mapping, f-1f[e(s,t)] would not allow unique identification and reconstruction of original events from their images. In these cases, causality and determinism, as well as invariance of physical processes under time reversal, might be violated. Existence of time holes could be possible, as follows from the sequence of images, f[e(s,t)], that is not uncountably infinite, in contrast to R+. Practical impacts are expected for understanding physical diffusion-like, radiative transfer processes, stability models to protect superconductors against quenchs or for description of their transient local pair density and critical currents. Impacts would be expected also in mathematical formulations (differential equations) of classical physics, in relativity and perhaps in quantum mechanics, all as far as transient processes in non-transparent space would be concerned. An interesting problem is whether temporal cloaking (a time hole) in a transparent medium, as very recently reported in the literature, can be explained by the present analysis. The analysis is not restricted to objects of laboratory dimensions: Because of obviously existing radiation transfer analogues, it is tempting to discuss consequences also for much larger structures in particular if an origin of time is postulated.},
  subject  = {Strahlungstransport},
  language = {en},
}

@article{GrosHovestadtPoethke2006,
  author   = {Gros, Andreas and Hovestadt, Thomas and Poethke, Hans-Joachim},
  title    = {Evolution of local adaptions in dispersal strategies},
  url      = {http://nbn-resolving.de/urn:nbn:de:bvb:20-opus-45406},
  year     = {2006},
  abstract = {The optimal probability and distance of dispersal largely depend on the risk to end up in unsuitable habitat. This risk is highest close to the habitat's edge and consequently, optimal dispersal probability and distance should decline towards the habitat's border. This selection should lead to the emergence of spatial gradients in dispersal strategies. However, gene flow caused by dispersal itself is counteracting local adaptation. Using an individual based model we investigate the evolution of local adaptations of dispersal probability and distance within a single, circular, habitat patch. We compare evolved dispersal probabilities and distances for six different dispersal kernels (two negative exponential kernels, two skewed kernels, nearest neighbour dispersal and global dispersal) in patches of different size. For all kernels a positive correlation between patch size and dispersal probability emerges. However, a minimum patch size is necessary to allow for local adaptation of dispersal strategies within patches. Beyond this minimum patch area the difference in mean dispersal distance between center and edge increases linearly with patch radius, but the intensity of local adaptation depends on the dispersal kernel. Except for global and nearest neighbour dispersal, the evolved spatial pattern are qualitatively similar for both, mean dispersal probability and distance. We conclude, that inspite of the gene-flow originating from dispersal local adaptation of dispersal strategies is possible if a habitat is of sufficient size. This presumably holds for any realistic type of dispersal kernel.},
  subject  = {Ausbreitung},
  language = {en},
}

@article{PoethkeHovestadtMitesser2003,
  author   = {Poethke, Hans-Joachim and Hovestadt, Thomas and Mitesser, Oliver},
  title    = {Local extinction and the evolution of dispersal rates: Causes and correlations},
  url      = {http://nbn-resolving.de/urn:nbn:de:bvb:20-opus-47718},
  year     = {2003},
  abstract = {We present the results of individual-based simulation experiments on the evolution of dispersal rates of organisms living in metapopulations. We find conflicting results regarding the relationship between local extinction rate and evolutionarily stable (ES) dispersal rate depending on which principal mechanism causes extinction: if extinction is caused by environmental catastrophes eradicating local populations, we observe a positive correlation between extinction and ES dispersal rate; if extinction is a consequence of stochastic local dynamics and environmental fluctuations, the correlation becomes ambiguous; and in cases where extinction is caused by dispersal mortality, a negative correlation between local extinction rate and ES dispersal rate emerges. We conclude that extinction rate, which both affects and is affected by dispersal rates, is not an ideal predictor for optimal dispersal rates.},
  subject  = {Ausbreitung},
  language = {en},
}

@phdthesis{Schumm2009,
  author   = {Schumm, Irene},
  title    = {Lessons Learned From {Germany's} 2001--2006 Labor Market Reforms},
  url      = {http://nbn-resolving.de/urn:nbn:de:bvb:20-opus-43705},
  school   = {Universit{\"a}t W{\"u}rzburg},
  year     = {2009},
  abstract = {In der Dissertation werden die Gesetze zur Reform des Arbeitsmarktes in Deutschland, besser bekannt als Hartz-Reformen, untersucht. Zun{\"a}chst wird ein {\"U}berblick {\"u}ber die wichtigsten {\"A}nderungen aus den vier Reform-Paketen gegeben sowie die Effekte, welche man sich davon versprach. Des Weiteren werden zwei grundlegende Reformmaßnahmen, n{\"a}mlich die Zusammenlegung der Arbeitslosen- und Sozialhilfe (Hartz IV) sowie die Verk{\"u}rzung der Bezugsdauer der Arbeitslosenversicherungsleistung, analysiert, um deren Auswirkungen auf das individuelle Verhalten und die aggregierte {\"O}konomie zu evaluieren. Diese Untersuchung geschieht im Rahmen eines Matching-Modells mit optimaler verweildauerabh{\"a}ngiger Suchleistung. Mit Hilfe von Semi-Markov-Methoden, deren Anwendung in der Arbeitsmarkttheorie beschrieben wird, findet schließlich eine Aggregierung statt. Auf diese Weise k{\"o}nnen die Auswirkungen der Hartz-IV-Reformen auf die Verweildauer in Arbeitslosigkeit, die optimale Suchleistung und die Arbeitslosigkeit quantifiziert werden.},
  subject  = {Hartz-Reform},
  language = {en},
}

@phdthesis{Oeffner2008,
  author   = {Oeffner, Marc},
  title    = {Agent-Based {Keynesian} Macroeconomics: An Evolutionary Model Embedded in an Agent-Based Computer Simulation},
  url      = {http://nbn-resolving.de/urn:nbn:de:bvb:20-opus-39277},
  school   = {Universit{\"a}t W{\"u}rzburg},
  year     = {2008},
  abstract = {Subject of the present study is the agent-based computer simulation of Agent Island. Agent Island is a macroeconomic model, which belongs to the field of monetary theory. Agent-based modeling is an innovative tool that made much progress in other scientific fields like medicine or logistics. In economics this tool is quite new, and in monetary theory to this date virtual no agent-based simulation model has been developed. It is therefore the topic of this study to close this gap to some extend. Hence, the model integrates in a straightforward way next to the common private sectors (i.e. households, consumer goods firms and capital goods firms) and as an innovation a banking system, a central bank and a monetary circuit. Thereby, the central bank controls the business cycle via an interest rate policy; the according mechanism builds on the seminal idea of Knut Wicksell (natural rate of interest vs. money rate of interest). In addition, the model contains also many Keynesian features and a flow-of-funds accounting system in the tradition of Wolfgang St{\"u}tzel. Importantly, one objective of the study is the validation of Agent Island, which means that the individual agents (i.e. their rules, variables and parameters) are adjusted in such a way that on the aggregate level certain phenomena emerge. The crucial aspect of the modeling and the validation is therefore the relation between the micro and macro level: Every phenomenon on the aggregate level (e.g. some stylized facts of the business cycle, the monetary transmission mechanism, the Phillips curve relationship, the Keynesian paradox of thrift or the course of the business cycle) emerges out of individual actions and interactions of the many thousand agents on Agent Island. In contrast to models comprising a representative agent, we do not apply a modeling on the aggregate level; and in contrast to orthodox GE models, true interaction between heterogeneous agents takes place (e.g. by face-to-face-trading).},
  subject  = {Mehragentensystem},
  language = {en},
}

@phdthesis{Wenisch2008,
  author   = {Wenisch, Jan},
  title    = {Ferromagnetic {(Ga,Mn)As} Layers and Nanostructures: Control of Magnetic Anisotropy by Strain Engineering},
  url      = {http://nbn-resolving.de/urn:nbn:de:bvb:20-opus-34552},
  school   = {Universit{\"a}t W{\"u}rzburg},
  year     = {2008},
  abstract = {This work studies the fundamental connection between lattice strain and magnetic anisotropy in the ferromagnetic semiconductor (Ga,Mn)As. The first chapters provide a general introduction into the material system and a detailed description of the growth process by molecular beam epitaxy. A finite element simulation formalism is developed to model the strain distribution in (Ga,Mn)As nanostructures is introduced and its predictions verified by high-resolution x-ray diffraction methods. The influence of lattice strain on the magnetic anisotropy is explained by an magnetostatic model. A possible device application is described in the closing chapter.},
  subject  = {Magnetischer Halbleiter},
  language = {en},
}

@phdthesis{Hupp2008,
  author   = {Hupp, Markus},
  title    = {Simulating Star Formation and Turbulence in Models of Isolated Disk Galaxies},
  url      = {http://nbn-resolving.de/urn:nbn:de:bvb:20-opus-34510},
  school   = {Universit{\"a}t W{\"u}rzburg},
  year     = {2008},
  abstract = {We model Milky Way like isolated disk galaxies in high resolution three-dimensional hydrodynamical simulations with the adaptive mesh refinement code Enzo. The model galaxies include a dark matter halo and a disk of gas and stars. We use a simple implementation of sink particles to measure and follow collapsing gas, and simulate star formation as well as stellar feedback in some cases. We investigate two largely different realizations of star formation. Firstly, we follow the classical approach to transform cold, dense gas into stars with an fixed efficiency. These kind of simulations are known to suffer from an overestimation of star formation and we observe this behavior as well. Secondly, we use our newly developed FEARLESS approach to combine hydrodynamical simulations with a semi-analytic modeling of unresolved turbulence and use this technique to dynamically determine the star formation rate. The subgrid-scale turbulence regulated star formation simulations point towards largely smaller star formation efficiencies and henceforth more realistic overall star formation rates. More work is necessary to extend this method to account for the observed highly supersonic turbulence in molecular clouds and ultimately use the turbulence regulated algorithm to simulate observed star formation relations.},
  subject  = {Astrophysik},
  language = {en},
}