@phdthesis{Grub2005, author = {Grub, Martin}, title = {Verteilungswirkungen anreizorientierter Sozialpolitik : das deutsche Rentenversicherungs- und Steuersystem in der Perspektive dynamischer Lebenszyklusmodelle}, url = {http://nbn-resolving.de/urn:nbn:de:bvb:20-opus-16163}, school = {Universit{\"a}t W{\"u}rzburg}, year = {2005}, abstract = {Drei große Reformenpakete und eine Reihe kleinerer Begleitmaßnahmen pr{\"a}gen das rentenpolitische Erbe der rot-gr{\"u}nen Bundesregierung. Einerseits greifen sie Trends in der Reformpolitik seit Beginn der 90er Jahre auf. So verst{\"a}rkt die Rentenstrukturreform 2001 beispielsweise die rentenrechtliche Anerkennung von Erziehung und Pflege. Eine ver{\"a}nderte Rentenformel wird die Transitionslasten des demographischen {\"U}bergangs mittelfristig gleichm{\"a}ßiger {\"u}ber die Jahrg{\"a}nge verteilen - durch eine Eind{\"a}mmung des Beitragssatzanstiegs auf Kosten eines langsamer wachsenden Rentenniveaus. Die Nachhaltigkeitsreform 2004 verst{\"a}rkt diesen Mechanismus auf der Grundlage neuer empirischer Erkenntnisse. Auch der {\"U}bergang zur {\"u}berwiegend nachgelagerten Besteuerung mit dem Alterseink{\"u}nftegesetz 2004 wirkt in diese Richtung durch eine wachsende steuerliche Absetzbarkeit der Alterssicherungsbeitr{\"a}ge bei konsekutiver Einbeziehung der Renten in die Besteuerung. Auf der anderen Seite leiten die Reformen nichts Geringeres als einen tief greifenden Paradigmenwechsel ein: den langfristigen {\"U}bergang eines umlagefinanzierten Pflichtversicherungs- zu einem Drei-S{\"a}ulen-System, in dem Zulagen und Steuerabzugsmechanismen Anreize zur freiwilligen Erg{\"a}nzungsvorsorge in kapitalgedeckten Sicherungsinstrumenten bilden. F{\"u}r die wissenschaftliche Gesetzesfolgenabsch{\"a}tzung stellen diese Reformen eine enorme Herausforderung dar. Es ist das Moment der Freiheit, das in jedweder kausalen Welt Verantwortung impliziert, und die politische Folgenabsch{\"a}tzung spannend und schwierig macht. 
Die {\"o}konomische Fachliteratur hat Mikrosimulationsmodelle als ein leistungsf{\"a}higes Analysewerkzeug entwickelt, fiskalische und distributive Konsequenzen "f{\"u}r den Tag danach" sehr detailliert absch{\"a}tzen zu k{\"o}nnen - ohne dabei Verhaltensreaktionen zu ber{\"u}cksichtigen. Verhaltensreaktionen hingegen stehen im Mittelpunkt der rasant wachsenden Literatur zu numerischen Gleichgewichtsmodellen. Angesichts begrenzter Rechenressourcen vereinfachen diese Modelle in der Regel die Risikostruktur des {\"o}konomischen Entscheidungsproblems, um sich auf wenige Zustands- und Entscheidungsvariablen beschr{\"a}nken zu k{\"o}nnen. Sie abstrahieren h{\"a}ufig von Unstetigkeiten in Budgetrestriktionen und konzentrieren sich auf station{\"a}re Zustandstransitionen. Viele dieser Instrumente sind daher wenig geeignet abzusch{\"a}tzen, wie sich Menschen an eine Reformpolitik anpassen, die lange {\"U}bergangs¬pfade vorsieht {\"u}ber mehrdimensionale, zeitinvariate Risikostrukturen, deren imperfekte Korrelationen zu einer risikodiversifizierenden Vorsorgestrategie genutzt werden kann. Das vorliegende Buch stellt ein dynamisch stochastisches Simulationsmodell im partiellen Erwartungsgleichgewicht vor. Sein Ziel ist, Anreize in der komplexen Interaktion der Reformen mit dem umfangreichen Regulierungsrahmen in einer risikoreichen Umwelt zu identifizieren. Die einzelnen Reformen, ihre algorithmische Abbildung in das dynamische Entscheidungsmodell und die partiellen Wirkungsmechanismen sind detailliert erl{\"a}utert. Eines der Hauptergebnisse zeigt sich {\"u}berraschender Weise darin, die beobachtbare Zur{\"u}ck¬haltung niedrigerer Einkommensklassen gegen{\"u}ber den neuen Sicherungs¬instrumenten ohne R{\"u}ckgriff auf (im {\"o}kokomischen Sinne) eingeschr{\"a}nkte Rationalit{\"a}ten erkl{\"a}ren zu k{\"o}nnen. 
Das Modell l{\"a}sst insbesondere in mittleren Lebenseinkommensbereichen hohe Erg{\"a}nzungs¬versicherungsraten erwarten - trotz der "u"-f{\"o}rmigen F{\"o}rderquoten in statischer Quer¬schnitts¬betrachtung. Diese auf den ersten Blickt wenig intuitive Eigenschaft des Modells l{\"a}sst sich im Gesamtkontext des Alterssicherungs- und Steuersystems {\"u}ber den erwarteten Lebenshorizont erkl{\"a}ren. Das Simulationsmodell wurde am Fraunhofer-Institut f{\"u}r Angewandte Informationstechnik FIT entwickelt und wird gegenw{\"a}rtig beim Verband der Rentenversicherungstr{\"a}ger (VDR) angewandt. Ein großer Dank gilt der finanziellen F{\"o}rderung durch das Forschungsnetzwerk Alterssicherung (FNA) beim VDR.}, subject = {Deutschland}, language = {de} } @phdthesis{Ganz2008, author = {Ganz, Verena}, title = {A comprehensive approach for currency crises theories stressing the role of the anchor country}, url = {http://nbn-resolving.de/urn:nbn:de:bvb:20-opus-26853}, school = {Universit{\"a}t W{\"u}rzburg}, year = {2008}, abstract = {The approach is based on the finding that new generations of currency crises theories always had developed ex post after popular currency crises. Discussing the main theories of currency crises shows their disparity: The First Generation of currency crises models argues based on the assumption of a chronic budget deficit that is being monetized by the domestic central bank. The result is a trade-off between an expansionary monetary policy that is focused on the internal economic balance and a fixed exchange rate which is depending on the rules of interest parity and purchasing power parity. This imbalance inevitably results in a currency crisis. Altogether, this theory argues with a disrupted external balance on the foreign exchange market. Second Generation currency crises models on the other side focus on the internal macroeconomic balance. 
The stability of a fixed exchange rate is depending on the economic benefit of the exchange rate system in relation to the social costs of maintaining it. As soon as social costs are increasing and showing up in deteriorating fundamentals, this leads to a speculative attack on the fixed exchange rate system. The term Third Generation of currency crises finally summarizes a variety of currency crises theories. These are also arguing psychologically to explain phenomena as contagion and spill-over effects to rationalize crises detached from the fundamental situation. Apart from the apparent inconsistency of the main theories of currency crises, a further observation is that these explanations focus on the crisis country only while international monetary transmission effects are left out of consideration. These however are a central parameter for the stability of fixed exchange rate systems, in exchange rate theory as well as in empirical observations. Altogether, these findings provide the motivation for developing a theoretical approach which integrates the main elements of the different generations of currency crises theories and which integrates international monetary transmission. Therefore a macroeconomic approach is chosen applying the concept of the Monetary Conditions Index (MCI), a linear combination of the real interest rate and the real exchange rate. This index firstly is extended for international monetary influences and called MCIfix. MCIfix illustrates the monetary conditions required for the stability of a fixed exchange rate system. The central assumption of this concept is that the uncovered interest parity is maintained. The main conclusion is that the MCIfix only depends on exogenous parameters. In a second step, the analysis integrates the monetary policy requirements for achieving an internal macroeconomic stability. By minimizing a loss function of social welfare, a MCI is derived which pictures the economically optimal monetary policy MCIopt. 
Instability in a fixed exchange rate system occurs as soon as the monetary conditions for an internal and external balance are deviating. For discussing macroeconomic imbalances, the central parameters determining the MCIfix (and therefore the relation of MCIfix to MCIopt) are discussed: the real interest rate of the anchor country, the real effective exchange rate and a risk premium. Applying this theory framework, four constellations are discussed where MCIfix and MCIopt fall apart in order to show the central bank's possibilities for reacting and the consequences of that behaviour. The discussion shows that the integrative approach manages to incorporate the central elements of traditional currency crises theories and that it includes international monetary transmission instead of reducing the discussion on an inconsistent domestic monetary policy. The theory framework for fixed exchange rates is finally applied in four case studies: the currency crises in Argentina, the crisis in the Czech Republic, the Asian currency crisis and the crisis of the European Monetary System. The case studies show that the developed monetary framework achieves integration of different generations of crises theories and that the monetary policy of the anchor country plays a decisive role in destabilising fixed exchange rate systems.}, subject = {Devisenmarkt}, language = {en} } @phdthesis{Ahn2007, author = {Ahn, Young-Cheul}, title = {Umweltverschmutzung als l{\"a}nder{\"u}bergreifendes Problem am Beispiel der globalen Erw{\"a}rmung}, url = {http://nbn-resolving.de/urn:nbn:de:bvb:20-opus-22860}, school = {Universit{\"a}t W{\"u}rzburg}, year = {2007}, abstract = {Diese Arbeit versucht, die wirtschaftliche Bedeutung der globalen Erw{\"a}rmung zu erkl{\"a}ren und die L{\"o}sung dieses Problems zu finden. 
Die globale Erw{\"a}rmung als l{\"a}nder{\"u}bergreifendes Problems kann durch das marktwirtschaftliche Preissystem gel{\"o}st werden, besonders durch den internationalen Handel der Schadstoffemission. Hierbei wird die dezentrale L{\"o}sungsprinzip betont. Die globale Erw{\"a}rmung und Politik f{\"u}r die L{\"o}sung dieses Problem sind dauerhaft. Daher wird das intertemporale Wachstumsmodell zur Berechnung des Gewinns und der Kosten der Politik verlangt. Dabei wird ein Prinzip besagt, dass jede Generation verantwortlich auf ihre Generation ist. In dieser Arbeit wird versucht, die optimale Handelspolitik und die Kyoto-Politik zu vergleichen.}, language = {de} } @phdthesis{Mayer2006, author = {Mayer, Eric}, title = {New Keynesian Perspectives on Monetary Policy and the Business Cycle in Closed Economies and Monetary Unions}, url = {http://nbn-resolving.de/urn:nbn:de:bvb:20-opus-19499}, school = {Universit{\"a}t W{\"u}rzburg}, year = {2006}, abstract = {No abstract available}, subject = {Geldtheorie}, language = {en} } @phdthesis{Kleer2008, author = {Kleer, Robin}, title = {Three Essays on Competition Policy and Innovation Incentives}, url = {http://nbn-resolving.de/urn:nbn:de:bvb:20-opus-34763}, school = {Universit{\"a}t W{\"u}rzburg}, year = {2008}, abstract = {Diese Dissertation befasst sich mit dem Thema Innovations{\"o}konomik. In einer allgemeinen Einf{\"u}hrung werden wettbewerbspolitische Gesichtspunkte, die Innovationsanreize von Firmen beeinflussen, dargestellt. In drei einzelnen Arbeiten werden dann spezielle Fragestellungen intensiver analysiert. Die erste Arbeit behandelt die Wechselwirkungen von Firmenzusammenschl{\"u}ssen und Innovationen, zwei zentrale Elemente der Wettbewerbsstrategie von Unternehmen. Der Schwerpunkt der Arbeit liegt dabei auf dem Einfluss von Firmenzusammenschl{\"u}ssen auf die Innovationsaktivit{\"a}ten und den Wettbewerb im Produktmarkt. 
Dabei werden auch m{\"o}gliche Ineffizienzen, die sich durch Probleme bei der Integration der Firmen nach dem Zusammenschluss ergeben, untersucht. Es wird gezeigt, dass die optimale Investitionsaktivit{\"a}t sehr stark von der sich ergebenden Marktstruktur abh{\"a}ngt und es signifikante Unterschiede zwischen Insider und Outsider des Firmenzusammenschlusses gibt. In dem Modell mit linearer Nachfragefunktion und konstanten Grenzkosten steigern Zusammenschl{\"u}sse die soziale Wohlfahrt. Die zweite Arbeit betrachtet die unterschiedlichen Vorteile von kleinen und großen Firmen im Innovationswettbewerb. W{\"a}hrend große Firmen typischerweise {\"u}ber einen besseren Zugang zu Produktm{\"a}rkten verf{\"u}gen, weisen kleine Firmen h{\"a}ufig eine bessere Forschungseffizienz auf. Diese verschiedenen Vorteile werfen unmittelbar die Frage nach Kooperationen auf. Im dargestellten Modell mit vier Unternehmen haben große Firmen die M{\"o}glichkeit kleine Firmen zu kaufen. Innovationen werden mittels Patentwettbewerb modelliert. Sequentielles Bieten erm{\"o}glicht es der ersten großen Firma strategisch zu handeln um eine Reaktion der zweiten großen Firma hervorzurufen. Ergeben sich hohe Effizienzen durch den Firmenzusammenschluss, so bevorzugen die großen Firmen eine unmittelbare Akquisition und es entsteht eine symmetrische Marktstruktur. Bei geringen Effizienzen wartet die erste Firma dagegen ab und zwingt die zweite Firma dadurch zum Kauf. Somit entsteht trotz symmetrischer Ausgangssituation eine asymmetrische Marktstruktur. Weiterhin wird gezeigt, dass Akquisitionen die Chancen f{\"u}r eine erfolgreiche Innovation erh{\"o}hen. Die dritte Arbeit befasst sich mit Forschungssubventionen. Dabei wird neben dem eigentlichen Ziel der Subvention -- der F{\"o}rderung sozial erw{\"u}nschter Projekte, die nicht gen{\"u}gend private Anreize zur Durchf{\"u}hrung bieten -- die Signalwirkung einer Subvention betrachtet. 
Eine Staatsbeh{\"o}rde untersucht dabei die Projekte auf Risiken und Wohlfahrtswirkungen und entscheidet daraufhin {\"u}ber eine F{\"o}rderung. Dies wird in einem einfachen Signalisierungsspiel mit zwei Risikoklassen von Forschungsprojekten modelliert. Die Staatsbeh{\"o}rde bevorzugt dabei riskante Projekte, die hohe erwartete soziale Gewinne versprechen, w{\"a}hrend Banken wenig riskante Projekte mit hohen privaten Gewinnen bevorzugen. Erm{\"o}glicht die Subvention lediglich die Unterscheidung von riskanten und weniger riskanten Projekten, so ist das Signal der Beh{\"o}rde wenig hilfreich f{\"u}r die Investitionsenscheidung der Banken. Bietet das Signal jedoch zus{\"a}tzlich einen Hinweis auf die Qualit{\"a}t der Projekte, so k{\"o}nnen sich erh{\"o}hte, bzw. effizienter ausgew{\"a}hlte, private Investitionen ergeben. Im letzten Kapitel werden die wichtigsten Aussagen zusammengefasst sowie in abschließenden Bemerkungen der Zusammenhang der Ergebnisse erl{\"a}utert.}, subject = {Wettbewerbspolitik}, language = {en} } @misc{Fricke2008, author = {Fricke, Holger}, title = {Deutschland dezentral - Potenziale, Probleme, politische Perspektiven}, url = {http://nbn-resolving.de/urn:nbn:de:bvb:20-opus-36106}, year = {2008}, abstract = {Dezentrale, wettbewerblich organisierte f{\"o}derale Ordnungen, bei denen zentrale Kompetenzen auf niedrigen institutionellen Ebenen liegen und in denen Gebietsk{\"o}rperschaften eine vergleichsweise geringe Gr{\"o}ße aufweisen, sind mit betr{\"a}chtlichen Vorteilen verbunden. So ist es besser m{\"o}glich, den Pr{\"a}ferenzen der B{\"u}rger gerecht zu werden. Außerdem wird ein h{\"o}heres Wirtschaftswachstum angeregt. Die in der Theorie genannten Nachteile (unausgesch{\"o}pfte Gr{\"o}ßenvorteile, negative Auswirkungen externer Effekte, race to the bottom bei {\"o}ffentlichen Leistungen und Sozialstaat) finden hingegen nur geringe empirische Best{\"a}tigung. 
Vor diesem Hintergrund ist der kooperative F{\"o}deralismus der Bundesrepublik Deutschland kritisch zu bewerten. Insbesondere der L{\"a}nderfinanzausgleich als Kernelement der bundesstaatlichen Ordnung in Deutschland ist ineffizient und bremst das Wirtschaftswachstum. Um von den Vorteilen dezentraler, wettbewerblicher f{\"o}deraler Ordnungen profitieren zu k{\"o}nnen, sollte den Bundesl{\"a}ndern insbesondere substanzielle Finanzautonomie einger{\"a}umt werden. Die Heterogenit{\"a}t politischer Pr{\"a}ferenzen abh{\"a}ngig von gew{\"a}hlter staatlicher Ebene, Gr{\"o}ße von Gebietsk{\"o}rperschaften und simulierten L{\"a}nderneugliederungen wurde anhand von Bundestagswahlergebnissen untersucht. Die entsprechende Analyse befindet sich als Anhang an dieser Stelle, w{\"a}hrend die Dissertation in gedruckter Form erschienen ist.}, subject = {Fiskalf{\"o}deralismus}, language = {de} } @phdthesis{Stark2006, author = {Stark, Peter}, title = {Funktionalit{\"a}t, Effizienz und Deregulierungspotentiale der {\"o}ffentlichen Straßeninfrastrukturbereitstellung}, url = {http://nbn-resolving.de/urn:nbn:de:bvb:20-opus-27479}, school = {Universit{\"a}t W{\"u}rzburg}, year = {2006}, abstract = {Innerhalb der Verkehrsinfrastrukturen ist das Straßenwesen mit seinen individualistischen und flexiblen Kraftfahrzeugen der große Gewinner des letzen Jahrhunderts. Trotz seines individualistischen Nutzungscharakters und privatwirtschaftlichen Beispielen in anderen L{\"a}ndern ist in Deutschland heute keine andere Verkehrsinfrastruktur so weit von einer Deregulierung entfernt wie die Straßen. Den Ausgangspunkt der Erkl{\"a}rung dieser Rigidit{\"a}t bildet eine Situationsaufnahme der bestehenden Marktorganisation. Pr{\"a}gnant erweist sich die tiefe Integration der Straßenbereitstellung in die {\"o}ffentliche Verwaltung und Entscheidungen {\"u}ber den politischen Prozess unter Beteiligung aller Gesellschaftsgruppierungen. 
Fragen wirft dabei unter anderen die kontroverse Diskussion um die Externalit{\"a}ten der Straßen und ihres Verkehrs auf. Die Kl{\"a}rung verweist auf eine Effizienzbetrachtung des bestehenden Bereitstellungssystems. Problematisch zeigt sich hier aufgrund der politischen Heterogenit{\"a}t der Gesellschaftsgruppierungen insbesondere der politische Entscheidungsprozess. Die Suche nach einer L{\"o}sung verweist auf privatwirtschaftliche Bereitstellungsalternativen. Es zeigt sich, dass hierf{\"u}r sowohl in rechtlicher wie auch technischer Hinsicht die notwendigen Rahmenbedingungen f{\"u}r eine privatwirtschaftliche Organisation ebenso bestehen wie auch erhebliche Effizienzgewinne zu erwarten w{\"a}ren. Als eigentliches Hindernis identifiziert sich auch hier eine Neuordnung, die notwendigerweise {\"u}ber den politischen Prozess stattfindenden muss. Der eigene Erfolg der Straßen mit seiner gewachsenen verkehrs- und fiskalpolitischen Bedeutung blockiert damit heute mehr denn je Hoffnungen auf eine L{\"o}sung der aufgelaufenen Probleme im Straßenwesen.}, subject = {OKSTRA}, language = {de} } @phdthesis{Wollmershaeuser2003, author = {Wollmersh{\"a}user, Timo}, title = {A theory of managed floating}, url = {http://nbn-resolving.de/urn:nbn:de:bvb:20-opus-8676}, school = {Universit{\"a}t W{\"u}rzburg}, year = {2003}, abstract = {After the experience with the currency crises of the 1990s, a broad consensus has emerged among economists that such shocks can only be avoided if countries that decided to maintain unrestricted capital mobility adopt either independently floating exchange rates or very hard pegs (currency boards, dollarisation). As a consequence of this view which has been enshrined in the so-called impossible trinity all intermediate currency regimes are regarded as inherently unstable. 
As far as the economic theory is concerned, this view has the attractive feature that it not only fits with the logic of traditional open economy macro models, but also that for both corner solutions (independently floating exchange rates with a domestically oriented interest rate policy; hard pegs with a completely exchange rate oriented monetary policy) solid theoretical frameworks have been developed. Above all the IMF statistics seem to confirm that intermediate regimes are indeed less and less fashionable by both industrial countries and emerging market economies. However, in the last few years an anomaly has been detected which seriously challenges this paradigm on exchange rate regimes. In their influential cross-country study, Calvo and Reinhart (2000) have shown that many of those countries which had declared themselves as 'independent floaters' in the IMF statistics were characterised by a pronounced 'fear of floating' and were actually heavily reacting to exchange rate movements, either in the form of an interest rate response, or by intervening in foreign exchange markets. The present analysis can be understood as an approach to develop a theoretical framework for this managed floating behaviour that - even though it is widely used in practice - has not attracted very much attention in monetary economics. In particular we would like to fill the gap that has recently been criticised by one of the few 'middle-ground' economists, John Williamson, who argued that "managed floating is not a regime with well-defined rules" (Williamson, 2000, p. 47). Our approach is based on a standard open economy macro model typically employed for the analysis of monetary policy strategies. The consequences of independently floating and market determined exchange rates are evaluated in terms of a social welfare function, or, to be more precise, in terms of an intertemporal loss function containing a central bank's final targets output and inflation. 
We explicitly model the source of the observable fear of floating by questioning the basic assumption underlying most open economy macro models that the foreign exchange market is an efficient asset market with rational agents. We will show that both policy reactions to the fear of floating (an interest rate response to exchange rate movements which we call indirect managed floating, and sterilised interventions in the foreign exchange markets which we call direct managed floating) can be rationalised if we allow for deviations from the assumption of perfectly functioning foreign exchange markets and if we assume a central bank that takes these deviations into account and behaves so as to reach its final targets. In such a scenario with a high degree of uncertainty about the true model determining the exchange rate, the rationale for indirect managed floating is the monetary policy maker's quest for a robust interest rate policy rule that performs comparatively well across a range of alternative exchange rate models. We will show, however, that the strategy of indirect managed floating still bears the risk that the central bank's final targets might be negatively affected by the unpredictability of the true exchange rate behaviour. This is where the second policy measure comes into play. The use of sterilised foreign exchange market interventions to counter movements of market determined exchange rates can be rationalised by a central bank's effort to lower the risk of missing its final targets if it only has a single instrument at its disposal. We provide a theoretical model-based foundation of a strategy of direct managed floating in which the central bank targets, in addition to a short-term interest rate, the nominal exchange rate. 
In particular, we develop a rule for the instrument of intervening in the foreign exchange market that is based on the failure of the foreign exchange market to guarantee a reliable relationship between the exchange rate and other fundamental variables.}, language = {en} } @phdthesis{Huelsewig2003, author = {H{\"u}lsewig, Oliver}, title = {Bank lending and monetary policy transmission in Germany}, url = {http://nbn-resolving.de/urn:nbn:de:bvb:20-opus-8686}, school = {Universit{\"a}t W{\"u}rzburg}, year = {2003}, abstract = {This study investigates the credit channel in the transmission of monetary policy in Germany by means of a structural analysis of aggregate bank loan data. We base our analysis on a stylized model of the banking firm, which specifies the loan supply decisions of banks in the light of expectations about the future course of monetary policy. Using the model as a guide, we apply a vector error correction model (VECM), in which we identify long-run cointegration relationships that can be interpreted as loan supply and loan demand equations. In this way, the identification problem inherent in reduced form approaches based on aggregate data is explicitly addressed. The short-run dynamics is explored by means of innovation analysis, which displays the reaction of the variables in the system to a monetary policy shock. 
The main implication of our results is that the credit channel in Germany appears to be effective, as we find that loan supply effects in addition to loan demand effects contribute to the propagation of monetary policy measures.}, language = {en} } @phdthesis{Schlegtendal2003, author = {Schlegtendal, G{\"o}tz}, title = {Die Wirkung flexibler Entgeltkomponenten auf den Arbeitsmarkt unter Ber{\"u}cksichtigung betriebswirtschaftlicher Aspekte}, url = {http://nbn-resolving.de/urn:nbn:de:bvb:20-opus-5277}, school = {Universit{\"a}t W{\"u}rzburg}, year = {2003}, abstract = {Untersuchungsgegenstand dieser Arbeit sind die Wirkungen flexibler Entgeltkomponenten (Leistungslohn, Erfolgsbeteiligung, Kapitalbeteiligung) auf der betriebs- und volkswirtschaftlichen Ebene. Ausgangspunkt ist dabei die Analyse der herrschenden Arbeitslosigkeit im Hinblick auf die Ursachen und die Gr{\"u}nde ihrer Verfestigung und Persistenz. Dabei wird festgestellt, dass die existierende Arbeitslosigkeit {\"u}ber verschiedene Theorien erkl{\"a}rt und in mehrere Bestandteile zerlegt werden kann. Ein erheblicher Teil der Arbeitslosigkeit kann auf unflexible, {\"u}berh{\"o}hte Reall{\"o}hne zur{\"u}ckgef{\"u}hrt werden. Unterschiedliche Einfl{\"u}sse verhindern dann ein Absinken des Lohnniveaus auf ein vollbesch{\"a}ftigungskonformes Niveau. Strukturelle Ursachen, i. S. v. Fehlentwicklungen auf dem Arbeitsmarkt und ungen{\"u}genden Anpassungskapazit{\"a}ten an ver{\"a}nderte Rahmenbedingungen sind eine weitere Begr{\"u}ndung f{\"u}r die hohe und nachhaltige Arbeitslosigkeit. Entgelte, die in ihrer H{\"o}he und in ihrer sektoralen, regionalen und berufsbezogenen Ausrichtungen flexibel sind, k{\"o}nnen einen maßgeblichen Beitrag zum Abbau dieser Arbeitslosigkeit leisten. Aufbauend auf diese volkswirtschaftlichen Ansatz werden im folgenden Kapitel wesentliche betriebswirtschaftlichen Aspekte aus Sicht von Unternehmern und Arbeitnehmer dargestellt. 
Auf diesen Grundlagen aufbauend werden 3 Formen der Entgeltflexibilisierung im Hinblick auf ihre gesamt- und betriebswirtschaftlichen Wirkungen betrachtet. Leistungsl{\"o}hne orientieren sich entweder am quantitativ messbaren Output oder der qualitativen Arbeitsbewertung eines Arbeitnehmers. Sie tragen somit unmittelbar zu einer produktivit{\"a}tsorientierten Entlohnung bei, mit positiven Effekten f{\"u}r Unternehmen und Gesamtwirtschaft. Seit Martin Weitzmans kontrovers diskutierter "Beteiligungsgesellschaft" werden Erfolgsbeteiligungen als ein Weg gesehen, um Arbeitslosigkeit abzubauen. Von der Verbindung zwischen Unternehmenserfolg und Entlohnung profitieren Arbeitnehmer, Unternehmen und Arbeitslose. Kapitalbeteiligungen haben keinen direkten Einfluss auf die Arbeitslosigkeit. Indirekt tragen sie jedoch zu erh{\"o}hter Motivation und Identifikation in den Unternehmen bei. Auch die Bereitstellung von Kapital kann je nach Modell Arbeitspl{\"a}tze sichern. Neben diesen drei Hauptformen werden auch Investivl{\"o}hne und Aktienoptionen als Wege der Flexibilisierung betrachtet. Dabei war festzustellen, dass Investivl{\"o}hne (zur {\"U}berwindung des Eigenkapitalmangels vieler Unternehmen) analoge Wirkungen zu Erfolgs- und Kapitalbeteiligung aufweisen. Aktienoptionen hingegen betreffen in der Regel nur kleine Gruppen von Arbeitnehmern und F{\"u}hrungskr{\"a}ften. Ein letztes Kapitel zeigt die Gestaltung eines optimalen Entgeltsystems. Dieses weist neben einem Grundentgelt, einen Leistungslohn sowie eine Erfolgsbeteiligung auf, die durch eine optionale Kapitalbeteiligung erg{\"a}nzt werden k{\"o}nnen. Dabei wird noch einmal betont, dass eine flexiblere Gestaltung von Entgelten keine alleinige L{\"o}sung der Arbeitslosigkeit bietet. 
Vielmehr m{\"u}ssen strukturelle Reformen, die die Machtstrukturen am Arbeitsmarkt, die H{\"o}he und Ausgestaltung der Lohnersatzleistungen sowie die St{\"a}rkung des Wirtschaftswachstums mit der Flexibilisierung Hand in Hand gehen.}, subject = {Arbeitsmarkt}, language = {de} } @phdthesis{Roennberg2010, author = {R{\"o}nnberg, Michael}, title = {Bedeutung der Spezifikation f{\"u}r Ratingmodelle}, url = {http://nbn-resolving.de/urn:nbn:de:bvb:20-opus-48895}, school = {Universit{\"a}t W{\"u}rzburg}, year = {2010}, abstract = {Ziel dieser Arbeit ist die Untersuchung der Bedeutung der Spezifikation f{\"u}r Ratingmodelle zur Prognose von Kreditausfallwahrscheinlichkeiten. Ausgehend von dem in der Bankenpraxis etablierten Logit-Modell werden verschiedene Modellerweiterungen diskutiert und hinsichtlich ihrer Eigenschaften als Ratingmodelle empirisch und simulationsbasiert untersucht. Die Interpretierbarkeit und die Prognoseg{\"u}te der Modelle werden dabei gleichermaßen ber{\"u}cksichtigt. Besonderes Augenmerk wird auf Mixed Logit-Modelle zur Abbildung individueller Heterogenit{\"a}t gelegt. Die Ergebnisse zeigen, dass die Spezifikation einen wichtigen Einfluss auf die Eigenschaften von Ratingmodellen hat und dass insbesondere mit Hilfe von Mixed Logit-Ans{\"a}tzen sinnvoll interpretierbare Ratingmodelle mit guten Prognoseeigenschaften erlangt werden k{\"o}nnen.}, subject = {Bank}, language = {de} } @phdthesis{Wismer2013, author = {Wismer, Sebastian}, title = {Essays in Industrial Organization: Intermediation, Marketing, and Strategic Pricing}, url = {http://nbn-resolving.de/urn:nbn:de:bvb:20-opus-99102}, school = {Universit{\"a}t W{\"u}rzburg}, year = {2013}, abstract = {This dissertation deals with certain business strategies that have become particularly relevant with the spread and development of new information technologies. 
The introduction explains the motivation, discusses different ways of defining the term "two-sided market", and briefly summarizes the subsequent essays. The first essay examines the effects of product information on the pricing and advertising decision of a seller who offers an experience good whose quality is unknown to consumers prior to purchase. It comprises two theoretical models which differ with respect to their view on advertising. The analysis addresses the question how the availability of additional, potentially misleading information affects the seller's quality-dependent pricing and advertising decision. In the first model, in which both advertising and product reviews make consumers aware about product existence, the seller's optimal price turns out to be increasing in product quality. However, under certain circumstances, also the seller of a low-quality product prefers setting a high price. Within the given framework, the relationship between product quality and advertising depends on the particular parameter constellation. In the second model, some consumers are assumed to interpret price as a signal of quality, while others rely on information provided by product reviews. Consequently, and differently from the first part, pricing may indirectly inform consumers about product quality. On the one hand, in spite of asymmetric information on product quality, equilibria exist that feature full information pricing, which is in line with previous results presented by the signaling literature. On the other hand, potentially misleading product reviews may rationalize further pricing patterns. Moreover, assuming that firms can manipulate product reviews by investing in concealed marketing, equilibria can arise in which a high price signals low product quality. However, in these extreme cases, only a few (credulous) consumers consider buying the product. 
The second essay deals with trade platforms whose operators not only allow sellers to offer their products to consumers, but also offer products themselves. In this context, the platform operator faces a hold-up problem if he sets classical two-part tariffs (on which previous literature on two-sided markets focussed) as potential competition between the platform operator and sellers reduces platform attractiveness. Since some sellers refuse to join the platform, products whose existence is not known to the platform operator in the first place and which can only be established by better informed sellers may not be offered at all. However, revenue-based fees lower the platform operator's incentives to compete with sellers, increasing platform attractiveness. Therefore, charging such proportional fees can be profitable, what may explain why several trade platforms indeed do charge proportional fees. The third essay examines settings in which sellers can be active both on an intermediary's trade platform and in other sales channels. It explores the sellers' incentives to set different prices across sales channels within the given setup. Afterwards, it analyzes the intermediary's tariff decision, taking into account the implications on consumers' choice between different sales channels. The analysis particularly focusses on the effects of a no-discrimination rule which several intermediaries impose, but which appears to be controversial from a competition policy view. It identifies under which circumstances the intermediary prefers restricting sellers' pricing decisions by imposing a no-discrimination rule, attaining direct control over the split-up of customers on sales channels. 
Moreover, it illustrates that such rules can have both positive and negative effects on welfare within the given framework.}, subject = {Industrie{\"o}konomie}, language = {en} } @phdthesis{DonadoGomez2011, author = {Donado Gomez, Alejandro}, title = {Trade Unions and Occupational Health and Safety}, url = {http://nbn-resolving.de/urn:nbn:de:bvb:20-opus-56076}, school = {Universit{\"a}t W{\"u}rzburg}, year = {2011}, abstract = {This thesis comprises three essays that study the impact of trade unions on occupational health and safety (OHS). The first essay proposes a theoretical model that highlights the crucial role that unions have played throughout history in making workplaces safer. Firms traditionally oppose better health standards. Workplace safety is costly for firms but increases the average health of workers and thereby the aggregate labour supply. A laissez-faire approach in which firms set safety standards is suboptimal as workers are not fully informed of health risks associated with their jobs. Safety standards set by better-informed trade unions are output and welfare increasing. The second essay extends the model to a two-country world consisting of the capital-rich "North" and the capital-poor "South". The North has trade unions that set high OHS standards. There are no unions in the South and OHS standards are low. Trade between these two countries can imply a reduction in safety standards in the North, lowering the positive welfare effects of trade. Moreover, when trade unions are also established in the South, northern OHS standards might be further reduced. The third essay studies the impact of unions on OHS from an empirical perspective. It focuses on one component of OHS: occupational injuries. A literature summary including 25 empirical studies shows that most studies associate unions with less fatal occupational injuries. This is in perfect line with the anecdotal evidence and the basic model from the first essay. 
However, the literature summary also shows that most empirical studies associate unions with more nonfatal occupational injuries. This puzzling result has been explained in the literature by (1) lower underreporting in unionized workplaces, (2) unions being more able to organize hazardous workplaces, and (3) unionized workers preferring higher wages at the expense of better working conditions. Using individual-level panel data, this essay presents evidence against all these three explanations. However, it cannot reject the hypothesis that workers reduce their precautionary behaviour when they join a trade union. Hence, the puzzle seems to be due to a strong moral hazard effect. These empirical results suggest that the basic model from the first essay needs to be extended to account for this moral hazard effect.}, subject = {Arbeitsschutz}, language = {en} } @phdthesis{Brause2010, author = {Brause, Alexander F.}, title = {Foreign Exchange Market Interventions: New Empirical Views of Emerging Markets}, url = {http://nbn-resolving.de/urn:nbn:de:bvb:20-opus-55207}, school = {Universit{\"a}t W{\"u}rzburg}, year = {2010}, abstract = {Since the beginning, central banks have used a wide range of instruments to achieve their ultimate purpose of price stability. One measure in the authorities' toolbox is a foreign exchange market intervention. The discussion about this instrument has come a long way. So far, the discussion relied mainly on industrialized countries' experiences. The negative outcomes of most studies with respect to the effectiveness of the intervention tool opened up a discussion, whether interventions should be used by the authorities to manage exchange rate aspects. Consequently, the question about the dynamics of foreign exchange market interventions is now open to the subject-matter of developing and emerging market countries. Monetary policy in those countries often constitutes an active management of exchange rates. 
However, the basic discussions about intervention dynamics have had one essential drawback. Neither the primary literature of industrialized countries nor studies dealing with developing countries have considered the fact that intervention purposes and the corresponding effects are likely to vary over time. This thesis is designed to provide the reader with essential issues of central bank interventions, and aims to give further, as well as new contributions, in terms of empirical research on interventions in emerging markets. The main objectives of this study are the analysis of central bank intervention motives, and the corresponding effects on exchange rates in emerging markets. The time dependency of both issues is explicitly considered, which states a novelty in academic research of central bank interventions. Additionally, the outcomes are discussed against the background of underlying economic and monetary policy fundamentals. This could well serve as a starting point for further research.}, subject = {Schwellenl{\"a}nder}, language = {en} } @phdthesis{Steinmetz2009, author = {Steinmetz, Alexander}, title = {Essays on Strategic Behavior and Dynamic Oligopoly Competition}, url = {http://nbn-resolving.de/urn:nbn:de:bvb:20-opus-47934}, school = {Universit{\"a}t W{\"u}rzburg}, year = {2009}, abstract = {This thesis deals with three selected dimensions of strategic behavior, namely investment in R\&D, mergers and acquisitions, and inventory decisions in dynamic oligopolies. The question the first essay addresses is how the market structure evolves due to innovative activities when firms' level of technological competence is valuable for more than one project. The focus of the work is the analysis of the effect of learning-by-doing and organizational forgetting in R\&D on firms' incentives to innovate. A dynamic step-by-step innovation model with history dependency is developed. Firms can accumulate knowledge by investing in R\&D. 
As a benchmark without knowledge accumulation it is shown that relaxing the usual assumption of imposed imitation yields additional strategic effects. Therefore, the leader's R\&D effort increases with the gap as she is trying to avoid competition in the future. When firms gain experience by performing R\&D, the resulting effect of knowledge induces technological leaders to rest on their laurels which allows followers to catch up. Contrary to the benchmark case the leader's innovation effort declines with the lead. This causes an equilibrium where the incentives to innovate are highest when competition is most intense. Using a model of oligopoly in general equilibrium the second essay analyzes the integration of economies that might be accompanied by cross-border merger waves. Studying economies which prior to trade were in stable equilibrium where mergers were not profitable, we show that globalization can trigger cross-border merger waves for a sufficiently large heterogeneity in marginal cost. In partial equilibrium, consumers benefit from integration even when a merger wave is triggered which considerably lowers intensity of competition. Welfare increases. In contrast, in general equilibrium where interactions between markets and therefore effects on factor prices are considered, gains from trade can only be realized by reallocation of resources. The higher the technological dissimilarity between countries the better can efficiency gains be realized in integrated general equilibrium. The overall welfare effect of integration is positive when all firms remain active but indeterminate when firms exit or are absorbed due to a merger wave. It is possible for decreasing competition to dominate the welfare gain from more efficient resource allocation across sectors. Allowing for firms' entry alters results as in an integrated world coexistence of firms of different countries is never possible. 
Comparative advantages with respect to entry and production are important for realizing efficiency gains from trade. The third essay analyzes the interaction between price and inventory decisions in an oligopoly industry and its implications for the dynamics of prices. The work extends existing literature and especially the work of Hall and Rust (2007) to endogenous prices and strategic oligopoly competition. We show that the optimal decision rule is an (S,s) order policy and prices and inventories are strategic substitutes. Fixed ordering costs generate infrequent orders. Additionally, with strategic competition in prices, (S,s) inventory behavior together with demand uncertainty generates cyclical patterns in prices. The last chapter presents some concluding remarks on the results of the essays.}, subject = {Wettbewerbsstrategie}, language = {en} } @phdthesis{Schumm2009, author = {Schumm, Irene}, title = {Lessons Learned From Germany's 2001-2006 Labor Market Reforms}, url = {http://nbn-resolving.de/urn:nbn:de:bvb:20-opus-43705}, school = {Universit{\"a}t W{\"u}rzburg}, year = {2009}, abstract = {In der Dissertation werden die Gesetze zur Reform des Arbeitsmarktes in Deutschland, besser bekannt als Hartz-Reformen, untersucht. Zun{\"a}chst wird ein {\"U}berblick {\"u}ber die wichtigsten {\"A}nderungen aus den vier Reform-Paketen gegeben sowie die Effekte, welche man sich davon versprach. Des Weiteren werden zwei grundlegende Reformmaßnahmen, n{\"a}mlich die Zusammenlegung der Arbeitslosen- und Sozialhilfe (Hartz IV) sowie die Verk{\"u}rzung der Bezugsdauer der Arbeitslosenversicherungsleistung, analysiert, um deren Auswirkungen auf das individuelle Verhalten und die aggregierte {\"O}konomie zu evaluieren. Diese Untersuchung geschieht im Rahmen eines Matching-Modells mit optimaler verweildauerabh{\"a}ngiger Suchleistung. Mit Hilfe von Semi-Markov-Methoden, deren Anwendung in der Arbeitsmarkttheorie beschrieben wird, findet schließlich eine Aggregierung statt. 
Auf diese Weise k{\"o}nnen die Auswirkungen der Hartz-IV-Reformen auf die Verweildauer in Arbeitslosigkeit, die optimale Suchleistung und die Arbeitslosigkeit quantifiziert werden.}, subject = {Hartz-Reform}, language = {en} } @article{RoemeisHerwegMueller2022, author = {R{\"o}meis, Fabio and Herweg, Fabian and M{\"u}ller, Daniel}, title = {Salience bias and overwork}, series = {Games}, volume = {13}, journal = {Games}, number = {1}, issn = {2073-4336}, doi = {10.3390/g13010015}, url = {http://nbn-resolving.de/urn:nbn:de:bvb:20-opus-255213}, year = {2022}, abstract = {In this study, we enrich a standard principal-agent model with hidden action by introducing salience-biased perception on the agent's side. The agent's misguided focus on salient payoffs, which leads the agent's and the principal's probability assessments to diverge, has two effects: First, the agent focuses too much on obtaining a bonus, which facilitates incentive provision. Second, the principal may exploit the diverging probability assessments to relax participation. We show that salience bias can reverse the nature of the inefficiency arising from moral hazard; i.e., the principal does not necessarily provide insufficient incentives that result in inefficiently low effort but instead may well provide excessive incentives that result in inefficiently high effort.}, language = {en} } @phdthesis{Gruendler2018, author = {Gr{\"u}ndler, Klaus}, title = {A Contribution to the Empirics of Economic Development - The Role of Technology, Inequality, and the State}, edition = {1. Auflage}, publisher = {W{\"u}rzburg University Press}, address = {W{\"u}rzburg}, isbn = {978-3-95826-072-6 (Print)}, doi = {10.25972/WUP-978-3-95826-073-3}, url = {http://nbn-resolving.de/urn:nbn:de:bvb:20-opus-141520}, school = {W{\"u}rzburg University Press}, pages = {300}, year = {2018}, abstract = {This dissertation contributes to the empirical analysis of economic development. 
The continuing poverty in many Sub-Saharan-African countries as well as the declining trend in growth in the advanced economies that was initiated around the turn of the millennium raises a number of new questions which have received little attention in recent empirical studies. Is culture a decisive factor for economic development? Do larger financial markets trigger positive stimuli with regard to incomes, or is the recent increase in their size in advanced economies detrimental to economic growth? What causes secular stagnation, i.e. the reduction in growth rates of the advanced economies observable over the past 20 years? What is the role of inequality in the growth process, and how do governmental attempts to equalize the income distribution affect economic development? And finally: Is the process of democratization accompanied by an increase in living standards? These are the central questions of this doctoral thesis. To facilitate the empirical analysis of the determinants of economic growth, this dissertation introduces a new method to compute classifications in the field of social sciences. The approach is based on mathematical algorithms of machine learning and pattern recognition. Whereas the construction of indices typically relies on arbitrary assumptions regarding the aggregation strategy of the underlying attributes, utilization of Support Vector Machines transfers the question of how to aggregate the individual components into a non-linear optimization problem. Following a brief overview of the theoretical models of economic growth provided in the first chapter, the second chapter illustrates the importance of culture in explaining the differences in incomes across the globe. In particular, if inhabitants have a lower average degree of risk-aversion, the implementation of new technology proceeds much faster compared with countries with a lower tendency towards risk. 
However, this effect depends on the legal and political framework of the countries, their average level of education, and their stage of development. The initial wealth of individuals is often not sufficient to cover the cost of investments in both education and new technologies. By providing loans, a developed financial sector may help to overcome this shortage. However, the investigations in the third chapter show that this mechanism is dependent on the development levels of the economies. In poor countries, growth of the financial sector leads to better education and higher investment levels. This effect diminishes along the development process, as intermediary activity is increasingly replaced by speculative transactions. Particularly in times of low technological innovation, an increasing financial sector has a negative impact on economic development. In fact, the world economy is currently in a phase of this kind. Since the turn of the millennium, growth rates in the advanced economies have experienced a multi-national decline, leading to an intense debate about "secular stagnation" initiated at the beginning of 2015. The fourth chapter deals with this phenomenon and shows that the growth potentials of new technologies have been gradually declining since the beginning of the 2000s. If incomes are unequally distributed, some individuals can invest less in education and technological innovations, which is why the fifth chapter identifies an overall negative effect of inequality on growth. This influence, however, depends on the development level of countries. While the negative effect is strongly pronounced in poor economies with a low degree of equality of opportunity, this influence disappears during the development process. Accordingly, redistributive polices of governments exert a growth-promoting effect in developing countries, while in advanced economies, the fostering of equal opportunities is much more decisive. 
The sixth chapter analyzes the growth effect of the political environment and shows that the ambiguity of earlier studies is mainly due to unsophisticated measurement of the degree of democratization. To solve this problem, the chapter introduces a new method based on mathematical algorithms of machine learning and pattern recognition. While the approach can be used for various classification problems in the field of social sciences, in this dissertation it is applied for the problem of democracy measurement. Based on different country examples, the chapter shows that the resulting SVMDI is superior to other indices in modeling the level of democracy. The subsequent empirical analysis emphasizes a significantly positive growth effect of democracy measured via SVMDI.}, subject = {Wirtschaftsentwicklung}, language = {en} } @phdthesis{Schamberger2022, author = {Schamberger, Tamara Svenja}, title = {Methodological Advances in Composite-based Structural Equation Modeling}, isbn = {978-90-365-5375-9}, doi = {10.3990/1.9789036553759}, url = {http://nbn-resolving.de/urn:nbn:de:bvb:20-opus-276794}, school = {Universit{\"a}t W{\"u}rzburg}, year = {2022}, abstract = {This thesis is about composite-based structural equation modeling. Structural equation modeling in general can be used to model both theoretical concepts and their relations to one another. In traditional factor-based structural equation modeling, these theoretical concepts are modeled as common factors, i.e., as latent variables which explain the covariance structure of their observed variables. In contrast, in composite-based structural equation modeling, the theoretical concepts can be modeled both as common factors and as composites, i.e., as linear combinations of observed variables that convey all the information between their observed variables and all other variables in the model. This thesis presents some methodological advancements in the field of composite-based structural equation modeling. 
In all, this thesis is made up of seven chapters. Chapter 1 provides an overview of the underlying model, as well as explicating the meaning of the term composite-based structural equation modeling. Chapter 2 gives guidelines on how to perform Monte Carlo simulations in the statistic software R using the package "cSEM" with various estimators in the context of composite-based structural equation modeling. These guidelines are illustrated by an example simulation study that investigates the finite sample behavior of partial least squares path modeling (PLS-PM) and consistent partial least squares (PLSc) estimates, particularly regarding the consequences of sample correlations between measurement errors on statistical inference. The third Chapter presents estimators of composite-based structural equation modeling that are robust in responding to outlier distortion. For this purpose, estimators of composite-based structural equation modeling, PLS-PM and PLSc, are adapted. Unlike the original estimators, these adjustments can avoid distortion that could arise from random outliers in samples, as is demonstrated through a simulation study. Chapter 4 presents an approach to performing predictions based on models estimated with ordinal partial least squares and ordinal consistent partial least squares. Here, the observed variables lie on an ordinal categorical scale which is explicitly taken into account in both estimation and prediction. The prediction performance is evaluated by means of a simulation study. In addition, the chapter gives guidelines on how to perform such predictions using the R package "cSEM". This is demonstrated by means of an empirical example. Chapter 5 introduces confirmatory composite analysis (CCA) for research in "Human Development". Using CCA, composite models can be estimated and assessed. 
This chapter uses the Henseler-Ogasawara specification for composite models, allowing, for example, the maximum likelihood method to be used for parameter estimation. Since the maximum likelihood estimator based on the Henseler-Ogasawara specification has limitations, Chapter 6 presents another specification of the composite model by means of which composite models can be estimated with the maximum likelihood method. The results of this maximum likelihood estimator are compared with those of PLS-PM, thus showing that this maximum likelihood estimator gives valid results even in finite samples. The last chapter, Chapter 7, gives an overview of the development and different strands of composite-based structural equation modeling. Additionally, here I examine the contribution the previous chapters make to the wider distribution of composite-based structural equation modeling.}, subject = {Structural Equation Modeling}, language = {en} } @phdthesis{Schuberth2019, author = {Schuberth, Florian}, title = {Composite-based Methods in Structural Equation Modeling}, doi = {10.25972/OPUS-15465}, url = {http://nbn-resolving.de/urn:nbn:de:bvb:20-opus-154653}, school = {Universit{\"a}t W{\"u}rzburg}, year = {2019}, abstract = {This dissertation deals with composite-based methods for structural equation models with latent variables and their enhancement. It comprises five chapters. Besides a brief introduction in the first chapter, the remaining chapters consisting of four essays cover the results of my PhD studies. Two of the essays have already been published in an international journal. The first essay considers an alternative way of construct modeling in structural equation modeling. While in social and behavioral sciences theoretical constructs are typically modeled as common factors, in other sciences the common factor model is an inadequate way of construct modeling due to its assumptions. 
This essay introduces the confirmatory composite analysis (CCA) analogous to confirmatory factor analysis (CFA). In contrast to CFA, CCA models theoretical constructs as composites instead of common factors. Besides the theoretical presentation of CCA and its assumptions, a Monte Carlo simulation is conducted which demonstrates that misspecifications of the composite model can be detected by the introduced test for overall model fit. The second essay raises the question of how parameter differences can be assessed in the framework of partial least squares path modeling. Since the standard errors of the estimated parameters have no analytical closed-form, the t- and F-test known from regression analysis cannot be directly used to test for parameter differences. However, bootstrapping provides a solution to this problem. It can be employed to construct confidence intervals for the estimated parameter differences, which can be used for making inferences about the parameter difference in the population. To guide practitioners, guidelines were developed and demonstrated by means of empirical examples. The third essay answers the question of how ordinal categorical indicators can be dealt with in partial least squares path modeling. A new consistent estimator is developed which combines the polychoric correlation and partial least squares path modeling to appropriately deal with the qualitative character of ordinal categorical indicators. The new estimator named ordinal consistent partial least squares combines consistent partial least squares with ordinal partial least squares. Besides its derivation, a Monte Carlo simulation is conducted which shows that the new estimator performs well in finite samples. Moreover, for illustration, an empirical example is estimated by ordinal consistent partial least squares. The last essay introduces a new consistent estimator for polynomial factor models. 
Similarly to consistent partial least squares, weights are determined to build stand-ins for the latent variables, however a non-iterative approach is used. A Monte Carlo simulation shows that the new estimator behaves well in finite samples.}, subject = {Strukturgleichungsmodell}, language = {en} }