@phdthesis{Schoppa2023, author = {Schoppa, Lukas}, title = {Dynamics in the flood vulnerability of companies}, doi = {10.25932/publishup-59242}, url = {http://nbn-resolving.de/urn:nbn:de:kobv:517-opus4-592424}, school = {Universit{\"a}t Potsdam}, pages = {X, 165}, year = {2023}, abstract = {River flooding is a constant peril for societies, causing direct economic losses on the order of \$100 billion worldwide each year. Under global change, the prolonged concentration of people and assets in floodplains is accompanied by an emerging intensification of flood extremes due to anthropogenic global warming, ultimately exacerbating flood risk in many regions of the world. Flood adaptation plays a key role in the mitigation of impacts, but poor understanding of vulnerability and its dynamics limits the validity of predominant risk assessment methods and impedes effective adaptation strategies. Therefore, this thesis investigates new methods for flood risk assessment that embrace the complexity of flood vulnerability, using the understudied commercial sector as an application example. Despite its importance for accurate risk evaluation, flood loss modeling has long been based on univariable and deterministic stage-damage functions. However, such simplistic methods describe the large variation in damage processes only insufficiently, which initiated the development of multivariable and probabilistic loss estimation techniques. The first study of this thesis developed flood loss models for companies that are based on emerging statistical and machine learning approaches (i.e., random forest, Bayesian network, Bayesian regression). In a benchmarking experiment on the basis of object-level loss survey data, the study showed that all proposed models reproduced the heterogeneity in damage processes and outperformed conventional stage-damage functions with respect to predictive accuracy. Another advantage of the novel methods is that they convey probabilistic information in their predictions, which communicates the large remaining uncertainties transparently and, hence, supports well-informed risk assessment. Flood risk assessment combines vulnerability assessment (e.g., loss estimation) with hazard and exposure analyses. Although all three risk drivers interact and change over time, such dependencies and dynamics are usually not explicitly included in flood risk models. Recently, systemic risk assessment that dissolves the isolated consideration of risk drivers has gained traction, but the move to holistic risk assessment comes with limited thoroughness in loss estimation and with data limitations. In the second study, I augmented a socio-hydrological system dynamics model for companies in Dresden, Germany, with the multivariable Bayesian regression loss model from the first study. The additional process detail and calibration data improved the loss estimation in the systemic risk assessment framework and contributed to more accurate and reliable simulations. The model uses Bayesian inference to quantify uncertainty and learn the model parameters from a combination of prior knowledge and diverse data. The third study demonstrates the potential of the socio-hydrological flood risk model for continuous, long-term risk assessment and management. Using hydroclimatic and socioeconomic forcing data, I projected a wide range of possible risk trajectories until the end of the century, taking into account the adaptive behavior of companies. 
The study results underline the necessity of increased adaptation efforts to counteract the expected intensification of flood risk due to climate change. A sensitivity analysis of the effectiveness of different adaptation measures and strategies revealed that optimized adaptation has the potential to mitigate flood risk by up to 60\%, particularly when combining structural and non-structural measures. Additionally, the application shows that systemic risk assessment is capable of capturing adverse long-term feedbacks in the human-flood system, such as the levee effect. Overall, this thesis advances the representation of vulnerability in flood risk modeling by offering modeling solutions that embrace the complexity of human-flood interactions and quantify uncertainties consistently using probabilistic modeling. The studies show how scarce information in data and previous experiments can be integrated into the inference process to provide model predictions and simulations that are reliable and rich in information. Finally, the focus on the flood vulnerability of companies provides new insights into the heterogeneous damage processes and distinct flood coping of this sector.}, language = {en} } @phdthesis{Penisson2010, author = {P{\'e}nisson, Sophie}, title = {Conditional limit theorems for multitype branching processes and illustration in epidemiological risk analysis}, url = {http://nbn-resolving.de/urn:nbn:de:kobv:517-opus-45307}, school = {Universit{\"a}t Potsdam}, year = {2010}, abstract = {This thesis is concerned with the issue of extinction of populations composed of different types of individuals, and with their behavior before extinction and in the case of a very late extinction. We approach this question firstly from a strictly probabilistic viewpoint, and secondly from the standpoint of risk analysis related to the extinction of a particular model of population dynamics. In this context we propose several statistical tools. The population size is modeled by a branching process, which is either a continuous-time multitype Bienaym{\'e}-Galton-Watson process (BGWc), or its continuous-state counterpart, the multitype Feller diffusion process. We are interested in different kinds of conditioning on non-extinction, and in the associated equilibrium states. These ways of conditioning have been widely studied in the monotype case. However, the literature on multitype processes is much less extensive, and there is no systematic work establishing connections between the results for BGWc processes and those for Feller diffusion processes. In the first part of this thesis, we investigate the behavior of the population before its extinction by conditioning the associated branching process X_t on non-extinction (X_t≠0), or more generally on non-extinction in a near future 0≤θ<∞ (X_{t+θ}≠0), and by letting t tend to infinity. We prove the result, new in the multitype framework and for θ>0, that this limit exists and is non-degenerate. This reflects a stationary behavior for the dynamics of the population conditioned on non-extinction, and provides a generalization of the so-called Yaglom limit, corresponding to the case θ=0. In a second step we study the behavior of the population in the case of a very late extinction, obtained as the limit when θ tends to infinity of the process conditioned on X_{t+θ}≠0. The resulting conditioned process is a known object in the monotype case (sometimes referred to as the Q-process), and has also been studied when X_t is a multitype Feller diffusion process. 
We investigate the not yet considered case where X_t is a multitype BGWc process and prove the existence of the associated Q-process. In addition, we examine its properties, including the asymptotic ones, and propose several interpretations of the process. Finally, we are interested in interchanging the limits in t and θ, as well as in the not yet studied commutativity of these limits with respect to the high-density-type relationship between BGWc processes and Feller processes. We establish an original and exhaustive list of all possible exchanges of limits (long-time limit in t, increasing delay of extinction θ, diffusion limit). The second part of this work is devoted to the risk analysis related both to the extinction of a population and to its very late extinction. We consider a branching population model (arising notably in the epidemiological context) for which a parameter related to the first moments of the offspring distribution is unknown. We build several estimators adapted to different stages of evolution of the population (growth phase, decay phase, and decay phase when extinction is expected very late), and moreover prove their asymptotic properties (consistency, normality). In particular, we build a least squares estimator adapted to the Q-process, allowing a prediction of the population development in the case of a very late extinction. This would correspond to the best-case or the worst-case scenario, depending on whether the population is threatened or invasive. These tools enable us to study the extinction phase of the Bovine Spongiform Encephalopathy epidemic in Great Britain, for which we estimate the infection parameter corresponding to a possible source of horizontal infection persisting after the removal in 1988 of the major route of infection (meat and bone meal). This allows us to predict the evolution of the spread of the disease, including the year of extinction, the number of future cases and the number of infected animals. In particular, we produce a very fine analysis of the evolution of the epidemic in the unlikely event of a very late extinction.}, language = {en} } @phdthesis{GomezZapata2023, author = {G{\'o}mez Zapata, Juan Camilo}, title = {Towards unifying approaches in exposure modelling for scenario-based multi-hazard risk assessments}, doi = {10.25932/publishup-58614}, url = {http://nbn-resolving.de/urn:nbn:de:kobv:517-opus4-586140}, school = {Universit{\"a}t Potsdam}, pages = {iii, xiii, 155}, year = {2023}, abstract = {This cumulative thesis presents a stepwise investigation of the exposure modelling process for risk assessment due to natural hazards, highlighting its importance and associated uncertainties, which have received little discussion to date. Although "exposure" refers to a very broad concept covering everything (and everyone) that is susceptible to damage, in this thesis it is narrowed down to the modelling of large-area residential building stocks. Classical building exposure models for risk applications have been constructed relying entirely on unverified expert elicitation over data sources (e.g., outdated census datasets), and hence have been implicitly assumed to be static in time and in space. Moreover, their spatial representation has also typically been simplified by geographically aggregating the inferred composition onto coarse administrative units whose boundaries do not always capture the spatial variability of the hazard intensities required for accurate risk assessments. 
These two shortcomings and the related epistemic uncertainties embedded within exposure models are tackled in the first three chapters of the thesis. The exposure composition of large-area residential building stocks is studied within the scope of scenario-based earthquake loss models. Then, the proposal of optimal spatial aggregation areas of exposure models for various hazard-related vulnerabilities is presented, focusing on ground-shaking and tsunami risks. Subsequently, once experience has been gained in the study of the composition and spatial aggregation of exposure for various hazards, this thesis moves towards a multi-hazard context while addressing cumulative damage and losses due to consecutive hazard scenarios. This is achieved by proposing a novel method that accounts for pre-existing damage descriptions of building portfolios as a key input for scenario-based multi-risk assessment. Finally, this thesis shows how the integration of the aforementioned elements can be used in risk communication practices. This is done through a modular architecture based on the exploration of quantitative risk scenarios that are contrasted with the social risk perceptions of the communities directly exposed to natural hazards. In Chapter 1, a Bayesian approach is proposed to update the prior assumptions on such composition (i.e., proportions per building typology). This is achieved by integrating high-quality real observations and thereby capturing the intrinsic probabilistic nature of the exposure model. Such observations are accounted for as real evidence from both field inspections (Chapter 2) and freely available data sources used to update existing (but outdated) exposure models (Chapter 3). In these two chapters, earthquake scenarios with parametrised ground motion fields were used transversally to investigate, through sensitivity analyses, the role of such epistemic uncertainties related to the exposure composition. Parametrised scenarios of seismic ground shaking were the hazard input utilised to study the physical vulnerability of building portfolios. The second issue, the spatial aggregation of building exposure models, was investigated within two decoupled vulnerability contexts: due to seismic ground shaking, through the integration of remote sensing techniques (Chapter 3); and within a multi-hazard context, by integrating the occurrence of associated tsunamis (Chapter 4). Therein, a careful selection of the spatial aggregation entities, pursuing computational efficiency and accuracy in the risk estimates due to such independent hazard scenarios (i.e., earthquake and tsunami), is discussed. Therefore, in this thesis, the physical vulnerability of large-area building portfolios due to tsunamis is considered through two main frames: considering and disregarding the interaction at the vulnerability level, through consecutive and decoupled hazard scenarios respectively, which were then contrasted. Contrary to Chapter 4, where no cumulative damages are addressed, in Chapter 5 data and approaches that were already generated in former sections are integrated with a novel modular method to ultimately study the likely interactions at the vulnerability level on building portfolios. This is tested by evaluating cumulative damages and losses after earthquakes with increasing magnitude followed by their respective tsunamis. Such a novel method is grounded on the possibility of re-using existing fragility models within a probabilistic framework. 
The same approach is followed in Chapter 6 to forecast the likely cumulative damages to be experienced by a building stock located in a volcanic multi-hazard setting (ash-fall and lahars). In that section, special focus was placed on the manner in which the forecast loss metrics are communicated to locally exposed communities. Co-existing quantitative scientific approaches (i.e., comprehensive exposure models; explorative risk scenarios involving single and multiple hazards) and semi-qualitative social risk perception (i.e., the level of understanding that the exposed communities have about their own risk) were jointly considered. Such an integration ultimately allowed this thesis to also contribute to enhancing preparedness, science dissemination at the local level, and technology transfer initiatives. Finally, a synthesis of this thesis along with some perspectives for improvement and future work is presented.}, language = {en} } @techreport{AngenendtKochTjaden2023, type = {Working Paper}, author = {Angenendt, Steffen and Koch, Anne and Tjaden, Jasper}, title = {Predicting irregular migration}, series = {high hopes, meagre results}, volume = {11}, journal = {high hopes, meagre results}, publisher = {Stiftung Wissenschaft und Politik (SWP)}, address = {Berlin}, issn = {2747-5123}, doi = {10.18449/2023RP11}, pages = {36}, year = {2023}, abstract = {German and European migration policy operates in permanent crisis mode. Sudden increases in irregular immigration create a sense of loss of control, which is instrumentalised by populist forces. This has generated great interest in quantitative migration predictions. High expectations are placed on the AI-based tools currently under development for forecasting irregular migration. The potential applications of these tools are manifold. They range from managing and strengthening the EU's reception capacity and border protection to configuring humanitarian aid provision and longer-term planning of development programmes. There is a significant gap between the expectations placed on the new instruments and their practical utility. Technical limits exist, medium-term forecasts are methodologically implausible, and channels for feeding the results into political decision-making processes are lacking. The great demand for predictions is driven by the political functions of migration prediction, which include its uses in political communication, funding acquisition and legitimisation of political decisions. Investment in the quality of the underlying data will be more productive than developing a succession of new prediction tools. Funding for applications in emergency relief and development cooperation should be prioritised. Crisis early warning and risk analysis should also be strengthened and their networking improved.}, language = {en} } @techreport{AngenendtKochTjaden2023, type = {Working Paper}, author = {Angenendt, Steffen and Koch, Anne and Tjaden, Jasper}, title = {Die Prognose ungeregelter Wanderungen}, series = {SWP-Studie}, volume = {10}, journal = {SWP-Studie}, publisher = {Stiftung Wissenschaft und Politik (SWP)}, address = {Berlin}, issn = {2747-5115}, doi = {10.18449/2023S10}, pages = {41}, year = {2023}, abstract = {Die deutsche und europ{\"a}ische Migrationspolitik befindet sich im permanenten Krisenmodus. Pl{\"o}tzliche Anstiege ungeregelter Zuwanderung n{\"a}hren ein Gef{\"u}hl von Kontrollverlust, das wiederum von populistischen Kr{\"a}ften instrumentalisiert wird. 
Daher hat die Politik großes Interesse an quantitativen Migrationsprognosen. Besondere Erwartungen wecken KI-gest{\"u}tzte Instrumente zur Vorhersage ungeregelter Wanderungsbewegungen, wie sie zurzeit entwickelt werden. Die Anwendungsfelder dieser Instrumente sind vielf{\"a}ltig. Sie reichen von einer St{\"a}rkung der Aufnahmekapazit{\"a}ten in der EU {\"u}ber die pr{\"a}ventive Versch{\"a}rfung von Grenzschutzmaßnahmen und eine bedarfsgerechte Bereitstellung von Ressourcen in humanit{\"a}ren Krisen bis zur l{\"a}ngerfristigen entwicklungspolitischen Programmplanung. Allerdings besteht eine deutliche Kluft zwischen den Erwartungen an die neuen Instrumente und ihrem praktischen Mehrwert. Zum einen sind die technischen M{\"o}glichkeiten begrenzt, und mittelfristige Vorhersagen zu ungeregelten Wanderungen sind methodisch kaum m{\"o}glich. Zum anderen mangelt es an Verfahren, um die Ergebnisse in politische Entscheidungsprozesse einfließen zu lassen. Die hohe Nachfrage nach Prognosen erkl{\"a}rt sich aus den politischen Funktionen quantitativer Migrationsvorhersage - beispielsweise ihrem Potential f{\"u}r die politische Kommunikation, die Mitteleinwerbung und die Legitimierung politischer Entscheidungen. Investitionen in die Qualit{\"a}t der den Prognosen zugrunde liegenden Daten sind sinnvoller als die Entwicklung immer neuer Instrumente. Bei der Mittelvergabe f{\"u}r Prognosen sollten Anwendungen in der Nothilfe und der Entwicklungszusammenarbeit priorisiert werden. Zudem sollten die Krisenfr{\"u}herkennung und die Risikoanalyse gest{\"a}rkt werden, und die beteiligten Akteure sollten sich besser vernetzen.}, language = {de} }