@misc{TrietDungMerzetal.2018, author = {Triet, Nguyen Van Khanh and Dung, Nguyen Viet and Merz, Bruno and Apel, Heiko}, title = {Towards risk-based flood management in highly productive paddy rice cultivation}, series = {Postprints der Universit{\"a}t Potsdam : Mathematisch-Naturwissenschaftliche Reihe}, journal = {Postprints der Universit{\"a}t Potsdam : Mathematisch-Naturwissenschaftliche Reihe}, number = {931}, issn = {1866-8372}, doi = {10.25932/publishup-44603}, url = {http://nbn-resolving.de/urn:nbn:de:kobv:517-opus4-446032}, pages = {2859 -- 2876}, year = {2018}, abstract = {Flooding is an imminent natural hazard threatening most river deltas, e.g. the Mekong Delta. Appropriate flood management is thus required for the sustainable development of the often densely populated regions. Recently, the traditional event-based hazard control has shifted towards a risk management approach in many regions, driven by intensive research leading to new legal regulations on flood management. However, a large-scale flood risk assessment does not exist for the Mekong Delta. In particular, an assessment of the flood risk to paddy rice cultivation, the most important economic activity in the delta, has not been performed yet. Therefore, the present study was developed to provide the very first insight into delta-scale flood damages and risks to rice cultivation. The flood hazard was quantified by probabilistic flood hazard maps of the whole delta using bivariate extreme value statistics, synthetic flood hydrographs, and a large-scale hydraulic model. The flood risk to paddy rice was then quantified considering cropping calendars, rice phenology, and harvest times based on a time series of the enhanced vegetation index (EVI) derived from MODIS satellite data, and a published rice flood damage function. The proposed concept provided flood risk maps to paddy rice for the Mekong Delta in terms of expected annual damage. The presented concept can be used as a blueprint for regions facing similar problems due to its generic approach. Furthermore, the changes in flood risk to paddy rice caused by changes in land use currently under discussion in the Mekong Delta were estimated. Two land-use scenarios either intensifying or reducing rice cropping were considered, and the changes in risk were presented in spatially explicit flood risk maps. The basic risk maps could serve as guidance for the authorities to develop spatially explicit flood management and mitigation plans for the delta. The land-use change risk maps could further be used for adaptive risk management plans and as a basis for a cost-benefit analysis of the discussed land-use change scenarios. Additionally, the damage and risk maps may support the recently initiated agricultural insurance programme in Vietnam.}, language = {en} } @article{GhaffarJomaaMeonetal.2021, author = {Ghaffar, Salman and Jomaa, Seifeddine and Meon, G{\"u}nter and Rode, Michael}, title = {Spatial validation of a semi-distributed hydrological nutrient transport model}, series = {Journal of hydrology}, volume = {593}, journal = {Journal of hydrology}, publisher = {Elsevier}, address = {Amsterdam}, issn = {0022-1694}, doi = {10.1016/j.jhydrol.2020.125818}, pages = {13}, year = {2021}, abstract = {Semi-distributed hydrological and water quality models are increasingly used as innovative, science-based management tools. However, their application is usually restricted to the gauging stations where they are originally calibrated, limiting their spatial capability.
In this study, the semi-distributed hydrological water quality model HYPE (HYdrological Predictions for the Environment) was tested spatially for its ability to represent nitrate-N (NO$_3^-$-N) and total phosphorus (TP) concentrations and loads of the nested and heterogeneous Selke catchment (463 km$^2$) in central Germany. First, an automatic calibration procedure and uncertainty analysis were conducted using the DiffeRential Evolution Adaptive Metropolis (DREAM) tool to simulate discharge, NO$_3^-$-N and TP concentrations. A multi-site and multi-objective calibration approach was applied using three main gauging stations, covering the most important hydro-meteorological and physiographical characteristics of the whole catchment. Second, the model's capability to represent further internal stations, which were not initially considered for calibration, was tested. Results showed that discharge was well represented by the model at all three main stations during both calibration (1994-1998) and validation (1999-2014) periods, with a lowest Nash-Sutcliffe Efficiency (NSE) of 0.71 and a maximum Percentage BIAS (PBIAS) of 18.0\%. The model was able to reproduce the seasonal dynamics of NO$_3^-$-N and TP concentrations with low predictive uncertainty at the three main stations, reflected by PBIAS values in the ranges from 16.1\% to 6.4\% and from 20.0\% to 11.5\% for NO$_3^-$-N and TP load simulations, respectively. At internal stations, the model could represent the seasonal variation of nutrient concentrations reasonably well, with PBIAS values in the ranges from 9.0\% to 14.2\% for NO$_3^-$-N and from 25.3\% to 34.3\% for TP concentration simulations. Overall, results suggested that the spatial validation of a nutrient transport model can be better ensured when a multi-site and multi-objective calibration approach using archetypical gauging stations is implemented. Further, results revealed that the delineation of sub-catchments should put more focus on hydro-meteorological conditions than on land-use features.}, language = {en} } @article{KhuranaHesseKleidonHildebrandtetal.2022, author = {Khurana, Swamini and Hesse, Falk and Kleidon-Hildebrandt, Anke and Thullner, Martin}, title = {Should we worry about surficial dynamics when assessing nutrient cycling in the groundwater?}, series = {Frontiers in water}, volume = {4}, journal = {Frontiers in water}, publisher = {Frontiers Media}, address = {Lausanne}, issn = {2624-9375}, doi = {10.3389/frwa.2022.780297}, pages = {17}, year = {2022}, abstract = {The fluxes of water and solutes in the subsurface compartment of the Critical Zone are temporally dynamic, and it is unclear how this impacts microbially mediated nutrient cycling in the spatially heterogeneous subsurface. To investigate this, we undertook numerical modeling, simulating the transport in a wide range of spatially heterogeneous domains, and the biogeochemical transformation of organic carbon and nitrogen compounds using a complex microbial community with four distinct functional groups, in water-saturated subsurface compartments. We performed a comprehensive uncertainty analysis accounting for varying residence times and spatial heterogeneity. While the aggregated removal of chemical species in the domains over the entire simulation period was approximately the same as that in steady state conditions, the sub-scale temporal variation of microbial biomass and chemical discharge from a domain depended strongly on the interplay of spatial heterogeneity and temporal dynamics of the forcing.
We showed that the travel time and the Damk{\"o}hler number (Da) can be used to predict the temporally varying chemical discharge from a spatially heterogeneous domain. In homogeneous domains, chemical discharge in temporally dynamic conditions could be double that in steady state conditions, while microbial biomass varied by up to 75\% of that in steady state conditions. In heterogeneous domains, the interquartile range of uncertainty in chemical discharge in reaction-dominated systems ($\log_{10}$Da > 0) was double that in steady state conditions. However, highly heterogeneous domains resulted in outliers where chemical discharge could be as high as 10-20 times that in steady state conditions in high flow periods. In transport-dominated systems ($\log_{10}$Da < 0), the chemical discharge could be half that in steady state conditions in unusually low flow conditions. In conclusion, ignoring spatio-temporal heterogeneities in a numerical modeling approach may lead to severely inaccurate estimates of nutrient export and microbial biomass. The results are relevant to long-term field monitoring studies and to homogeneous soil column-scale experiments investigating the role of temporal dynamics in microbial redox dynamics.}, language = {en} } @misc{SiegVogelMerzetal.2019, author = {Sieg, Tobias and Vogel, Kristin and Merz, Bruno and Kreibich, Heidi}, title = {Seamless Estimation of Hydrometeorological Risk Across Spatial Scales}, series = {Postprints der Universit{\"a}t Potsdam Mathematisch-Naturwissenschaftliche Reihe}, journal = {Postprints der Universit{\"a}t Potsdam Mathematisch-Naturwissenschaftliche Reihe}, number = {743}, issn = {1866-8372}, doi = {10.25932/publishup-43534}, url = {http://nbn-resolving.de/urn:nbn:de:kobv:517-opus4-435341}, pages = {574 -- 581}, year = {2019}, abstract = {Hydrometeorological hazards caused losses of approximately 110 billion U.S. dollars in 2016 worldwide. Current damage estimations do not consider the uncertainties in a comprehensive way, and they are not consistent between spatial scales. Aggregated land use data are used at larger spatial scales, although detailed exposure data at the object level, such as openstreetmap.org, are becoming increasingly available across the globe. We present a probabilistic approach for object-based damage estimation which represents uncertainties and is fully scalable in space. The approach is applied and validated using company damage data from the flood of 2013 in Germany. Damage estimates are more accurate than those of damage models using land use data, and the estimation works reliably at all spatial scales. Therefore, it can also be used for pre-event analyses and risk assessments. This method takes hydrometeorological damage estimation and risk assessments to the next level, making damage estimates and their uncertainties fully scalable in space, from object to country level, and enabling the exploitation of new exposure data.}, language = {en} } @article{SiegVogelMerzetal.2019, author = {Sieg, Tobias and Vogel, Kristin and Merz, Bruno and Kreibich, Heidi}, title = {Seamless Estimation of Hydrometeorological Risk Across Spatial Scales}, series = {Earth's Future}, volume = {7}, journal = {Earth's Future}, number = {5}, publisher = {Wiley-Blackwell}, address = {Hoboken, NJ}, issn = {2328-4277}, doi = {10.1029/2018EF001122}, pages = {574 -- 581}, year = {2019}, abstract = {Hydrometeorological hazards caused losses of approximately 110 billion U.S. dollars in 2016 worldwide.
Current damage estimations do not consider the uncertainties in a comprehensive way, and they are not consistent between spatial scales. Aggregated land use data are used at larger spatial scales, although detailed exposure data at the object level, such as openstreetmap.org, are becoming increasingly available across the globe. We present a probabilistic approach for object-based damage estimation which represents uncertainties and is fully scalable in space. The approach is applied and validated using company damage data from the flood of 2013 in Germany. Damage estimates are more accurate than those of damage models using land use data, and the estimation works reliably at all spatial scales. Therefore, it can also be used for pre-event analyses and risk assessments. This method takes hydrometeorological damage estimation and risk assessments to the next level, making damage estimates and their uncertainties fully scalable in space, from object to country level, and enabling the exploitation of new exposure data.}, language = {en} } @phdthesis{Sieg2018, author = {Sieg, Tobias}, title = {Reliability of flood damage estimations across spatial scales}, doi = {10.25932/publishup-42616}, url = {http://nbn-resolving.de/urn:nbn:de:kobv:517-opus4-426161}, school = {Universit{\"a}t Potsdam}, pages = {XIII, 115}, year = {2018}, abstract = {Extreme natural events are an integral part of the Earth's nature. They only become hazards to society when society is exposed to them. Then, however, natural hazards can have devastating consequences for society. Hydro-meteorological hazards in particular, such as river floods, heavy rainfall events, winter storms, hurricanes or tornadoes, have a high damage potential and occur all around the globe. As the world becomes ever warmer, extreme weather events that can potentially trigger natural hazards are also becoming ever more likely. However, not only a changing environment contributes to the increasing risk from natural hazards, but also a changing society. Appropriate risk management is therefore required to adapt society to these changes at every spatial level. An essential component of this management is the estimation of the economic impacts of natural hazards. So far, however, reliable methods for estimating the impacts of hydro-meteorological hazards are lacking. A central part of this thesis is therefore the development and application of a new method that improves the reliability of damage estimation. As an example, the method was successfully applied to estimate the economic impacts of a river flood, from individual companies up to the impacts on the entire economic system of Germany. Existing methods usually give little information about the reliability of their estimates. Since this information facilitates decisions on adapting to the risk, the new method explicitly represents the reliability of the damage estimates. This reliability refers not only to the damage estimate itself, but also to the assumptions made about the affected buildings.
Following this principle, the reliability of assumptions about the future can also be represented, which is an essential aspect of forecasts. The representation of reliability and the successful application demonstrate the potential of the method for analyses of present and future hydro-meteorological hazards.}, language = {en} } @article{KreibichBottoMerzetal.2016, author = {Kreibich, Heidi and Botto, Anna and Merz, Bruno and Schr{\"o}ter, Kai}, title = {Probabilistic, Multivariable Flood Loss Modeling on the Mesoscale with BT-FLEMO}, series = {Risk analysis}, volume = {37}, journal = {Risk analysis}, number = {4}, publisher = {Wiley}, address = {Hoboken}, issn = {0272-4332}, doi = {10.1111/risa.12650}, pages = {774 -- 787}, year = {2016}, abstract = {Flood loss modeling is an important component of risk analyses and decision support in flood risk management. Commonly, flood loss models describe complex damaging processes by simple, deterministic approaches like depth-damage functions and are associated with large uncertainty. To improve flood loss estimation and to provide quantitative information about the uncertainty associated with loss modeling, a probabilistic, multivariable Bagging decision Tree Flood Loss Estimation MOdel (BT-FLEMO) for residential buildings was developed. The application of BT-FLEMO provides a probability distribution of estimated losses to residential buildings per municipality. BT-FLEMO was applied and validated at the mesoscale in 19 municipalities that were affected during the 2002 flood by the River Mulde in Saxony, Germany. Validation was undertaken, on the one hand, via a comparison with six deterministic loss models, including both depth-damage functions and multivariable models; on the other hand, the results were compared with official loss data. BT-FLEMO outperforms deterministic, univariable, and multivariable models with regard to model accuracy, although the prediction uncertainty remains high. An important advantage of BT-FLEMO is the quantification of prediction uncertainty. The probability distribution of loss estimates by BT-FLEMO represents the variation range of the loss estimates of the other models in the case study well.}, language = {en} } @phdthesis{Roezer2018, author = {R{\"o}zer, Viktor}, title = {Pluvial flood loss to private households}, doi = {10.25932/publishup-42991}, url = {http://nbn-resolving.de/urn:nbn:de:kobv:517-opus4-429910}, school = {Universit{\"a}t Potsdam}, pages = {XXII, 109}, year = {2018}, abstract = {Today, more than half of the world's population lives in urban areas. With a high density of population and assets, urban areas are not only the economic, cultural and social hubs of every society, but they are also highly susceptible to natural disasters. As a consequence of rising sea levels and an expected increase in extreme weather events caused by a changing climate, in combination with growing cities, flooding is an increasing threat to many urban agglomerations around the globe. To mitigate the destructive consequences of flooding, appropriate risk management and adaptation strategies are required. So far, flood risk management in urban areas is almost exclusively focused on managing river and coastal flooding. Often overlooked is the risk from small-scale rainfall-triggered flooding, where the rainfall intensity of rainstorms exceeds the capacity of urban drainage systems, leading to immediate flooding.
Referred to as pluvial flooding, this flood type exclusive to urban areas has caused severe losses in cities around the world. Without further intervention, losses from pluvial flooding are expected to increase in many urban areas due to an increase of impervious surfaces compounded with an aging drainage infrastructure and a projected increase in heavy precipitation events. While this requires the integration of pluvial flood risk into risk management plans, so far little is known about the adverse consequences of pluvial flooding due to a lack of both detailed data sets and studies on pluvial flood impacts. As a consequence, methods for reliably estimating pluvial flood losses, needed for pluvial flood risk assessment, are still missing. Therefore, this thesis investigates how pluvial flood losses to private households can be reliably estimated, based on an improved understanding of the drivers of pluvial flood loss. For this purpose, detailed data from pluvial flood-affected households was collected through structured telephone and web surveys following pluvial flood events in Germany and the Netherlands. Pluvial flood losses to households are the result of complex interactions between impact characteristics such as the water depth and a household's resistance as determined by its risk awareness, preparedness, emergency response, building properties and other influencing factors. Both exploratory analysis and machine-learning approaches were used to analyze differences in resistance and impacts between households and their effects on the resulting losses. The comparison of case studies showed that the awareness of pluvial flooding among private households is quite low. Low awareness not only challenges the effective dissemination of early warnings, but was also found to influence the implementation of private precautionary measures. The latter were predominantly implemented by households with previous experience of pluvial flooding. Even cases where previous flood events affected a different part of the same city did not lead to an increase in preparedness of the surveyed households, highlighting the need to account for small-scale variability in both impact and resistance parameters when assessing pluvial flood risk. While it was concluded that the combination of low awareness, ineffective early warning and the fact that only a minority of buildings were adapted to pluvial flooding impaired the coping capacities of private households, the often low water levels still enabled households to mitigate or even prevent losses through a timely and effective emergency response. These findings were confirmed by the detection of loss-influencing variables, showing that cases in which households were able to prevent any loss to the building structure are predominantly explained by resistance variables such as the household's risk awareness, while the degree of loss is mainly explained by impact variables. Based on the important loss-influencing variables detected, different flood loss models were developed. Similar to flood loss models for river floods, the empirical data from the preceding data collection was used to train flood loss models describing the relationship between impact and resistance parameters and the resulting loss to building structures. Different approaches were adapted from river flood loss models, using both models with the water depth as the only predictor for building structure loss and models incorporating additional variables from the preceding variable detection routine.
The high predictive errors of all compared models showed that point predictions are not suitable for estimating losses on the building level, as they severely impair the reliability of the estimates. For that reason, a new probabilistic framework based on Bayesian inference was introduced that is able to provide predictive distributions instead of single loss estimates. These distributions not only give a range of probable losses, they also provide information on how likely a specific loss value is, representing the uncertainty in the loss estimate. Using probabilistic loss models, it was found that the certainty and reliability of a loss estimate on the building level is not only determined by the use of additional predictors as shown in previous studies, but also by the choice of response distribution defining the shape of the predictive distribution. Here, a mixture of a beta and a Bernoulli distribution, to account for households that are able to prevent losses to their building's structure, was found to provide significantly more certain and reliable estimates than previous approaches using Gaussian or non-parametric response distributions. The successful model transfer and post-event application to estimate building structure loss in Houston, TX, caused by pluvial flooding during Hurricane Harvey confirmed previous findings, and demonstrated the potential of the newly developed multi-variable beta model for future risk assessments. The highly detailed input data set constructed from openly available data sources containing over 304,000 affected buildings in Harris County further showed the potential of data-driven, building-level loss models for pluvial flood risk assessment. In conclusion, pluvial flood losses to private households are the result of complex interactions between impact and resistance variables, which should be represented in loss models. The local occurrence of pluvial floods requires loss estimates at high spatial resolutions, i.e. on the building level, where losses are variable and uncertainties are high. Therefore, probabilistic loss estimates describing the uncertainty of the estimate should be used instead of point predictions. While the performance of probabilistic models on the building level is mainly driven by the choice of response distribution, multi-variable models are recommended for two reasons: First, additional resistance variables improve the detection of cases in which households were able to prevent structural losses. Second, the added variability of additional predictors provides a better representation of the uncertainties when loss estimates from multiple buildings are aggregated. This leads to the conclusion that data-driven probabilistic loss models on the building level allow for a reliable loss estimation at an unprecedented level of detail, with a consistent quantification of uncertainties on all aggregation levels. This makes the presented approach suitable for a wide range of applications, from decision support in spatial planning to impact-based early warning systems.}, language = {en} } @phdthesis{Schaber2002, author = {Schaber, J{\"o}rg}, title = {Phenology in Germany in the 20th century : methods, analyses and models}, url = {http://nbn-resolving.de/urn:nbn:de:kobv:517-0000532}, school = {Universit{\"a}t Potsdam}, year = {2002}, abstract = {The length of the vegetation period (VP) plays a central role in the interannual variation of the carbon storage of terrestrial ecosystems.
The analysis of observational data has shown that the VP has lengthened in the northern latitudes over recent decades. This phenomenon has often been discussed in the context of global warming, since phenology is influenced by temperature. The analysis of plant phenology in southern Germany in the 20th century showed: - The strong advance of the spring phases in the decade before 1999 was not a singular event in the 20th century. Similar trends already occurred in earlier decades. Periods with different trend behaviour could be identified. - There were clear differences between the trends of early and late spring phases. The early spring phases advanced steadily, with a clear advance between 1931 and 1948, a moderate advance between 1948 and 1984, and a strong advance between 1984 and 1999. The late spring phases, in contrast, changed their trend behaviour over these periods from an advance to a clear delay and back to a strong advance. This difference in trend development between early and late spring phases could also be observed for Germany as a whole in the periods 1951 to 1984 and 1984 to 1999. The determining influence of temperature on the spring phases and its modifying influence on the autumn phases was confirmed. It was found, however, that - the temperature functions determining phenology were not correlated with a global annual CO2 signal, which was used as an index of global warming, and - an index of large-scale regional circulation patterns (the NAO index) could explain only a small part of the observed phenological variability. The observed difference in trend behaviour between early and late spring phases could be attributed to the different development of March and April temperatures. While March temperatures rose steadily over the course of the 20th century, with increasing variability in the last 50 years, April temperatures cooled noticeably between the end of the 1940s and the mid-1980s and then warmed markedly again. It was concluded that the advances in spring phenology over recent decades are part of multi-decadal fluctuations that differ by species and relevant seasonal temperature. Because of these fluctuations, no link to a global warming signal could be found. On average, all considered spring phases advanced by between 5 and 20 days between 1951 and 1999 in the natural regions of Germany. A strong difference in the advance between early and late spring phases stems from their different behaviour mentioned above. Leaf colouring was delayed for all species between 1951 and 1999, but advanced on average after 1984. The VP lengthened by about 10 days in Germany between 1951 and 1999. It is mainly the change in the spring phases that leads to a change in the potentially absorbed radiation (PAS).
Dar{\"u}ber hinaus sind es die sp{\"a}ten Fr{\"u}hjahrsphasen, die pro Tag Verfr{\"u}hung st{\"a}rker profitieren, da die zus{\"a}tzlichen Tage l{\"a}nger undw{\"a}rmer sind als dies f{\"u}r die fr{\"u}hen Phasen der Fall ist. Um die relative {\"A}nderung in PAS im Vergleich der Spezies abzusch{\"a}tzen, m{\"u}ssen allerdings auch die Ver{\"a}nderungen in den Herbstphasen ber{\"u}cksichtigt werden. Der deutliche Unterschied zwischen fr{\"u}hen und sp{\"a}ten Fr{\"u}hjahrsphasen konnte durch die Anwendung einer neuen Methode zur Konstruktion von Zeitreihen herausgearbeitet werden. Der neue methodische Ansatz erlaubte die Ableitung verl{\"a}sslicher 100-j{\"a}hriger Zeitreihen und die Konstruktion von lokalen kombinierten Zeitreihen, welche die Datenverf{\"u}gbarkeit f{\"u}r die Modellentwicklung erh{\"o}hten. Ausser analysierten Protokollierungsfehlern wurden mikroklimatische, genetische und Beobachtereinfl{\"u}sse als Quellen von Unsicherheit in ph{\"a}nologischen Daten identifiziert. Ph{\"a}nologischen Beobachtungen eines Ortes k{\"o}nnen sch{\"a}tzungsweise 24 Tage um das parametrische Mittel schwanken.Dies unterst{\"u}tzt die 30-Tage Regel f{\"u}r die Detektion von Ausreissern. Neue Ph{\"a}nologiemodelle, die den Blattaustrieb aus t{\"a}glichen Temperaturreihen simulieren, wurden entwickelt. Diese Modelle basieren auf einfachen Interaktionen zwischen aktivierenden und hemmenden Substanzen, welche die Entwicklungsstadien einer Pflanze bestimmen. Im Allgemeinen konnten die neuen Modelle die Beobachtungsdaten besser simulieren als die klassischen Modelle. Weitere Hauptresultate waren: - Der Bias der klassischen Modelle, d.h. {\"U}bersch{\"a}tzung von fr{\"u}hen und Untersch{\"a}tzung von sp{\"a}ten Beobachtungen, konnte reduziert, aber nicht vollst{\"a}ndig eliminiert werden. - Die besten Modellvarianten f{\"u}r verschiedene Spezies wiesen darauf hin, dass f{\"u}r die sp{\"a}ten Fr{\"u}hjahrsphasen die Tagesl{\"a}nge eine wichtigere Rolle spielt als f{\"u}r die fr{\"u}hen Phasen. - Die Vernalisation spielte gegen{\"u}ber den Temperaturen kurz vor dem Blattaustrieb nur eine untergeordnete Rolle.}, language = {en} } @phdthesis{Kox2018, author = {Kox, Thomas}, title = {Perception and use of uncertainty in severe weather warnings}, url = {http://nbn-resolving.de/urn:nbn:de:kobv:517-opus4-411541}, school = {Universit{\"a}t Potsdam}, pages = {154}, year = {2018}, abstract = {Uncertainty is an essential part of atmospheric processes and thus inherent to weather forecasts. Nevertheless, weather forecasts and warnings are still predominately issued as deterministic (yes or no) forecasts, although research suggests that providing weather forecast users with additional information about the forecast uncertainty can enhance the preparation of mitigation measures. Communicating forecast uncertainty would allow for a provision of information on possible future events at an earlier time. The desired benefit is to enable the users to start with preparatory protective action at an earlier stage of time based on the their own risk assessment and decision threshold. But not all users have the same threshold for taking action. In the course of the project WEXICOM ('Wetterwarnungen: Von der Extremereignis-Information zu Kommunikation und Handlung') funded by the Deutscher Wetterdienst (DWD), three studies were conducted between the years 2012 and 2016 to reveal how weather forecasts and warnings are reflected in weather-related decision-making. 
The studies asked which factors influence the perception of forecasts and the decision to take protective action, and how forecast users make sense of probabilistic information and the additional lead time. In a first exploratory study conducted in 2012, members of emergency services in Germany were asked how weather warnings are communicated to professional end users in the emergency community and how the warnings are converted into mitigation measures. A large number of open questions were selected to identify new topics of interest. The questions covered topics like users' confidence in forecasts, their understanding of probabilistic information as well as their lead time and decision thresholds to start with preparatory mitigation measures. Results show that emergency service personnel generally have a good sense of the uncertainty inherent in weather forecasts. Although no single probability threshold could be identified for organisations to start with preparatory mitigation measures, it became clear that emergency services tend to avoid forecasts based on low probabilities as a basis for their decisions. Based on these findings, a second study conducted with residents of Berlin in 2014 further investigated the question of decision thresholds. The survey questions related to the topics of the perception of and prior experience with severe weather, trustworthiness of forecasters and confidence in weather forecasts, and socio-demographic and socio-economic characteristics. Within the questionnaire a scenario was created to determine individual decision thresholds and to see whether subgroups of the sample lead to different thresholds. The results show that people's willingness to act tends to be higher and decision thresholds tend to be lower if the expected weather event is more severe or the property at risk is of higher value. Several factors influencing risk perception, such as education, housing status and ability to act, have significant effects, whereas socio-demographic determinants alone are often not sufficient to fully grasp risk perception and protection behaviour. Parallel to the quantitative studies, an interview study was conducted with 27 members of German civil protection between 2012 and 2016. The results show that the latest developments in (numerical) weather forecasting do not necessarily fit the current practice of German emergency services. These practices mostly rely, in a reactive manner, on alarms and ground truth rather than on anticipation based on prognoses or forecasts. As the potential consequences rather than the event characteristics determine protective action, the findings support the call and need for impact-based warnings. Forecasters will rely on impact data and need to learn the users' understanding of impact. Therefore, it is recommended to enhance weather communication not only by improving computer models and observation tools, but also by focusing on the aspects of communication and collaboration.
Using information about uncertainty demands awareness and acceptance of the limits of knowledge, that is, of the forecaster's capability to anticipate future developments of the atmosphere and of the user's capability to make sense of this information.}, language = {en} } @article{vanderAaLeopoldWeidlich2020, author = {van der Aa, Han and Leopold, Henrik and Weidlich, Matthias}, title = {Partial order resolution of event logs for process conformance checking}, series = {Decision support systems : DSS}, volume = {136}, journal = {Decision support systems : DSS}, publisher = {Elsevier}, address = {Amsterdam [u.a.]}, issn = {0167-9236}, doi = {10.1016/j.dss.2020.113347}, pages = {12}, year = {2020}, abstract = {While supporting the execution of business processes, information systems record event logs. Conformance checking relies on these logs to analyze whether the recorded behavior of a process conforms to the behavior of a normative specification. A key assumption of existing conformance checking techniques, however, is that all events are associated with timestamps that allow a total order of events per process instance to be inferred. Unfortunately, this assumption is often violated in practice. Due to synchronization issues, manual event recordings, or data corruption, events are only partially ordered. In this paper, we put forward the problem of partial order resolution of event logs to close this gap. It refers to the construction of a probability distribution over all possible total orders of events of an instance. To cope with the order uncertainty in real-world data, we present several estimators for this task, incorporating different notions of behavioral abstraction. Moreover, to reduce the runtime of conformance checking based on partial order resolution, we introduce an approximation method that comes with a bounded error in terms of accuracy. Our experiments with real-world and synthetic data reveal that our approach improves accuracy over the state-of-the-art considerably.}, language = {en} } @article{WagenerReineckePianosi2022, author = {Wagener, Thorsten and Reinecke, Robert and Pianosi, Francesca}, title = {On the evaluation of climate change impact models}, series = {Wiley interdisciplinary reviews : Climate change}, volume = {13}, journal = {Wiley interdisciplinary reviews : Climate change}, number = {3}, publisher = {Wiley}, address = {Hoboken}, issn = {1757-7780}, doi = {10.1002/wcc.772}, pages = {13}, year = {2022}, abstract = {In-depth understanding of the potential implications of climate change is required to guide decision- and policy-makers when developing adaptation strategies and designing infrastructure suitable for future conditions. Impact models that translate potential future climate conditions into variables of interest are needed to create the causal connection between a changing climate and its impacts on different sectors. Recent surveys suggest that the primary strategy for validating such models (and hence for justifying their use) heavily relies on assessing the accuracy of model simulations by comparing them against historical observations. We argue that such a comparison is necessary and valuable, but not sufficient to achieve a comprehensive evaluation of climate change impact models. We believe that a complementary, largely observation-independent, step of model evaluation is needed to ensure more transparency of model behavior and greater robustness of scenario-based analyses.
This step should address the following four questions: (1) Do modeled dominant process controls match our system perception? (2) Is my model's sensitivity to changing forcing as expected? (3) Do modeled decision levers show adequate influence? (4) Can we attribute uncertainty sources throughout the projection horizon? We believe that global sensitivity analysis, with its ability to investigate a model's response to joint variations of multiple inputs in a structured way, offers a coherent approach to address all four questions comprehensively. Such additional model evaluation would strengthen stakeholder confidence in model projections and, therefore, in the adaptation strategies derived with the help of impact models. This article is categorized under: Climate Models and Modeling > Knowledge Generation with Models; Assessing Impacts of Climate Change > Evaluating Future Impacts of Climate Change}, language = {en} } @article{BussKearney2024, author = {Buss, Martin and Kearney, Eric}, title = {Navigating the unknown}, series = {Journal of occupational and organizational psychology}, journal = {Journal of occupational and organizational psychology}, publisher = {Wiley}, address = {Hoboken, NJ}, issn = {0963-1798}, doi = {10.1111/joop.12500}, pages = {7}, year = {2024}, abstract = {Visionary leadership is considered to be one of the most important elements of effective leadership. Among other things, it is related to followers' perceived meaningfulness of their work. However, little is known about whether uncertainty in the workplace alters the effects of visionary leadership. Given that uncertainty is rising in many, if not most, workplaces, it is vital to understand whether this development influences the extent to which visionary leadership is associated with followers' perceived meaningfulness. In a two-source, lagged-design field study of 258 leader-follower dyads from different settings, we show that uncertainty moderates the relation between visionary leadership and followers' perceived meaningfulness such that this relation is more strongly positive when uncertainty is high, rather than low. Moreover, we show that with increasing uncertainty, visionary leadership is more negatively related to followers' turnover intentions via perceived meaningfulness. This research broadens our understanding of how visionary leadership may be a particularly potent tool in times of increasing uncertainty.}, language = {en} } @article{PathirajaLeeuwen2022, author = {Pathiraja, Sahani Darschika and Leeuwen, Peter Jan van}, title = {Multiplicative Non-Gaussian model error estimation in data assimilation}, series = {Journal of advances in modeling earth systems : JAMES}, volume = {14}, journal = {Journal of advances in modeling earth systems : JAMES}, number = {4}, publisher = {American Geophysical Union}, address = {Washington}, issn = {1942-2466}, doi = {10.1029/2021MS002564}, pages = {23}, year = {2022}, abstract = {Model uncertainty quantification is an essential component of effective data assimilation. Model errors associated with sub-grid scale processes are often represented through stochastic parameterizations of the unresolved process. Many existing stochastic parameterization schemes are only applicable when knowledge of the true sub-grid scale process or full observations of the coarse scale process are available, which is typically not the case in real applications.
We present a methodology for estimating the statistics of sub-grid scale processes for the more realistic case that only partial observations of the coarse scale process are available. Model error realizations are estimated over a training period by minimizing their conditional sum of squared deviations given some informative covariates (e.g., state of the system), constrained by available observations and assuming that the observation errors are smaller than the model errors. From these realizations a conditional probability distribution of additive model errors given these covariates is obtained, allowing for complex non-Gaussian error structures. Random draws from this density are then used in actual ensemble data assimilation experiments. We demonstrate the efficacy of the approach through numerical experiments with the multi-scale Lorenz 96 system using both small and large time scale separations between slow (coarse scale) and fast (fine scale) variables. The resulting error estimates and forecasts obtained with this new method are superior to those from two existing methods.}, language = {en} } @phdthesis{Zhou2024, author = {Zhou, Xiangqian}, title = {Modeling of spatially distributed nitrate transport to investigate the effects of drought and river restoration in the Bode catchment, Central Germany}, doi = {10.25932/publishup-62105}, url = {http://nbn-resolving.de/urn:nbn:de:kobv:517-opus4-621059}, school = {Universit{\"a}t Potsdam}, pages = {VIII, 168}, year = {2024}, abstract = {The European Water Framework Directive (WFD) has identified river morphological alteration and diffuse pollution as the two main pressures affecting water bodies in Europe at the catchment scale. Consequently, river restoration has become a priority to achieve the WFD's objective of good ecological status. However, little is known about the effects of stream morphological changes, such as re-meandering, on in-stream nitrate retention at the river network scale. Therefore, catchment nitrate modeling is necessary to guide the implementation of spatially targeted and cost-effective mitigation measures. Meanwhile, Germany, like many other regions in central Europe, has experienced consecutive summer droughts from 2015 to 2018, resulting in significant changes in river nitrate concentrations in various catchments. However, the mechanistic exploration of catchment nitrate responses to changing weather conditions is still lacking. Firstly, a fully distributed, process-based catchment nitrate model (mHM-Nitrate) was used, which was properly calibrated and comprehensively evaluated at numerous spatially distributed nitrate sampling locations. Three calibration schemes were designed, taking into account land use, stream order, and mean nitrate concentrations, and they varied in spatial coverage but used data from the same period (2011-2019). The model performance for discharge was similar among the three schemes, with Nash-Sutcliffe Efficiency (NSE) scores ranging from 0.88 to 0.92. However, for nitrate concentrations, scheme 2 outperformed schemes 1 and 3 when compared to observed data from eight gauging stations. This was likely because scheme 2 incorporated a diverse range of data, including low discharge values and nitrate concentrations, and thus provided a better representation of within-catchment heterogeneity.
Therefore, the study suggests that strategically selecting gauging stations that reflect the full range of within-catchment heterogeneity is more important for calibration than simply increasing the number of stations. Secondly, the mHM-Nitrate model was used to reveal the causal relations between sequential droughts and nitrate concentration in the Bode catchment (3200 km$^2$) in central Germany, where stream nitrate concentrations exhibited contrasting trends from upstream to downstream reaches. The model was evaluated using data from six gauging stations, reflecting different levels of runoff components and their associated nitrate-mixing from upstream to downstream. Results indicated that the mHM-Nitrate model reproduced dynamics of daily discharge and nitrate concentration well, with Nash-Sutcliffe Efficiency ≥ 0.73 for discharge and Kling-Gupta Efficiency ≥ 0.50 for nitrate concentration at most stations. In particular, the spatially contrasting trends of nitrate concentration were successfully captured by the model. The decrease of nitrate concentration in the lowland area in drought years (2015-2018) was presumably due to (1) limited terrestrial export loading (ca. 40\% lower than that of normal years 2004-2014), and (2) increased in-stream retention efficiency (20\% higher in summer within the whole river network). From a mechanistic modelling perspective, this study provided insights into spatially heterogeneous flow and nitrate dynamics and effects of sequential droughts, which shed light on water-quality responses to future climate change, as droughts are projected to be more frequent. Thirdly, this study investigated the effects of stream restoration via re-meandering on in-stream nitrate retention at the network scale in the well-monitored Bode catchment. The mHM-Nitrate model showed good performance in reproducing daily discharge and nitrate concentrations, with median Kling-Gupta values of 0.78 and 0.74, respectively. The mean and standard deviation of gross nitrate retention efficiency, which accounted for both denitrification and assimilatory uptake, were 5.1 ± 0.61\% and 74.7 ± 23.2\% in winter and summer, respectively, within the stream network. The study found that in the summer, denitrification rates were about two times higher in lowland sub-catchments dominated by agricultural lands than in mountainous sub-catchments dominated by forested areas, with median ± SD of 204 ± 22.6 and 102 ± 22.1 mg N m$^{-2}$ d$^{-1}$, respectively. Similarly, assimilatory uptake rates were approximately five times higher in streams surrounded by lowland agricultural areas than in those in higher-elevation, forested areas, with median ± SD of 200 ± 27.1 and 39.1 ± 8.7 mg N m$^{-2}$ d$^{-1}$, respectively. Therefore, restoration strategies targeting lowland agricultural areas may have greater potential for increasing nitrate retention. The study also found that restoring stream sinuosity could increase net nitrate retention efficiency by up to 25.4 ± 5.3\%, with greater effects seen in small streams.
These results suggest that restoration efforts should consider augmenting stream sinuosity to increase nitrate retention and decrease nitrate concentrations at the catchment scale.}, language = {en} } @phdthesis{Zurell2011, author = {Zurell, Damaris}, title = {Integrating dynamic and statistical modelling approaches in order to improve predictions for scenarios of environmental change}, url = {http://nbn-resolving.de/urn:nbn:de:kobv:517-opus-56845}, school = {Universit{\"a}t Potsdam}, year = {2011}, abstract = {Species respond to environmental change by dynamically adjusting their geographical ranges. Robust predictions of these changes are prerequisites to inform dynamic and sustainable conservation strategies. Correlative species distribution models (SDMs) relate species' occurrence records to prevailing environmental factors to describe the environmental niche. They have been widely applied in a global change context as they have comparatively low data requirements and allow for rapid assessments of potential future species' distributions. However, due to their static nature, transient responses to environmental change are essentially ignored in SDMs. Furthermore, neither dispersal nor demographic processes and biotic interactions are explicitly incorporated. Therefore, it has often been suggested to link statistical and mechanistic modelling approaches in order to make more realistic predictions of species' distributions for scenarios of environmental change. In this thesis, I present two different ways of such linkage. (i) Mechanistic modelling can act as a virtual playground for testing statistical models and allows extensive exploration of specific questions. I promote this 'virtual ecologist' approach as a powerful evaluation framework for testing sampling protocols, analyses and modelling tools. Also, I employ such an approach to systematically assess the effects of transient dynamics and ecological properties and processes on the prediction accuracy of SDMs for climate change projections. That way, relevant mechanisms are identified that shape the species' response to altered environmental conditions and which should hence be considered when trying to project species' distributions through time. (ii) I supplement SDM projections of potential future habitat for black grouse in Switzerland with an individual-based population model. By explicitly considering complex interactions between habitat availability and demographic processes, this allows for a more direct assessment of the expected population response to environmental change and associated extinction risks. However, predictions were highly variable across simulations, emphasising the need for principled evaluation tools like sensitivity analysis to assess uncertainty and robustness in dynamic range predictions. Furthermore, I identify data coverage of the environmental niche as a likely cause of contrasting range predictions between SDM algorithms. SDMs may fail to make reliable predictions for truncated and edge niches, meaning that portions of the niche are not represented in the data or that niche edges coincide with data limits. Overall, my thesis contributes to an improved understanding of uncertainty factors in predictions of range dynamics and presents ways to deal with them.
Finally, I provide preliminary guidelines for predictive modelling of dynamic species' responses to environmental change, identify key challenges for future research and discuss emerging developments.}, language = {en} } @phdthesis{Schroeter2020, author = {Schr{\"o}ter, Kai}, title = {Improved flood risk assessment}, doi = {10.25932/publishup-48024}, url = {http://nbn-resolving.de/urn:nbn:de:kobv:517-opus4-480240}, school = {Universit{\"a}t Potsdam}, pages = {408}, year = {2020}, abstract = {Rivers have always flooded their floodplains. Over 2.5 billion people worldwide have been affected by flooding in recent decades. The economic damage is also considerable, averaging 100 billion US dollars per year. There is no doubt that damage and other negative effects of floods can be avoided. However, this has a price: financially and politically. Costs and benefits can be estimated through risk assessments. Questions about the location and frequency of floods, about the objects that could be affected and about their vulnerability are of importance for flood risk managers, insurance companies and politicians. Thus, both variables and factors from the fields of hydrology and socio-economics play a role, with multi-layered connections between them. One example is dikes along a river, which on the one hand contain floods, but on the other hand, by narrowing the natural floodplains, accelerate the flood discharge and increase the danger of flooding for the residents downstream. Such larger connections must be included in the assessment of flood risk. However, in current procedures this is accompanied by simplifying assumptions. Risk assessments are therefore fuzzy and associated with uncertainties. This thesis investigates the benefits and possibilities of new data sources for improving flood risk assessment. New methods and models are developed, which take the mentioned interrelations better into account and also quantify the existing uncertainties of the model results, thus enabling statements about the reliability of risk estimates. For this purpose, data on flood events from various sources are collected and evaluated. This includes precipitation and flow records at measuring stations as well as, for instance, images from social media, which can help to delineate the flooded areas and, with location information, estimate flood damage. Machine learning methods have been successfully used to recognize and understand correlations between floods and impacts from a wide range of data and to develop improved models. Risk models help to develop and evaluate strategies to reduce flood risk. These tools also provide advanced insights into the interplay of various factors and into the expected consequences of flooding. This work shows progress in terms of an improved assessment of flood risks by using diverse data from different sources with innovative methods as well as by the further development of models. Flood risk is variable due to economic and climatic changes and other drivers of risk. In order to keep the knowledge about flood risks up to date, robust, efficient and adaptable methods as proposed in this thesis are of increasing importance.}, language = {en} } @misc{DormannSchymanskiCabraletal.2012, author = {Dormann, Carsten F. and Schymanski, Stanislaus J.
and Cabral, Juliano Sarmento and Chuine, Isabelle and Graham, Catherine and Hartig, Florian and Kearney, Michael and Morin, Xavier and R{\"o}mermann, Christine and Schr{\"o}der-Esselbach, Boris and Singer, Alexander}, title = {Correlation and process in species distribution models: bridging a dichotomy}, series = {Journal of biogeography}, volume = {39}, journal = {Journal of biogeography}, number = {12}, publisher = {Wiley-Blackwell}, address = {Hoboken}, issn = {0305-0270}, doi = {10.1111/j.1365-2699.2011.02659.x}, pages = {2119 -- 2131}, year = {2012}, abstract = {Within the field of species distribution modelling an apparent dichotomy exists between process-based and correlative approaches, where the processes are explicit in the former and implicit in the latter. However, these intuitive distinctions can become blurred when comparing species distribution modelling approaches in more detail. In this review article, we contrast the extremes of the correlative-process spectrum of species distribution models with respect to core assumptions, model building and selection strategies, validation, uncertainties, common errors and the questions they are most suited to answer. The extremes of such approaches differ clearly in many aspects, such as model building approaches, parameter estimation strategies and transferability. However, they also share strengths and weaknesses. We show that claims of one approach being intrinsically superior to the other are misguided and that they ignore the process-correlation continuum as well as the domains of questions that each approach is addressing. Nonetheless, the application of process-based approaches to species distribution modelling lags far behind more correlative (process-implicit) methods, and more research is required to explore their potential benefits. Critical issues for the employment of species distribution modelling approaches are given, together with a guideline for appropriate usage. We close with challenges for the future development of process-explicit species distribution models and how they may complement current approaches to study species distributions.}, language = {en} } @article{McDowellKause2021, author = {McDowell, Michelle and Kause, Astrid}, title = {Communicating uncertainties about the effects of medical interventions using different display formats}, series = {Risk analysis : an international journal}, volume = {41}, journal = {Risk analysis : an international journal}, number = {12}, publisher = {Wiley}, address = {Hoboken}, issn = {0272-4332}, doi = {10.1111/risa.13739}, pages = {2220 -- 2239}, year = {2021}, abstract = {Communicating uncertainties in scientific evidence is important to accurately reflect scientific knowledge, to increase public understanding of uncertainty, and to signal transparency and honesty in reporting. While techniques have been developed to facilitate the communication of uncertainty, many have not been empirically tested or compared for communicating different types of uncertainty, nor have their effects on different cognitive, trust, and behavioral outcomes been evaluated. The present study examined how a point estimate, an imprecise estimate, conflicting estimates, or a statement about the lack of evidence about treatment effects influenced participants' responses to communications about medical evidence. For each type of uncertainty, we adapted three display formats to communicate the information: tables, bar graphs, and icon arrays.
We compared participants' best estimates of treatment effects, as well as effects on recall, subjective evaluations (understandability and usefulness), certainty perceptions, perceptions of trustworthiness of the information, and behavioral intentions. We did not find any detrimental effects from communicating imprecision or conflicting estimates relative to a point estimate across any outcome. Furthermore, there were more favorable responses to communicating imprecision or conflicting estimates relative to lack of evidence, where participants estimated the treatment would improve outcomes by 30-50\% relative to a placebo. There were no differences across display formats, suggesting that, if well designed, it may not matter which format is used. Future research on specific display formats or uncertainty types, and with larger sample sizes, would be needed to detect small effects. Implications for the communication of uncertainty are discussed.}, language = {en} } @misc{Korup2020, author = {Korup, Oliver}, title = {Bayesian geomorphology}, series = {Zweitver{\"o}ffentlichungen der Universit{\"a}t Potsdam : Mathematisch-Naturwissenschaftliche Reihe}, journal = {Zweitver{\"o}ffentlichungen der Universit{\"a}t Potsdam : Mathematisch-Naturwissenschaftliche Reihe}, number = {1}, issn = {1866-8372}, doi = {10.25932/publishup-53989}, url = {http://nbn-resolving.de/urn:nbn:de:kobv:517-opus4-539892}, pages = {24}, year = {2020}, abstract = {The rapidly growing amount and diversity of data are confronting us more than ever with the need to make informed predictions under uncertainty. The adverse impacts of climate change and natural hazards also motivate our search for reliable predictions. The range of statistical techniques that geomorphologists use to tackle this challenge has been growing, but rarely involves Bayesian methods. Instead, many geomorphic models rely on estimated averages that largely miss out on the variability of form and process. Yet seemingly fixed estimates of channel heads, sediment rating curves or glacier equilibrium lines, for example, are all prone to uncertainties. Neighbouring scientific disciplines such as physics, hydrology or ecology have readily embraced Bayesian methods to fully capture and better explain such uncertainties, as the necessary computational tools have advanced greatly. The aim of this article is to introduce the Bayesian toolkit to scientists concerned with Earth surface processes and landforms, and to show how geomorphic models might benefit from probabilistic concepts. I briefly review the use of Bayesian reasoning in geomorphology, and outline the corresponding variants of regression and classification in several worked examples.}, language = {en} } @article{Korup2020, author = {Korup, Oliver}, title = {Bayesian geomorphology}, series = {Earth surface processes and landforms : the journal of the British Geomorphological Research Group}, volume = {46}, journal = {Earth surface processes and landforms : the journal of the British Geomorphological Research Group}, number = {1}, publisher = {Wiley}, address = {Hoboken}, issn = {0197-9337}, doi = {10.1002/esp.4995}, pages = {151 -- 172}, year = {2020}, abstract = {The rapidly growing amount and diversity of data are confronting us more than ever with the need to make informed predictions under uncertainty. The adverse impacts of climate change and natural hazards also motivate our search for reliable predictions.
The range of statistical techniques that geomorphologists use to tackle this challenge has been growing, but rarely involves Bayesian methods. Instead, many geomorphic models rely on estimated averages that largely miss out on the variability of form and process. Yet seemingly fixed estimates of channel heads, sediment rating curves or glacier equilibrium lines, for example, are all prone to uncertainties. Neighbouring scientific disciplines such as physics, hydrology or ecology have readily embraced Bayesian methods to fully capture and better explain such uncertainties, as the necessary computational tools have advanced greatly. The aim of this article is to introduce the Bayesian toolkit to scientists concerned with Earth surface processes and landforms, and to show how geomorphic models might benefit from probabilistic concepts. I briefly review the use of Bayesian reasoning in geomorphology, and outline the corresponding variants of regression and classification in several worked examples.}, language = {en} } @article{SurethKalkuhlEdenhoferetal.2023, author = {Sureth, Michael and Kalkuhl, Matthias and Edenhofer, Ottmar and Rockstr{\"o}m, Johan}, title = {A welfare economic approach to planetary boundaries}, series = {Jahrb{\"u}cher f{\"u}r National{\"o}konomie und Statistik}, volume = {243}, journal = {Jahrb{\"u}cher f{\"u}r National{\"o}konomie und Statistik}, number = {5}, publisher = {De Gruyter Oldenbourg}, address = {Berlin}, issn = {0021-4027}, doi = {10.1515/jbnst-2022-0022}, pages = {477 -- 542}, year = {2023}, abstract = {The crises of both the climate and the biosphere are manifestations of the imbalance between human extractive and polluting activities and the Earth's regenerative capacity. Planetary boundaries define limits for biophysical systems and processes that regulate the stability and life support capacity of the Earth system, and thereby also define a safe operating space for humanity on Earth. Budgets associated with planetary boundaries can be understood as global commons: common-pool resources that can be utilized within finite limits. Despite the analytical interpretation of planetary boundaries as global commons, the planetary boundaries framework still lacks a thorough integration into economic theory. We aim to bridge the gap between welfare economic theory and planetary boundaries as derived in the natural sciences by presenting a unified theory of cost-benefit analysis (CBA) and cost-effectiveness analysis (CEA). Our pragmatic approach aims to overcome shortcomings of the practical applications of CEA and CBA to environmental problems of planetary scale. To do so, we develop a model framework and explore decision paradigms that give guidance to setting limits on human activities. This conceptual framework is then applied to planetary boundaries. We conclude by using the realized insights to derive a research agenda that builds on the understanding of planetary boundaries as global commons.}, language = {en} } @misc{RolinskiRammigWalzetal.2014, author = {Rolinski, Susanne and Rammig, Anja and Walz, Ariane and von Bloh, Werner and van Oijen, M.
and Thonicke, Kirsten}, title = {A probabilistic risk assessment for the vulnerability of the European carbon cycle to weather extremes}, series = {Postprints der Universit{\"a}t Potsdam : Mathematisch-Naturwissenschaftliche Reihe}, journal = {Postprints der Universit{\"a}t Potsdam : Mathematisch-Naturwissenschaftliche Reihe}, number = {487}, issn = {1866-8372}, url = {http://nbn-resolving.de/urn:nbn:de:kobv:517-opus4-407999}, pages = {1813 -- 1831}, year = {2014}, abstract = {Extreme weather events are likely to occur more often under climate change, and the resulting effects on ecosystems could lead to a further acceleration of climate change. But not all extreme weather events lead to an extreme ecosystem response. Here, we focus on hazardous ecosystem behaviour and identify coinciding weather conditions. We use a simple probabilistic risk assessment based on time series of ecosystem behaviour and climate conditions. Following risk assessment terminology, vulnerability and risk for the previously defined hazard are estimated on the basis of observed hazardous ecosystem behaviour. We apply this approach to extreme responses of terrestrial ecosystems to drought, defining the hazard as a negative net biome productivity over a 12-month period. We show an application for two selected sites using data for 1981-2010 and then apply the method at the pan-European scale for the same period, based on numerical modelling results (LPJmL for ecosystem behaviour; ERA-Interim data for climate). Our site-specific results demonstrate the applicability of the proposed method, using the Standardized Precipitation-Evapotranspiration Index (SPEI) to describe the climate conditions. The site in Spain provides an example of vulnerability to drought because the expected value of the SPEI is 0.4 lower for hazardous than for non-hazardous ecosystem behaviour. The site in northern Germany, by contrast, is not vulnerable to drought because the SPEI expectation values imply wetter conditions in the hazard case than in the non-hazard case. At the pan-European scale, ecosystem vulnerability to drought is found in the Mediterranean and temperate regions, whereas Scandinavian ecosystems are vulnerable under conditions without water shortages. These first model-based applications indicate the conceptual advantages of the proposed method, which focuses on identifying the critical weather conditions under which hazardous ecosystem behaviour is observed in the analysed data set. Applying the method to empirical time series and to future climate would be important next steps to test the approach.}, language = {en} } @article{WeatherillCotton2020, author = {Weatherill, Graeme and Cotton, Fabrice}, title = {A ground motion logic tree for seismic hazard analysis in the stable cratonic region of Europe}, series = {Bulletin of earthquake engineering : official publication of the European Association for Earthquake Engineering}, volume = {18}, journal = {Bulletin of earthquake engineering : official publication of the European Association for Earthquake Engineering}, number = {14}, publisher = {Springer Science + Business Media B.V.}, address = {Dordrecht}, issn = {1570-761X}, doi = {10.1007/s10518-020-00940-x}, pages = {6119 -- 6148}, year = {2020}, abstract = {Regions of low seismicity present a particular challenge for probabilistic seismic hazard analysis when identifying suitable ground motion models (GMMs) and quantifying their epistemic uncertainty.
The 2020 European Seismic Hazard Model adopts a scaled backbone approach to characterise this uncertainty for shallow seismicity in Europe, incorporating region-to-region source and attenuation variability based on European strong motion data. This approach, however, may not be suited to the stable cratonic region of northeastern Europe (encompassing Finland, Sweden and the Baltic countries), where exploration of various global geophysical datasets reveals that its crustal properties are distinctly different from those of the rest of Europe and are instead more closely represented by those of the Central and Eastern United States. Building upon the suite of models developed by the recent NGA-East project, we construct a new scaled backbone ground motion model and calibrate its corresponding epistemic uncertainties. The resulting logic tree is shown to provide hazard outcomes comparable to those of the epistemic uncertainty modelling strategy adopted for the Eastern United States, despite the different approaches taken. Comparison with previous GMM selections for northeastern Europe, however, highlights key differences in short-period accelerations resulting from new assumptions regarding the characteristics of the reference rock and its influence on site amplification.}, language = {en} }
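
Schroeter (2020) reports that machine learning methods were used to link floods to observed impacts and to quantify the uncertainty of damage estimates. The following is a minimal sketch of that general idea, not the thesis's actual models: a random forest damage model whose per-tree spread serves as a rough, uncalibrated uncertainty indicator. The predictor variables, the synthetic data, and the use of scikit-learn are all assumptions made here for illustration.

import numpy as np
from sklearn.ensemble import RandomForestRegressor

rng = np.random.default_rng(42)

# Synthetic training data: inundation depth [m], flow velocity [m/s],
# exposed asset value [kEUR] -> relative damage in [0, 1]. Invented.
n = 500
X = np.column_stack([
    rng.uniform(0.0, 3.0, n),   # inundation depth
    rng.uniform(0.0, 2.0, n),   # flow velocity
    rng.uniform(50, 500, n),    # exposed asset value
])
rel_damage = np.clip(0.3 * X[:, 0] + 0.1 * X[:, 1] + rng.normal(0, 0.05, n), 0, 1)

model = RandomForestRegressor(n_estimators=200, random_state=0).fit(X, rel_damage)

# Predict for one hypothetical building; the spread across individual
# trees gives a rough (not calibrated) sense of model uncertainty.
x_new = np.array([[1.5, 0.5, 200.0]])
per_tree = np.array([t.predict(x_new)[0] for t in model.estimators_])
print(f"mean damage ratio: {per_tree.mean():.2f}, "
      f"5-95% tree spread: [{np.quantile(per_tree, 0.05):.2f}, "
      f"{np.quantile(per_tree, 0.95):.2f}]")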
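
Korup (2020) argues that seemingly fixed geomorphic estimates, such as channel-head locations or sediment rating curves, carry uncertainties that Bayesian methods make explicit by turning a prior belief and new data into a posterior distribution. Below is a minimal sketch of one such update, assuming a conjugate normal prior and a normal likelihood with known measurement noise; the channel-head elevation framing and all numbers are invented and are not taken from the article's worked examples.

import numpy as np

rng = np.random.default_rng(1)
obs = rng.normal(loc=640.0, scale=25.0, size=12)  # synthetic field data [m]

mu0, tau0 = 600.0, 50.0   # prior mean and prior std of channel-head elevation
sigma = 25.0              # assumed known observation noise

# Conjugate normal-normal update: precisions add, and the posterior mean
# is the precision-weighted average of prior mean and data.
n = obs.size
post_prec = 1.0 / tau0**2 + n / sigma**2
post_var = 1.0 / post_prec
post_mean = post_var * (mu0 / tau0**2 + obs.sum() / sigma**2)

print(f"posterior: N({post_mean:.1f}, {np.sqrt(post_var):.1f}^2) "
      f"vs prior N({mu0}, {tau0}^2)")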
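
The risk assessment in Rolinski et al. (2014) hinges on comparing expected climate conditions between hazardous and non-hazardous ecosystem behaviour, with the hazard defined as negative net biome productivity (NBP) over a 12-month period. A toy sketch of that conditional-expectation step follows; the synthetic SPEI and NBP series stand in for the LPJmL and ERA-Interim data used in the study, and the numbers are invented.

import numpy as np

rng = np.random.default_rng(7)
n_years = 30
spei = rng.normal(0.0, 1.0, n_years)                 # annual-scale SPEI
nbp = 80.0 * spei + rng.normal(0.0, 60.0, n_years)   # toy NBP [gC m-2 yr-1]

# Hazard: negative NBP over the 12-month window.
hazard = nbp < 0.0
dE = spei[~hazard].mean() - spei[hazard].mean()
print(f"E[SPEI | no hazard] - E[SPEI | hazard] = {dE:.2f}")
# A positive difference means hazardous ecosystem behaviour coincides with
# drier conditions, i.e. vulnerability to drought in this framing.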
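
Weatherill and Cotton (2020) build a scaled backbone logic tree, in which alternative branches shift a backbone ground motion model and carry weights that encode epistemic uncertainty; the hazard curve is then the weight-averaged exceedance probability across branches. The sketch below shows that weighted-average computation for a toy lognormal ground motion model; the attenuation coefficients, branch shifts, weights, and sigma are invented and bear no relation to the calibrated model in the paper.

import numpy as np
from scipy.stats import norm

def backbone_ln_median(magnitude, distance_km):
    # Assumed toy attenuation relation, not a published GMM.
    return 1.2 * magnitude - 1.5 * np.log(distance_km + 10.0) - 2.0

branches = [(-0.3, 0.2), (0.0, 0.6), (0.3, 0.2)]  # (ln-scale shift, weight)
sigma_ln = 0.7                                    # aleatory variability

pga_levels = np.array([0.05, 0.1, 0.2, 0.4])      # ground motion levels [g]
ln_med = backbone_ln_median(magnitude=5.5, distance_km=30.0)

# Probability of exceeding each level, averaged over branches by weight.
poe = sum(w * norm.sf((np.log(pga_levels) - (ln_med + shift)) / sigma_ln)
          for shift, w in branches)
print(dict(zip(pga_levels, np.round(poe, 3))))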