@article{PathirajaLeeuwen2022, author = {Pathiraja, Sahani Darschika and van Leeuwen, Peter Jan}, title = {Multiplicative {Non-Gaussian} model error estimation in data assimilation}, series = {Journal of advances in modeling earth systems : JAMES}, volume = {14}, journal = {Journal of advances in modeling earth systems : JAMES}, number = {4}, publisher = {American Geophysical Union}, address = {Washington}, issn = {1942-2466}, doi = {10.1029/2021MS002564}, pages = {23}, year = {2022}, abstract = {Model uncertainty quantification is an essential component of effective data assimilation. Model errors associated with sub-grid scale processes are often represented through stochastic parameterizations of the unresolved process. Many existing Stochastic Parameterization schemes are only applicable when knowledge of the true sub-grid scale process or full observations of the coarse scale process are available, which is typically not the case in real applications. We present a methodology for estimating the statistics of sub-grid scale processes for the more realistic case that only partial observations of the coarse scale process are available. Model error realizations are estimated over a training period by minimizing their conditional sum of squared deviations given some informative covariates (e.g., state of the system), constrained by available observations and assuming that the observation errors are smaller than the model errors. From these realizations a conditional probability distribution of additive model errors given these covariates is obtained, allowing for complex non-Gaussian error structures. Random draws from this density are then used in actual ensemble data assimilation experiments. We demonstrate the efficacy of the approach through numerical experiments with the multi-scale Lorenz 96 system using both small and large time scale separations between slow (coarse scale) and fast (fine scale) variables. 
The resulting error estimates and forecasts obtained with this new method are superior to those from two existing methods.}, language = {en} } @article{KhuranaHesseKleidonHildebrandtetal.2022, author = {Khurana, Swamini and Hesse, Falk and Kleidon-Hildebrandt, Anke and Thullner, Martin}, title = {Should we worry about surficial dynamics when assessing nutrient cycling in the groundwater?}, series = {Frontiers in water}, volume = {4}, journal = {Frontiers in water}, publisher = {Frontiers Media}, address = {Lausanne}, issn = {2624-9375}, doi = {10.3389/frwa.2022.780297}, pages = {17}, year = {2022}, abstract = {The fluxes of water and solutes in the subsurface compartment of the Critical Zone are temporally dynamic and it is unclear how this impacts microbial mediated nutrient cycling in the spatially heterogeneous subsurface. To investigate this, we undertook numerical modeling, simulating the transport in a wide range of spatially heterogeneous domains, and the biogeochemical transformation of organic carbon and nitrogen compounds using a complex microbial community with four (4) distinct functional groups, in water saturated subsurface compartments. We performed a comprehensive uncertainty analysis accounting for varying residence times and spatial heterogeneity. While the aggregated removal of chemical species in the domains over the entire simulation period was approximately the same as that in steady state conditions, the sub-scale temporal variation of microbial biomass and chemical discharge from a domain depended strongly on the interplay of spatial heterogeneity and temporal dynamics of the forcing. We showed that the travel time and the Damk{\"o}hler number (Da) can be used to predict the temporally varying chemical discharge from a spatially heterogeneous domain. 
In homogeneous domains, chemical discharge in temporally dynamic conditions could be double of that in the steady state conditions while microbial biomass varied up to 75\% of that in steady state conditions. In heterogeneous domains, the interquartile range of uncertainty in chemical discharge in reaction dominated systems (log(10)Da > 0) was double of that in steady state conditions. However, high heterogeneous domains resulted in outliers where chemical discharge could be as high as 10-20 times of that in steady state conditions in high flow periods. And in transport dominated systems (log(10)Da < 0), the chemical discharge could be half of that in steady state conditions in unusually low flow conditions. In conclusion, ignoring spatio-temporal heterogeneities in a numerical modeling approach may exacerbate inaccurate estimation of nutrient export and microbial biomass. The results are relevant to long-term field monitoring studies, and for homogeneous soil column-scale experiments investigating the role of temporal dynamics on microbial redox dynamics.}, language = {en} } @article{BussKearney2024, author = {Buss, Martin and Kearney, Eric}, title = {Navigating the unknown}, series = {Journal of occupational and organizational psychology}, journal = {Journal of occupational and organizational psychology}, publisher = {Wiley}, address = {Hoboken, NJ}, issn = {0963-1798}, doi = {10.1111/joop.12500}, pages = {7}, year = {2024}, abstract = {Visionary leadership is considered to be one of the most important elements of effective leadership. Among other things, it is related to followers' perceived meaningfulness of their work. However, little is known about whether uncertainty in the workplace affects visionary leadership's effects. Given that uncertainty is rising in many, if not most, workplaces, it is vital to understand whether this development influences the extent to which visionary leadership is associated with followers' perceived meaningfulness. 
In a two-source, lagged design field study of 258 leader-follower dyads from different settings, we show that uncertainty moderates the relation between visionary leadership and followers' perceived meaningfulness such that this relation is more strongly positive when uncertainty is high, rather than low. Moreover, we show that with increasing uncertainty, visionary leadership is more negatively related to followers' turnover intentions via perceived meaningfulness. This research broadens our understanding of how visionary leadership may be a particularly potent tool in times of increasing uncertainty.}, language = {en} } @article{SurethKalkuhlEdenhoferetal.2023, author = {Sureth, Michael and Kalkuhl, Matthias and Edenhofer, Ottmar and Rockstr{\"o}m, Johan}, title = {A welfare economic approach to planetary boundaries}, series = {Jahrb{\"u}cher f{\"u}r National{\"o}konomie und Statistik}, volume = {243}, journal = {Jahrb{\"u}cher f{\"u}r National{\"o}konomie und Statistik}, number = {5}, publisher = {De Gruyter Oldenbourg}, address = {Berlin}, issn = {0021-4027}, doi = {10.1515/jbnst-2022-0022}, pages = {477 -- 542}, year = {2023}, abstract = {The crises of both the climate and the biosphere are manifestations of the imbalance between human extractive, and polluting activities and the Earth's regenerative capacity. Planetary boundaries define limits for biophysical systems and processes that regulate the stability and life support capacity of the Earth system, and thereby also define a safe operating space for humanity on Earth. Budgets associated to planetary boundaries can be understood as global commons: common pool resources that can be utilized within finite limits. Despite the analytical interpretation of planetary boundaries as global commons, the planetary boundaries framework is missing a thorough integration into economic theory. 
We aim to bridge the gap between welfare economic theory and planetary boundaries as derived in the natural sciences by presenting a unified theory of cost-benefit and cost-effectiveness analysis. Our pragmatic approach aims to overcome shortcomings of the practical applications of CEA and CBA to environmental problems of a planetary scale. To do so, we develop a model framework and explore decision paradigms that give guidance to setting limits on human activities. This conceptual framework is then applied to planetary boundaries. We conclude by using the realized insights to derive a research agenda that builds on the understanding of planetary boundaries as global commons.}, language = {en} } @article{McDowellKause2021, author = {McDowell, Michelle and Kause, Astrid}, title = {Communicating uncertainties about the effects of medical interventions using different display formats}, series = {Risk analysis : an international journal}, volume = {41}, journal = {Risk analysis : an international journal}, number = {12}, publisher = {Wiley}, address = {Hoboken}, issn = {0272-4332}, doi = {10.1111/risa.13739}, pages = {2220 -- 2239}, year = {2021}, abstract = {Communicating uncertainties in scientific evidence is important to accurately reflect scientific knowledge, increase public understanding of uncertainty, and to signal transparency and honesty in reporting. While techniques have been developed to facilitate the communication of uncertainty, many have not been empirically tested, compared for communicating different types of uncertainty, or their effects on different cognitive, trust, and behavioral outcomes have not been evaluated. The present study examined how a point estimate, imprecise estimate, conflicting estimates, or a statement about the lack of evidence about treatment effects, influenced participant's responses to communications about medical evidence. 
For each type of uncertainty, we adapted three display formats to communicate the information: tables, bar graphs, and icon arrays. We compared participant's best estimates of treatment effects, as well as effects on recall, subjective evaluations (understandability and usefulness), certainty perceptions, perceptions of trustworthiness of the information, and behavioral intentions. We did not find any detrimental effects from communicating imprecision or conflicting estimates relative to a point estimate across any outcome. Furthermore, there were more favorable responses to communicating imprecision or conflicting estimates relative to lack of evidence, where participants estimated the treatment would improve outcomes by 30-50\% relative to a placebo. There were no differences across display formats, suggesting that, if well-designed, it may not matter which format is used. Future research on specific display formats or uncertainty types and with larger sample sizes would be needed to detect small effects. Implications for the communication of uncertainty are discussed.}, language = {en} } @article{vanderAaLeopoldWeidlich2020, author = {van der Aa, Han and Leopold, Henrik and Weidlich, Matthias}, title = {Partial order resolution of event logs for process conformance checking}, series = {Decision support systems : DSS}, volume = {136}, journal = {Decision support systems : DSS}, publisher = {Elsevier}, address = {Amsterdam [u.a.]}, issn = {0167-9236}, doi = {10.1016/j.dss.2020.113347}, pages = {12}, year = {2020}, abstract = {While supporting the execution of business processes, information systems record event logs. Conformance checking relies on these logs to analyze whether the recorded behavior of a process conforms to the behavior of a normative specification. A key assumption of existing conformance checking techniques, however, is that all events are associated with timestamps that allow to infer a total order of events per process instance. 
Unfortunately, this assumption is often violated in practice. Due to synchronization issues, manual event recordings, or data corruption, events are only partially ordered. In this paper, we put forward the problem of partial order resolution of event logs to close this gap. It refers to the construction of a probability distribution over all possible total orders of events of an instance. To cope with the order uncertainty in real-world data, we present several estimators for this task, incorporating different notions of behavioral abstraction. Moreover, to reduce the runtime of conformance checking based on partial order resolution, we introduce an approximation method that comes with a bounded error in terms of accuracy. Our experiments with real-world and synthetic data reveal that our approach improves accuracy over the state-of-the-art considerably.}, language = {en} } @article{WagenerReineckePianosi2022, author = {Wagener, Thorsten and Reinecke, Robert and Pianosi, Francesca}, title = {On the evaluation of climate change impact models}, series = {Wiley interdisciplinary reviews : Climate change}, volume = {13}, journal = {Wiley interdisciplinary reviews : Climate change}, number = {3}, publisher = {Wiley}, address = {Hoboken}, issn = {1757-7780}, doi = {10.1002/wcc.772}, pages = {13}, year = {2022}, abstract = {In-depth understanding of the potential implications of climate change is required to guide decision- and policy-makers when developing adaptation strategies and designing infrastructure suitable for future conditions. Impact models that translate potential future climate conditions into variables of interest are needed to create the causal connection between a changing climate and its impact for different sectors. Recent surveys suggest that the primary strategy for validating such models (and hence for justifying their use) heavily relies on assessing the accuracy of model simulations by comparing them against historical observations. 
We argue that such a comparison is necessary and valuable, but not sufficient to achieve a comprehensive evaluation of climate change impact models. We believe that a complementary, largely observation-independent, step of model evaluation is needed to ensure more transparency of model behavior and greater robustness of scenario-based analyses. This step should address the following four questions: (1) Do modeled dominant process controls match our system perception? (2) Is my model's sensitivity to changing forcing as expected? (3) Do modeled decision levers show adequate influence? (4) Can we attribute uncertainty sources throughout the projection horizon? We believe that global sensitivity analysis, with its ability to investigate a model's response to joint variations of multiple inputs in a structured way, offers a coherent approach to address all four questions comprehensively. Such additional model evaluation would strengthen stakeholder confidence in model projections and, therefore, into the adaptation strategies derived with the help of impact models. 
This article is categorized under: Climate Models and Modeling > Knowledge Generation with Models Assessing Impacts of Climate Change > Evaluating Future Impacts of Climate Change}, language = {en} } @article{WeatherillCotton2020, author = {Weatherill, Graeme and Cotton, Fabrice Pierre}, title = {A ground motion logic tree for seismic hazard analysis in the stable cratonic region of {Europe}}, series = {Bulletin of earthquake engineering : official publication of the European Association for Earthquake Engineering}, volume = {18}, journal = {Bulletin of earthquake engineering : official publication of the European Association for Earthquake Engineering}, number = {14}, publisher = {Springer Science + Business Media B.V.}, address = {Dordrecht}, issn = {1570-761X}, doi = {10.1007/s10518-020-00940-x}, pages = {6119 -- 6148}, year = {2020}, abstract = {Regions of low seismicity present a particular challenge for probabilistic seismic hazard analysis when identifying suitable ground motion models (GMMs) and quantifying their epistemic uncertainty. The 2020 European Seismic Hazard Model adopts a scaled backbone approach to characterise this uncertainty for shallow seismicity in Europe, incorporating region-to-region source and attenuation variability based on European strong motion data. This approach, however, may not be suited to stable cratonic region of northeastern Europe (encompassing Finland, Sweden and the Baltic countries), where exploration of various global geophysical datasets reveals that its crustal properties are distinctly different from the rest of Europe, and are instead more closely represented by those of the Central and Eastern United States. Building upon the suite of models developed by the recent NGA East project, we construct a new scaled backbone ground motion model and calibrate its corresponding epistemic uncertainties. 
The resulting logic tree is shown to provide comparable hazard outcomes to the epistemic uncertainty modelling strategy adopted for the Eastern United States, despite the different approaches taken. Comparison with previous GMM selections for northeastern Europe, however, highlights key differences in short period accelerations resulting from new assumptions regarding the characteristics of the reference rock and its influence on site amplification.}, language = {en} } @article{Korup2020, author = {Korup, Oliver}, title = {Bayesian geomorphology}, series = {Earth surface processes and landforms : the journal of the British Geomorphological Research Group}, volume = {46}, journal = {Earth surface processes and landforms : the journal of the British Geomorphological Research Group}, number = {1}, publisher = {Wiley}, address = {Hoboken}, issn = {0197-9337}, doi = {10.1002/esp.4995}, pages = {151 -- 172}, year = {2020}, abstract = {The rapidly growing amount and diversity of data are confronting us more than ever with the need to make informed predictions under uncertainty. The adverse impacts of climate change and natural hazards also motivate our search for reliable predictions. The range of statistical techniques that geomorphologists use to tackle this challenge has been growing, but rarely involves Bayesian methods. Instead, many geomorphic models rely on estimated averages that largely miss out on the variability of form and process. Yet seemingly fixed estimates of channel heads, sediment rating curves or glacier equilibrium lines, for example, are all prone to uncertainties. Neighbouring scientific disciplines such as physics, hydrology or ecology have readily embraced Bayesian methods to fully capture and better explain such uncertainties, as the necessary computational tools have advanced greatly. 
The aim of this article is to introduce the Bayesian toolkit to scientists concerned with Earth surface processes and landforms, and to show how geomorphic models might benefit from probabilistic concepts. I briefly review the use of Bayesian reasoning in geomorphology, and outline the corresponding variants of regression and classification in several worked examples.}, language = {en} } @article{KreibichBottoMerzetal.2016, author = {Kreibich, Heidi and Botto, Anna and Merz, Bruno and Schr{\"o}ter, Kai}, title = {Probabilistic, Multivariable Flood Loss Modeling on the Mesoscale with {BT-FLEMO}}, series = {Risk analysis}, volume = {37}, journal = {Risk analysis}, number = {4}, publisher = {Wiley}, address = {Hoboken}, issn = {0272-4332}, doi = {10.1111/risa.12650}, pages = {774 -- 787}, year = {2016}, abstract = {Flood loss modeling is an important component for risk analyses and decision support in flood risk management. Commonly, flood loss models describe complex damaging processes by simple, deterministic approaches like depth-damage functions and are associated with large uncertainty. To improve flood loss estimation and to provide quantitative information about the uncertainty associated with loss modeling, a probabilistic, multivariable Bagging decision Tree Flood Loss Estimation MOdel (BT-FLEMO) for residential buildings was developed. The application of BT-FLEMO provides a probability distribution of estimated losses to residential buildings per municipality. BT-FLEMO was applied and validated at the mesoscale in 19 municipalities that were affected during the 2002 flood by the River Mulde in Saxony, Germany. Validation was undertaken on the one hand via a comparison with six deterministic loss models, including both depth-damage functions and multivariable models. On the other hand, the results were compared with official loss data. 
BT-FLEMO outperforms deterministic, univariable, and multivariable models with regard to model accuracy, although the prediction uncertainty remains high. An important advantage of BT-FLEMO is the quantification of prediction uncertainty. The probability distribution of loss estimates by BT-FLEMO well represents the variation range of loss estimates of the other models in the case study.}, language = {en} } @article{SiegVogelMerzetal.2019, author = {Sieg, Tobias and Vogel, Kristin and Merz, Bruno and Kreibich, Heidi}, title = {Seamless Estimation of Hydrometeorological Risk Across Spatial Scales}, series = {Earth's Future}, volume = {7}, journal = {Earth's Future}, number = {5}, publisher = {Wiley-Blackwell}, address = {Hoboken, NJ}, issn = {2328-4277}, doi = {10.1029/2018EF001122}, pages = {574 -- 581}, year = {2019}, abstract = {Hydrometeorological hazards caused losses of approximately 110 billion U.S. Dollars in 2016 worldwide. Current damage estimations do not consider the uncertainties in a comprehensive way, and they are not consistent between spatial scales. Aggregated land use data are used at larger spatial scales, although detailed exposure data at the object level, such as openstreetmap.org, is becoming increasingly available across the globe. We present a probabilistic approach for object-based damage estimation which represents uncertainties and is fully scalable in space. The approach is applied and validated to company damage from the flood of 2013 in Germany. Damage estimates are more accurate compared to damage models using land use data, and the estimation works reliably at all spatial scales. Therefore, it can as well be used for pre-event analysis and risk assessments. 
This method takes hydrometeorological damage estimation and risk assessments to the next level, making damage estimates and their uncertainties fully scalable in space, from object to country level, and enabling the exploitation of new exposure data.}, language = {en} } @misc{DormannSchymanskiCabraletal.2012, author = {Dormann, Carsten F. and Schymanski, Stanislaus J. and Cabral, Juliano Sarmento and Chuine, Isabelle and Graham, Catherine and Hartig, Florian and Kearney, Michael and Morin, Xavier and R{\"o}mermann, Christine and Schr{\"o}der-Esselbach, Boris and Singer, Alexander}, title = {Correlation and process in species distribution models: bridging a dichotomy}, series = {Journal of biogeography}, volume = {39}, journal = {Journal of biogeography}, number = {12}, publisher = {Wiley-Blackwell}, address = {Hoboken}, issn = {0305-0270}, doi = {10.1111/j.1365-2699.2011.02659.x}, pages = {2119 -- 2131}, year = {2012}, abstract = {Within the field of species distribution modelling an apparent dichotomy exists between process-based and correlative approaches, where the processes are explicit in the former and implicit in the latter. However, these intuitive distinctions can become blurred when comparing species distribution modelling approaches in more detail. In this review article, we contrast the extremes of the correlativeprocess spectrum of species distribution models with respect to core assumptions, model building and selection strategies, validation, uncertainties, common errors and the questions they are most suited to answer. The extremes of such approaches differ clearly in many aspects, such as model building approaches, parameter estimation strategies and transferability. However, they also share strengths and weaknesses. We show that claims of one approach being intrinsically superior to the other are misguided and that they ignore the processcorrelation continuum as well as the domains of questions that each approach is addressing. 
Nonetheless, the application of process-based approaches to species distribution modelling lags far behind more correlative (process-implicit) methods and more research is required to explore their potential benefits. Critical issues for the employment of species distribution modelling approaches are given, together with a guideline for appropriate usage. We close with challenges for future development of process-explicit species distribution models and how they may complement current approaches to study species distributions.}, language = {en} }