@misc{Korup2020a, author = {Korup, Oliver}, title = {{Bayesian} geomorphology}, series = {Zweitver{\"o}ffentlichungen der Universit{\"a}t Potsdam : Mathematisch-Naturwissenschaftliche Reihe}, journal = {Zweitver{\"o}ffentlichungen der Universit{\"a}t Potsdam : Mathematisch-Naturwissenschaftliche Reihe}, number = {1}, issn = {1866-8372}, doi = {10.25932/publishup-53989}, url = {http://nbn-resolving.de/urn:nbn:de:kobv:517-opus4-539892}, pages = {24}, year = {2020}, abstract = {The rapidly growing amount and diversity of data are confronting us more than ever with the need to make informed predictions under uncertainty. The adverse impacts of climate change and natural hazards also motivate our search for reliable predictions. The range of statistical techniques that geomorphologists use to tackle this challenge has been growing, but rarely involves Bayesian methods. Instead, many geomorphic models rely on estimated averages that largely miss out on the variability of form and process. Yet seemingly fixed estimates of channel heads, sediment rating curves or glacier equilibrium lines, for example, are all prone to uncertainties. Neighbouring scientific disciplines such as physics, hydrology or ecology have readily embraced Bayesian methods to fully capture and better explain such uncertainties, as the necessary computational tools have advanced greatly. The aim of this article is to introduce the Bayesian toolkit to scientists concerned with Earth surface processes and landforms, and to show how geomorphic models might benefit from probabilistic concepts. 
I briefly review the use of Bayesian reasoning in geomorphology, and outline the corresponding variants of regression and classification in several worked examples.}, language = {en} } @article{KhuranaHesseKleidonHildebrandtetal.2022, author = {Khurana, Swamini and Hesse, Falk and Kleidon-Hildebrandt, Anke and Thullner, Martin}, title = {Should we worry about surficial dynamics when assessing nutrient cycling in the groundwater?}, series = {Frontiers in water}, volume = {4}, journal = {Frontiers in water}, publisher = {Frontiers Media}, address = {Lausanne}, issn = {2624-9375}, doi = {10.3389/frwa.2022.780297}, pages = {17}, year = {2022}, abstract = {The fluxes of water and solutes in the subsurface compartment of the Critical Zone are temporally dynamic and it is unclear how this impacts microbial mediated nutrient cycling in the spatially heterogeneous subsurface. To investigate this, we undertook numerical modeling, simulating the transport in a wide range of spatially heterogeneous domains, and the biogeochemical transformation of organic carbon and nitrogen compounds using a complex microbial community with four (4) distinct functional groups, in water saturated subsurface compartments. We performed a comprehensive uncertainty analysis accounting for varying residence times and spatial heterogeneity. While the aggregated removal of chemical species in the domains over the entire simulation period was approximately the same as that in steady state conditions, the sub-scale temporal variation of microbial biomass and chemical discharge from a domain depended strongly on the interplay of spatial heterogeneity and temporal dynamics of the forcing. We showed that the travel time and the Damkohler number (Da) can be used to predict the temporally varying chemical discharge from a spatially heterogeneous domain. 
In homogeneous domains, chemical discharge in temporally dynamic conditions could be double of that in the steady state conditions while microbial biomass varied up to 75\% of that in steady state conditions. In heterogeneous domains, the interquartile range of uncertainty in chemical discharge in reaction dominated systems (log(10)Da > 0) was double of that in steady state conditions. However, high heterogeneous domains resulted in outliers where chemical discharge could be as high as 10-20 times of that in steady state conditions in high flow periods. And in transport dominated systems (log(10)Da < 0), the chemical discharge could be half of that in steady state conditions in unusually low flow conditions. In conclusion, ignoring spatio-temporal heterogeneities in a numerical modeling approach may exacerbate inaccurate estimation of nutrient export and microbial biomass. The results are relevant to long-term field monitoring studies, and for homogeneous soil column-scale experiments investigating the role of temporal dynamics on microbial redox dynamics.}, language = {en} } @phdthesis{Zhou2024, author = {Zhou, Xiangqian}, title = {Modeling of spatially distributed nitrate transport to investigate the effects of drought and river restoration in the Bode catchment, Central Germany}, doi = {10.25932/publishup-62105}, url = {http://nbn-resolving.de/urn:nbn:de:kobv:517-opus4-621059}, school = {Universit{\"a}t Potsdam}, pages = {VIII, 168}, year = {2024}, abstract = {The European Water Framework Directive (WFD) has identified river morphological alteration and diffuse pollution as the two main pressures affecting water bodies in Europe at the catchment scale. Consequently, river restoration has become a priority to achieve the WFD's objective of good ecological status. However, little is known about the effects of stream morphological changes, such as re-meandering, on in-stream nitrate retention at the river network scale. 
Therefore, catchment nitrate modeling is necessary to guide the implementation of spatially targeted and cost-effective mitigation measures. Meanwhile, Germany, like many other regions in central Europe, has experienced consecutive summer droughts from 2015-2018, resulting in significant changes in river nitrate concentrations in various catchments. However, the mechanistic exploration of catchment nitrate responses to changing weather conditions is still lacking. Firstly, a fully distributed, process-based catchment Nitrate model (mHM-Nitrate) was used, which was properly calibrated and comprehensively evaluated at numerous spatially distributed nitrate sampling locations. Three calibration schemes were designed, taking into account land use, stream order, and mean nitrate concentrations, and they varied in spatial coverage but used data from the same period (2011-2019). The model performance for discharge was similar among the three schemes, with Nash-Sutcliffe Efficiency (NSE) scores ranging from 0.88 to 0.92. However, for nitrate concentrations, scheme 2 outperformed schemes 1 and 3 when compared to observed data from eight gauging stations. This was likely because scheme 2 incorporated a diverse range of data, including low discharge values and nitrate concentrations, and thus provided a better representation of within-catchment heterogeneity. Therefore, the study suggests that strategically selecting gauging stations that reflect the full range of within-catchment heterogeneity is more important for calibration than simply increasing the number of stations. Secondly, the mHM-Nitrate model was used to reveal the causal relations between sequential droughts and nitrate concentration in the Bode catchment (3200 km2) in central Germany, where stream nitrate concentrations exhibited contrasting trends from upstream to downstream reaches. 
The model was evaluated using data from six gauging stations, reflecting different levels of runoff components and their associated nitrate-mixing from upstream to downstream. Results indicated that the mHM-Nitrate model reproduced dynamics of daily discharge and nitrate concentration well, with Nash-Sutcliffe Efficiency ≥ 0.73 for discharge and Kling-Gupta Efficiency ≥ 0.50 for nitrate concentration at most stations. Particularly, the spatially contrasting trends of nitrate concentration were successfully captured by the model. The decrease of nitrate concentration in the lowland area in drought years (2015-2018) was presumably due to (1) limited terrestrial export loading (ca. 40\% lower than that of normal years 2004-2014), and (2) increased in-stream retention efficiency (20\% higher in summer within the whole river network). From a mechanistic modelling perspective, this study provided insights into spatially heterogeneous flow and nitrate dynamics and effects of sequential droughts, which shed light on water-quality responses to future climate change, as droughts are projected to be more frequent. Thirdly, this study investigated the effects of stream restoration via re-meandering on in-stream nitrate retention at network-scale in the well-monitored Bode catchment. The mHM-Nitrate model showed good performance in reproducing daily discharge and nitrate concentrations, with median Kling-Gupta values of 0.78 and 0.74, respectively. The mean and standard deviation of gross nitrate retention efficiency, which accounted for both denitrification and assimilatory uptake, were 5.1 ± 0.61\% and 74.7 ± 23.2\% in winter and summer, respectively, within the stream network. The study found that in the summer, denitrification rates were about two times higher in lowland sub-catchments dominated by agricultural lands than in mountainous sub-catchments dominated by forested areas, with median ± SD of 204 ± 22.6 and 102 ± 22.1 mg N m-2 d-1, respectively. 
Similarly, assimilatory uptake rates were approximately five times higher in streams surrounded by lowland agricultural areas than in those in higher-elevation, forested areas, with median ± SD of 200 ± 27.1 and 39.1 ± 8.7 mg N m-2 d-1, respectively. Therefore, restoration strategies targeting lowland agricultural areas may have greater potential for increasing nitrate retention. The study also found that restoring stream sinuosity could increase net nitrate retention efficiency by up to 25.4 ± 5.3\%, with greater effects seen in small streams. These results suggest that restoration efforts should consider augmenting stream sinuosity to increase nitrate retention and decrease nitrate concentrations at the catchment scale.}, language = {en} } @article{WagenerReineckePianosi2022, author = {Wagener, Thorsten and Reinecke, Robert and Pianosi, Francesca}, title = {On the evaluation of climate change impact models}, series = {Wiley interdisciplinary reviews : Climate change}, volume = {13}, journal = {Wiley interdisciplinary reviews : Climate change}, number = {3}, publisher = {Wiley}, address = {Hoboken}, issn = {1757-7780}, doi = {10.1002/wcc.772}, pages = {13}, year = {2022}, abstract = {In-depth understanding of the potential implications of climate change is required to guide decision- and policy-makers when developing adaptation strategies and designing infrastructure suitable for future conditions. Impact models that translate potential future climate conditions into variables of interest are needed to create the causal connection between a changing climate and its impact for different sectors. Recent surveys suggest that the primary strategy for validating such models (and hence for justifying their use) heavily relies on assessing the accuracy of model simulations by comparing them against historical observations. We argue that such a comparison is necessary and valuable, but not sufficient to achieve a comprehensive evaluation of climate change impact models. 
We believe that a complementary, largely observation-independent, step of model evaluation is needed to ensure more transparency of model behavior and greater robustness of scenario-based analyses. This step should address the following four questions: (1) Do modeled dominant process controls match our system perception? (2) Is my model's sensitivity to changing forcing as expected? (3) Do modeled decision levers show adequate influence? (4) Can we attribute uncertainty sources throughout the projection horizon? We believe that global sensitivity analysis, with its ability to investigate a model's response to joint variations of multiple inputs in a structured way, offers a coherent approach to address all four questions comprehensively. Such additional model evaluation would strengthen stakeholder confidence in model projections and, therefore, into the adaptation strategies derived with the help of impact models. This article is categorized under: Climate Models and Modeling > Knowledge Generation with Models Assessing Impacts of Climate Change > Evaluating Future Impacts of Climate Change}, language = {en} } @article{Korup2020, author = {Korup, Oliver}, title = {Bayesian geomorphology}, series = {Earth surface processes and landforms : the journal of the British Geomorphological Research Group}, volume = {46}, journal = {Earth surface processes and landforms : the journal of the British Geomorphological Research Group}, number = {1}, publisher = {Wiley}, address = {Hoboken}, issn = {0197-9337}, doi = {10.1002/esp.4995}, pages = {151 -- 172}, year = {2020}, abstract = {The rapidly growing amount and diversity of data are confronting us more than ever with the need to make informed predictions under uncertainty. The adverse impacts of climate change and natural hazards also motivate our search for reliable predictions. The range of statistical techniques that geomorphologists use to tackle this challenge has been growing, but rarely involves Bayesian methods. 
Instead, many geomorphic models rely on estimated averages that largely miss out on the variability of form and process. Yet seemingly fixed estimates of channel heads, sediment rating curves or glacier equilibrium lines, for example, are all prone to uncertainties. Neighbouring scientific disciplines such as physics, hydrology or ecology have readily embraced Bayesian methods to fully capture and better explain such uncertainties, as the necessary computational tools have advanced greatly. The aim of this article is to introduce the Bayesian toolkit to scientists concerned with Earth surface processes and landforms, and to show how geomorphic models might benefit from probabilistic concepts. I briefly review the use of Bayesian reasoning in geomorphology, and outline the corresponding variants of regression and classification in several worked examples.}, language = {en} } @phdthesis{Schroeter2020, author = {Schr{\"o}ter, Kai}, title = {Improved flood risk assessment}, doi = {10.25932/publishup-48024}, url = {http://nbn-resolving.de/urn:nbn:de:kobv:517-opus4-480240}, school = {Universit{\"a}t Potsdam}, pages = {408}, year = {2020}, abstract = {Rivers have always flooded their floodplains. Over 2.5 billion people worldwide have been affected by flooding in recent decades. The economic damage is also considerable, averaging 100 billion US dollars per year. There is no doubt that damage and other negative effects of floods can be avoided. However, this has a price: financially and politically. Costs and benefits can be estimated through risk assessments. Questions about the location and frequency of floods, about the objects that could be affected and their vulnerability are of importance for flood risk managers, insurance companies and politicians. Thus, both variables and factors from the fields of hydrology and socio-economics play a role with multi-layered connections. 
One example is dikes along a river, which on the one hand contain floods, but on the other hand, by narrowing the natural floodplains, accelerate the flood discharge and increase the danger of flooding for the residents downstream. Such larger connections must be included in the assessment of flood risk. However, in current procedures this is accompanied by simplifying assumptions. Risk assessments are therefore fuzzy and associated with uncertainties. This thesis investigates the benefits and possibilities of new data sources for improving flood risk assessment. New methods and models are developed, which take the mentioned interrelations better into account and also quantify the existing uncertainties of the model results, and thus enable statements about the reliability of risk estimates. For this purpose, data on flood events from various sources are collected and evaluated. This includes precipitation and flow records at measuring stations as well as for instance images from social media, which can help to delineate the flooded areas and estimate flood damage with location information. Machine learning methods have been successfully used to recognize and understand correlations between floods and impacts from a wide range of data and to develop improved models. Risk models help to develop and evaluate strategies to reduce flood risk. These tools also provide advanced insights into the interplay of various factors and on the expected consequences of flooding. This work shows progress in terms of an improved assessment of flood risks by using diverse data from different sources with innovative methods as well as by the further development of models. Flood risk is variable due to economic and climatic changes, and other drivers of risk. 
In order to keep the knowledge about flood risks up-to-date, robust, efficient and adaptable methods as proposed in this thesis are of increasing importance.}, language = {en} } @phdthesis{Roezer2018, author = {R{\"o}zer, Viktor}, title = {Pluvial flood loss to private households}, doi = {10.25932/publishup-42991}, url = {http://nbn-resolving.de/urn:nbn:de:kobv:517-opus4-429910}, school = {Universit{\"a}t Potsdam}, pages = {XXII, 109}, year = {2018}, abstract = {Today, more than half of the world's population lives in urban areas. With a high density of population and assets, urban areas are not only the economic, cultural and social hubs of every society, they are also highly susceptible to natural disasters. As a consequence of rising sea levels and an expected increase in extreme weather events caused by a changing climate in combination with growing cities, flooding is an increasing threat to many urban agglomerations around the globe. To mitigate the destructive consequences of flooding, appropriate risk management and adaptation strategies are required. So far, flood risk management in urban areas is almost exclusively focused on managing river and coastal flooding. Often overlooked is the risk from small-scale rainfall-triggered flooding, where the rainfall intensity of rainstorms exceeds the capacity of urban drainage systems, leading to immediate flooding. Referred to as pluvial flooding, this flood type exclusive to urban areas has caused severe losses in cities around the world. Without further intervention, losses from pluvial flooding are expected to increase in many urban areas due to an increase of impervious surfaces compounded with an aging drainage infrastructure and a projected increase in heavy precipitation events. 
While this requires the integration of pluvial flood risk into risk management plans, so far little is known about the adverse consequences of pluvial flooding due to a lack of both detailed data sets and studies on pluvial flood impacts. As a consequence, methods for reliably estimating pluvial flood losses, needed for pluvial flood risk assessment, are still missing. Therefore, this thesis investigates how pluvial flood losses to private households can be reliably estimated, based on an improved understanding of the drivers of pluvial flood loss. For this purpose, detailed data from pluvial flood-affected households was collected through structured telephone- and web-surveys following pluvial flood events in Germany and the Netherlands. Pluvial flood losses to households are the result of complex interactions between impact characteristics such as the water depth and a household's resistance as determined by its risk awareness, preparedness, emergency response, building properties and other influencing factors. Both exploratory analysis and machine-learning approaches were used to analyze differences in resistance and impacts between households and their effects on the resulting losses. The comparison of case studies showed that the awareness around pluvial flooding among private households is quite low. Low awareness not only challenges the effective dissemination of early warnings, but was also found to influence the implementation of private precautionary measures. The latter were predominately implemented by households with previous experience of pluvial flooding. Even cases where previous flood events affected a different part of the same city did not lead to an increase in preparedness of the surveyed households, highlighting the need to account for small-scale variability in both impact and resistance parameters when assessing pluvial flood risk. 
While it was concluded that the combination of low awareness, ineffective early warning and the fact that only a minority of buildings were adapted to pluvial flooding impaired the coping capacities of private households, the often low water levels still enabled households to mitigate or even prevent losses through a timely and effective emergency response. These findings were confirmed by the detection of loss-influencing variables, showing that cases in which households were able to prevent any loss to the building structure are predominately explained by resistance variables such as the household's risk awareness, while the degree of loss is mainly explained by impact variables. Based on the important loss-influencing variables detected, different flood loss models were developed. Similar to flood loss models for river floods, the empirical data from the preceding data collection was used to train flood loss models describing the relationship between impact and resistance parameters and the resulting loss to building structures. Different approaches were adapted from river flood loss models using both models with the water depth as only predictor for building structure loss and models incorporating additional variables from the preceding variable detection routine. The high predictive errors of all compared models showed that point predictions are not suitable for estimating losses on the building level, as they severely impair the reliability of the estimates. For that reason, a new probabilistic framework based on Bayesian inference was introduced that is able to provide predictive distributions instead of single loss estimates. These distributions not only give a range of probable losses, they also provide information on how likely a specific loss value is, representing the uncertainty in the loss estimate. 
Using probabilistic loss models, it was found that the certainty and reliability of a loss estimate on the building level is not only determined by the use of additional predictors as shown in previous studies, but also by the choice of response distribution defining the shape of the predictive distribution. Here, a mix between a beta and a Bernoulli distribution to account for households that are able to prevent losses to their building's structure was found to provide significantly more certain and reliable estimates than previous approaches using Gaussian or non-parametric response distributions. The successful model transfer and post-event application to estimate building structure loss in Houston, TX, caused by pluvial flooding during Hurricane Harvey confirmed previous findings, and demonstrated the potential of the newly developed multi-variable beta model for future risk assessments. The highly detailed input data set constructed from openly available data sources containing over 304,000 affected buildings in Harris County further showed the potential of data-driven, building-level loss models for pluvial flood risk assessment. In conclusion, pluvial flood losses to private households are the result of complex interactions between impact and resistance variables, which should be represented in loss models. The local occurrence of pluvial floods requires loss estimates on high spatial resolutions, i.e. on the building level, where losses are variable and uncertainties are high. Therefore, probabilistic loss estimates describing the uncertainty of the estimate should be used instead of point predictions. While the performance of probabilistic models on the building level are mainly driven by the choice of response distribution, multi-variable models are recommended for two reasons: First, additional resistance variables improve the detection of cases in which households were able to prevent structural losses. 
Second, the added variability of additional predictors provides a better representation of the uncertainties when loss estimates from multiple buildings are aggregated. This leads to the conclusion that data-driven probabilistic loss models on the building level allow for a reliable loss estimation at an unprecedented level of detail, with a consistent quantification of uncertainties on all aggregation levels. This makes the presented approach suitable for a wide range of applications, from decision support in spatial planning to impact-based early warning systems.}, language = {en} } @phdthesis{Kox2018, author = {Kox, Thomas}, title = {Perception and use of uncertainty in severe weather warnings}, url = {http://nbn-resolving.de/urn:nbn:de:kobv:517-opus4-411541}, school = {Universit{\"a}t Potsdam}, pages = {154}, year = {2018}, abstract = {Uncertainty is an essential part of atmospheric processes and thus inherent to weather forecasts. Nevertheless, weather forecasts and warnings are still predominately issued as deterministic (yes or no) forecasts, although research suggests that providing weather forecast users with additional information about the forecast uncertainty can enhance the preparation of mitigation measures. Communicating forecast uncertainty would allow for a provision of information on possible future events at an earlier time. The desired benefit is to enable the users to start with preparatory protective action at an earlier stage of time based on their own risk assessment and decision threshold. But not all users have the same threshold for taking action. In the course of the project WEXICOM ('Wetterwarnungen: Von der Extremereignis-Information zu Kommunikation und Handlung') funded by the Deutscher Wetterdienst (DWD), three studies were conducted between the years 2012 and 2016 to reveal how weather forecasts and warnings are reflected in weather-related decision-making. 
The studies asked which factors influence the perception of forecasts and the decision to take protective action and how forecast users make sense of probabilistic information and the additional lead time. In a first exploratory study conducted in 2012, members of emergency services in Germany were asked questions about how weather warnings are communicated to professional end-users in the emergency community and how the warnings are converted into mitigation measures. A large number of open questions were selected to identify new topics of interest. The questions covered topics like users' confidence in forecasts, their understanding of probabilistic information as well as their lead time and decision thresholds to start with preparatory mitigation measures. Results show that emergency service personnel generally have a good sense of uncertainty inherent in weather forecasts. Although no single probability threshold could be identified for organisations to start with preparatory mitigation measures, it became clear that emergency services tend to avoid forecasts based on low probabilities as a basis for their decisions. Based on these findings, a second study conducted with residents of Berlin in 2014 further investigated the question of decision thresholds. The survey questions related to the topics of the perception of and prior experience with severe weather, trustworthiness of forecasters and confidence in weather forecasts, and socio-demographic and socio-economic characteristics. Within the questionnaire a scenario was created to determine individual decision thresholds and see whether subgroups of the sample lead to different thresholds. The results show that people's willingness to act tends to be higher and decision thresholds tend to be lower if the expected weather event is more severe or the property at risk is of higher value. 
Several influencing factors of risk perception have significant effects such as education, housing status and ability to act, whereas socio-demographic determinants alone are often not sufficient to fully grasp risk perception and protection behaviour. Parallel to the quantitative studies, an interview study was conducted with 27 members of German civil protection between 2012 and 2016. The results show that the latest developments in (numerical) weather forecasting do not necessarily fit the current practice of German emergency services. These practices are mostly carried out on alarms and ground truth in a reactive manner rather than on anticipation based on prognosis or forecasts. As the potential consequences rather than the event characteristics determine protective action, the findings support the call and need for impact-based warnings. Forecasters will rely on impact data and need to learn the users' understanding of impact. Therefore, it is recommended to enhance weather communication not only by improving computer models and observation tools, but also by focusing on the aspects of communication and collaboration. Using information about uncertainty demands awareness about and acceptance of the limits of knowledge, hence, the capabilities of the forecaster to anticipate future developments of the atmosphere and the capabilities of the user to make sense of this information.}, language = {en} } @phdthesis{Zurell2011, author = {Zurell, Damaris}, title = {Integrating dynamic and statistical modelling approaches in order to improve predictions for scenarios of environmental change}, url = {http://nbn-resolving.de/urn:nbn:de:kobv:517-opus-56845}, school = {Universit{\"a}t Potsdam}, year = {2011}, abstract = {Species respond to environmental change by dynamically adjusting their geographical ranges. Robust predictions of these changes are prerequisites to inform dynamic and sustainable conservation strategies. 
Correlative species distribution models (SDMs) relate species' occurrence records to prevailing environmental factors to describe the environmental niche. They have been widely applied in global change context as they have comparably low data requirements and allow for rapid assessments of potential future species' distributions. However, due to their static nature, transient responses to environmental change are essentially ignored in SDMs. Furthermore, neither dispersal nor demographic processes and biotic interactions are explicitly incorporated. Therefore, it has often been suggested to link statistical and mechanistic modelling approaches in order to make more realistic predictions of species' distributions for scenarios of environmental change. In this thesis, I present two different ways of such linkage. (i) Mechanistic modelling can act as virtual playground for testing statistical models and allows extensive exploration of specific questions. I promote this 'virtual ecologist' approach as a powerful evaluation framework for testing sampling protocols, analyses and modelling tools. Also, I employ such an approach to systematically assess the effects of transient dynamics and ecological properties and processes on the prediction accuracy of SDMs for climate change projections. That way, relevant mechanisms are identified that shape the species' response to altered environmental conditions and which should hence be considered when trying to project species' distribution through time. (ii) I supplement SDM projections of potential future habitat for black grouse in Switzerland with an individual-based population model. By explicitly considering complex interactions between habitat availability and demographic processes, this allows for a more direct assessment of expected population response to environmental change and associated extinction risks. 
However, predictions were highly variable across simulations emphasising the need for principal evaluation tools like sensitivity analysis to assess uncertainty and robustness in dynamic range predictions. Furthermore, I identify data coverage of the environmental niche as a likely cause for contrasted range predictions between SDM algorithms. SDMs may fail to make reliable predictions for truncated and edge niches, meaning that portions of the niche are not represented in the data or niche edges coincide with data limits. Overall, my thesis contributes to an improved understanding of uncertainty factors in predictions of range dynamics and presents ways how to deal with these. Finally I provide preliminary guidelines for predictive modelling of dynamic species' response to environmental change, identify key challenges for future research and discuss emerging developments.}, language = {en} } @phdthesis{Schaber2002, author = {Schaber, J{\"o}rg}, title = {Phenology in Germany in the 20th century : methods, analyses and models}, url = {http://nbn-resolving.de/urn:nbn:de:kobv:517-0000532}, school = {Universit{\"a}t Potsdam}, year = {2002}, abstract = {Die L{\"a}nge der Vegetationsperiode (VP) spielt eine zentrale Rolle f{\"u}r die interannuelle Variation der Kohlenstoffspeicherung terrestrischer {\"O}kosysteme. Die Analyse von Beobachtungsdaten hat gezeigt, dass sich die VP in den letzten Jahrzehnten in den n{\"o}rdlichen Breiten verl{\"a}ngert hat. Dieses Ph{\"a}nomen wurde oft im Zusammenhang mit der globalen Erw{\"a}rmung diskutiert, da die Ph{\"a}nologie von der Temperatur beeinflusst wird. Die Analyse der Pflanzenph{\"a}nologie in S{\"u}ddeutschland im 20. Jahrhundert zeigte: - Die starke Verfr{\"u}hung der Fr{\"u}hjahrsphasen in dem Jahrzehnt vor 1999 war kein singul{\"a}res Ereignis im 20. Jahrhundert. Schon in fr{\"u}heren Dekaden gab es {\"a}hnliche Trends. Es konnten Perioden mit unterschiedlichem Trendverhalten identifiziert werden. 
- Es gab deutliche Unterschiede in den Trends von fr{\"u}hen und sp{\"a}ten Fr{\"u}hjahrsphasen. Die fr{\"u}hen Fr{\"u}hjahrsphasen haben sich stetig verfr{\"u}ht, mit deutlicher Verfr{\"u}hung zwischen 1931 und 1948, moderater Verfr{\"u}hung zwischen 1948 und 1984 und starker Verfr{\"u}hung zwischen 1984 und 1999. Die sp{\"a}ten Fr{\"u}hjahrsphasen hingegen wechselten ihr Trendverhalten in diesen Perioden von einer Verfr{\"u}hung zu einer deutlichen Versp{\"a}tung wieder zu einer starken Verfr{\"u}hung. Dieser Unterschied in der Trendentwicklung zwischen fr{\"u}hen und sp{\"a}ten Fr{\"u}hjahrsphasen konnte auch f{\"u}r ganz Deutschland in den Perioden 1951 bis 1984 und 1984 bis 1999 beobachtet werden. Der bestimmende Einfluss der Temperatur auf die Fr{\"u}hjahrsphasen und ihr modifizierender Einfluss auf die Herbstphasen konnte best{\"a}tigt werden. Es zeigt sich jedoch, dass - die Ph{\"a}nologie bestimmende Funktionen der Temperatur nicht mit einem globalen j{\"a}hrlichen CO2 Signal korreliert waren, welches als Index f{\"u}r die globale Erw{\"a}rmung verwendet wurde - ein Index f{\"u}r grossr{\"a}umige regionale Zirkulationsmuster (NAO-Index) nur zu einem kleinen Teil die beobachtete ph{\"a}nologische Variabilit{\"a}t erkl{\"a}ren konnte. Das beobachtete unterschiedliche Trendverhalten zwischen fr{\"u}hen und sp{\"a}ten Fr{\"u}hjahrsphasen konnte auf die unterschiedliche Entwicklung von M{\"a}rz- und Apriltemperaturen zur{\"u}ckgef{\"u}hrt werden. W{\"a}hrend sich die M{\"a}rztemperaturen im Laufe des 20. Jahrhunderts mit einer zunehmenden Variabilit{\"a}t in den letzten 50 Jahren stetig erh{\"o}ht haben, haben sich die Apriltemperaturen zwischen dem Ende der 1940er und Mitte der 1980er merklich abgek{\"u}hlt und dann wieder deutlich erw{\"a}rmt. 
Es wurde geschlussfolgert, dass die Verfr{\"u}hungen in der Fr{\"u}hjahrsph{\"a}nologie in den letzten Dekaden Teile multi-dekadischer Fluktuationen sind, welche sich nach Spezies und relevanter saisonaler Temperatur unterscheiden. Aufgrund dieser Fluktuationen konnte kein Zusammenhang mit einem globalen Erw{\"a}rmungssignal gefunden werden. Im Durchschnitt haben sich alle betrachteten Fr{\"u}hjahrsphasen zwischen 1951 und 1999 in Naturr{\"a}umen in Deutschland zwischen 5 und 20 Tagen verfr{\"u}ht. Ein starker Unterschied in der Verfr{\"u}hung zwischen fr{\"u}hen und sp{\"a}ten Fr{\"u}hjahrsphasen liegt an deren erw{\"a}hntem unterschiedlichen Verhalten. Die Blattverf{\"a}rbung hat sich zwischen 1951 und 1999 f{\"u}r alle Spezies versp{\"a}tet, aber nach 1984 im Durchschnitt verfr{\"u}ht. Die VP hat sich in Deutschland zwischen 1951 und 1999 um ca. 10 Tage verl{\"a}ngert. Es ist haupts{\"a}chlich die {\"A}nderung in den Fr{\"u}hjahrsphasen, die zu einer {\"A}nderung in der potentiell absorbierten Strahlung (PAS) f{\"u}hrt. Dar{\"u}ber hinaus sind es die sp{\"a}ten Fr{\"u}hjahrsphasen, die pro Tag Verfr{\"u}hung st{\"a}rker profitieren, da die zus{\"a}tzlichen Tage l{\"a}nger und w{\"a}rmer sind als dies f{\"u}r die fr{\"u}hen Phasen der Fall ist. Um die relative {\"A}nderung in PAS im Vergleich der Spezies abzusch{\"a}tzen, m{\"u}ssen allerdings auch die Ver{\"a}nderungen in den Herbstphasen ber{\"u}cksichtigt werden. Der deutliche Unterschied zwischen fr{\"u}hen und sp{\"a}ten Fr{\"u}hjahrsphasen konnte durch die Anwendung einer neuen Methode zur Konstruktion von Zeitreihen herausgearbeitet werden. Der neue methodische Ansatz erlaubte die Ableitung verl{\"a}sslicher 100-j{\"a}hriger Zeitreihen und die Konstruktion von lokalen kombinierten Zeitreihen, welche die Datenverf{\"u}gbarkeit f{\"u}r die Modellentwicklung erh{\"o}hten. 
Ausser analysierten Protokollierungsfehlern wurden mikroklimatische, genetische und Beobachtereinfl{\"u}sse als Quellen von Unsicherheit in ph{\"a}nologischen Daten identifiziert. Ph{\"a}nologische Beobachtungen eines Ortes k{\"o}nnen sch{\"a}tzungsweise 24 Tage um das parametrische Mittel schwanken. Dies unterst{\"u}tzt die 30-Tage Regel f{\"u}r die Detektion von Ausreissern. Neue Ph{\"a}nologiemodelle, die den Blattaustrieb aus t{\"a}glichen Temperaturreihen simulieren, wurden entwickelt. Diese Modelle basieren auf einfachen Interaktionen zwischen aktivierenden und hemmenden Substanzen, welche die Entwicklungsstadien einer Pflanze bestimmen. Im Allgemeinen konnten die neuen Modelle die Beobachtungsdaten besser simulieren als die klassischen Modelle. Weitere Hauptresultate waren: - Der Bias der klassischen Modelle, d.h. {\"U}bersch{\"a}tzung von fr{\"u}hen und Untersch{\"a}tzung von sp{\"a}ten Beobachtungen, konnte reduziert, aber nicht vollst{\"a}ndig eliminiert werden. - Die besten Modellvarianten f{\"u}r verschiedene Spezies wiesen darauf hin, dass f{\"u}r die sp{\"a}ten Fr{\"u}hjahrsphasen die Tagesl{\"a}nge eine wichtigere Rolle spielt als f{\"u}r die fr{\"u}hen Phasen. - Die Vernalisation spielte gegen{\"u}ber den Temperaturen kurz vor dem Blattaustrieb nur eine untergeordnete Rolle.}, language = {en} }