@misc{SmithRheinwaltBookhagen2019,
  author   = {Smith, Taylor and Rheinwalt, Aljoscha and Bookhagen, Bodo},
  title    = {Determining the optimal grid resolution for topographic analysis on an airborne lidar dataset},
  series   = {Postprints der Universit{\"a}t Potsdam Mathematisch-Naturwissenschaftliche Reihe},
  journal  = {Postprints der Universit{\"a}t Potsdam Mathematisch-Naturwissenschaftliche Reihe},
  number   = {725},
  issn     = {1866-8372},
  doi      = {10.25932/publishup-43016},
  url      = {http://nbn-resolving.de/urn:nbn:de:kobv:517-opus4-430165},
  pages    = {475--489},
  year     = {2019},
  abstract = {Digital elevation models (DEMs) are a gridded representation of the surface of the Earth and typically contain uncertainties due to data collection and processing. Slope and aspect estimates on a DEM contain errors and uncertainties inherited from the representation of a continuous surface as a grid (referred to as truncation error; TE) and from any DEM uncertainty. We analyze in detail the impacts of TE and propagated elevation uncertainty (PEU) on slope and aspect. Using synthetic data as a control, we define functions to quantify both TE and PEU for arbitrary grids. We then develop a quality metric which captures the combined impact of both TE and PEU on the calculation of topographic metrics. Our quality metric allows us to examine the spatial patterns of error and uncertainty in topographic metrics and to compare calculations on DEMs of different sizes and accuracies. Using lidar data with point density of ∼10 pts m-2 covering Santa Cruz Island in southern California, we are able to generate DEMs and uncertainty estimates at several grid resolutions. Slope (aspect) errors on the 1 m dataset are on average 0.3∘ (0.9∘) from TE and 5.5∘ (14.5∘) from PEU. We calculate an optimal DEM resolution for our SCI lidar dataset of 4 m that minimizes the error bounds on topographic metric calculations due to the combined influence of TE and PEU for both slope and aspect calculations over the entire SCI. Average slope (aspect) errors from the 4 m DEM are 0.25∘ (0.75∘) from TE and 5∘ (12.5∘) from PEU. While the smallest grid resolution possible from the high-density SCI lidar is not necessarily optimal for calculating topographic metrics, high point-density data are essential for measuring DEM uncertainty across a range of resolutions.},
  language = {en}
}

@misc{KallmeyerGreweGlombitzaetal.2015,
  author   = {Kallmeyer, Jens and Grewe, Sina and Glombitza, Clemens and Kitte, J. Axel},
  title    = {Microbial abundance in lacustrine sediments},
  series   = {Postprints der Universit{\"a}t Potsdam Mathematisch-Naturwissenschaftliche Reihe},
  journal  = {Postprints der Universit{\"a}t Potsdam Mathematisch-Naturwissenschaftliche Reihe},
  number   = {723},
  issn     = {1866-8372},
  doi      = {10.25932/publishup-42982},
  url      = {http://nbn-resolving.de/urn:nbn:de:kobv:517-opus4-429828},
  pages    = {1667--1677},
  year     = {2015},
  abstract = {The ICDP ``PaleoVan'' drilling campaign at Lake Van, Turkey, provided a long (> 100 m) record of lacustrine subsurface sedimentary microbial cell abundance. After the ICDP campaign at Potrok Aike, Argentina, this is only the second time deep lacustrine cell counts have been documented. Two sites were cored and revealed a strikingly similar cell distribution despite differences in organic matter content and microbial activity. Although shifted towards higher values, cell counts from Lake Potrok Aike, Argentina, reveal very similar distribution patterns with depth. The lacustrine cell count data are significantly different from published marine records; the most probable cause is differences in sedimentary organic matter composition with marine sediments containing a higher fraction of labile organic matter. Previous studies showed that microbial activity and abundance increase centimetres to metres around geologic interfaces. The finely laminated Lake Van sediment allowed studying this phenomenon on the microscale. We sampled at the scale of individual laminae, and in some depth intervals, we found large differences in microbial abundance between the different laminae. This small-scale heterogeneity is normally overlooked due to much larger sampling intervals that integrate over several centimetres. However, not all laminated intervals exhibit such large differences in microbial abundance, and some non-laminated horizons show large variability on the millimetre scale as well. The reasons for such contrasting observations remain elusive, but indicate that heterogeneity of microbial abundance in subsurface sediments has not been taken into account sufficiently. These findings have implications not just for microbiological studies but for geochemistry as well, as the large differences in microbial abundance clearly show that there are distinct microhabitats that deviate considerably from the surrounding layers.},
  language = {en}
}

@misc{PilzDelgadoVossetal.2019,
  author   = {Pilz, Tobias and Delgado, Jos{\'e} Miguel Martins and Voss, Sebastian and Vormoor, Klaus Josef and Francke, Till and Cunha Costa, Alexandre and Martins, Eduardo and Bronstert, Axel},
  title    = {Seasonal drought prediction for semiarid northeast {Brazil}},
  series   = {Postprints der Universit{\"a}t Potsdam Mathematisch-Naturwissenschaftliche Reihe},
  journal  = {Postprints der Universit{\"a}t Potsdam Mathematisch-Naturwissenschaftliche Reihe},
  number   = {702},
  issn     = {1866-8372},
  doi      = {10.25932/publishup-42795},
  url      = {http://nbn-resolving.de/urn:nbn:de:kobv:517-opus4-427950},
  pages    = {21},
  year     = {2019},
  abstract = {The semiarid northeast of Brazil is one of the most densely populated dryland regions in the world and recurrently affected by severe droughts. Thus, reliable seasonal forecasts of streamflow and reservoir storage are of high value for water managers. Such forecasts can be generated by applying either hydrological models representing underlying processes or statistical relationships exploiting correlations among meteorological and hydrological variables. This work evaluates and compares the performances of seasonal reservoir storage forecasts derived by a process-based hydrological model and a statistical approach. Driven by observations, both models achieve similar simulation accuracies. In a hindcast experiment, however, the accuracy of estimating regional reservoir storages was considerably lower using the process-based hydrological model, whereas the resolution and reliability of drought event predictions were similar by both approaches. Further investigations regarding the deficiencies of the process-based model revealed a significant influence of antecedent wetness conditions and a higher sensitivity of model prediction performance to rainfall forecast quality. Within the scope of this study, the statistical model proved to be the more straightforward approach for predictions of reservoir level and drought events at regionally and monthly aggregated scales. However, for forecasts at finer scales of space and time or for the investigation of underlying processes, the costly initialisation and application of a process-based model can be worthwhile. Furthermore, the application of innovative data products, such as remote sensing data, and operational model correction methods, like data assimilation, may allow for an enhanced exploitation of the advanced capabilities of process-based hydrological models.},
  language = {en}
}

@misc{SpiekermannHarderGilmoreetal.2019,
  author   = {Spiekermann, Georg and Harder, M. and Gilmore, Keith and Zalden, Peter and Sahle, Christoph J. and Petitgirard, Sylvain and Wilke, Max and Biedermann, Nicole and Weis, Thomas and Morgenroth, Wolfgang and Tse, John S. and Kulik, E. and Nishiyama, Norimasa and Yava{\c{s}}, Hasan and Sternemann, Christian},
  title    = {Persistent Octahedral Coordination in Amorphous {GeO$_2$} Up to 100 {GPa} by {K$\beta''$} {X-Ray} Emission Spectroscopy},
  series   = {Postprints der Universit{\"a}t Potsdam Mathematisch-Naturwissenschaftliche Reihe},
  journal  = {Postprints der Universit{\"a}t Potsdam Mathematisch-Naturwissenschaftliche Reihe},
  number   = {699},
  issn     = {1866-8372},
  doi      = {10.25932/publishup-42775},
  url      = {http://nbn-resolving.de/urn:nbn:de:kobv:517-opus4-427755},
  year     = {2019},
  abstract = {We measure valence-to-core x-ray emission spectra of compressed crystalline GeO₂ up to 56 GPa and of amorphous GeO₂ up to 100 GPa. In a novel approach, we extract the Ge coordination number and mean Ge-O distances from the emission energy and the intensity of the Kβ'' emission line. The spectra of high-pressure polymorphs are calculated using the Bethe-Salpeter equation. Trends observed in the experimental and calculated spectra are found to match only when utilizing an octahedral model. The results reveal persistent octahedral Ge coordination with increasing distortion, similar to the compaction mechanism in the sequence of octahedrally coordinated crystalline GeO₂ high-pressure polymorphs.},
  language = {en}
}

@misc{RolinskiRammigWalzetal.2014,
  author   = {Rolinski, Susanne and Rammig, Anja and Walz, Ariane and von Bloh, Werner and van Oijen, M. and Thonicke, Kirsten},
  title    = {A probabilistic risk assessment for the vulnerability of the {European} carbon cycle to weather extremes},
  series   = {Postprints der Universit{\"a}t Potsdam Mathematisch-Naturwissenschaftliche Reihe},
  journal  = {Postprints der Universit{\"a}t Potsdam Mathematisch-Naturwissenschaftliche Reihe},
  number   = {487},
  issn     = {1866-8372},
  url      = {http://nbn-resolving.de/urn:nbn:de:kobv:517-opus4-407999},
  pages    = {1813--1831},
  year     = {2014},
  abstract = {Extreme weather events are likely to occur more often under climate change and the resulting effects on ecosystems could lead to a further acceleration of climate change. But not all extreme weather events lead to extreme ecosystem response. Here, we focus on hazardous ecosystem behaviour and identify coinciding weather conditions. We use a simple probabilistic risk assessment based on time series of ecosystem behaviour and climate conditions. Given the risk assessment terminology, vulnerability and risk for the previously defined hazard are estimated on the basis of observed hazardous ecosystem behaviour. We apply this approach to extreme responses of terrestrial ecosystems to drought, defining the hazard as a negative net biome productivity over a 12-month period. We show an application for two selected sites using data for 1981-2010 and then apply the method to the pan-European scale for the same period, based on numerical modelling results (LPJmL for ecosystem behaviour; ERA-Interim data for climate). Our site-specific results demonstrate the applicability of the proposed method, using the SPEI to describe the climate condition. The site in Spain provides an example of vulnerability to drought because the expected value of the SPEI is 0.4 lower for hazardous than for non-hazardous ecosystem behaviour. In northern Germany, on the contrary, the site is not vulnerable to drought because the SPEI expectation values imply wetter conditions in the hazard case than in the non-hazard case. At the pan-European scale, ecosystem vulnerability to drought is calculated in the Mediterranean and temperate region, whereas Scandinavian ecosystems are vulnerable under conditions without water shortages. These first model-based applications indicate the conceptual advantages of the proposed method by focusing on the identification of critical weather conditions for which we observe hazardous ecosystem behaviour in the analysed data set. Application of the method to empirical time series and to future climate would be important next steps to test the approach.},
  language = {en}
}

@misc{PurintonBookhagen2018,
  author   = {Purinton, Benjamin and Bookhagen, Bodo},
  title    = {Measuring decadal vertical land-level changes from {SRTM-C} (2000) and {TanDEM-X} ($\sim$ 2015) in the south-central {Andes}},
  series   = {Postprints der Universit{\"a}t Potsdam Mathematisch-Naturwissenschaftliche Reihe},
  journal  = {Postprints der Universit{\"a}t Potsdam Mathematisch-Naturwissenschaftliche Reihe},
  number   = {480},
  url      = {http://nbn-resolving.de/urn:nbn:de:kobv:517-opus4-420487},
  pages    = {16},
  year     = {2018},
  abstract = {In the arctic and high mountains it is common to measure vertical changes of ice sheets and glaciers via digital elevation model (DEM) differencing. This requires the signal of change to outweigh the noise associated with the datasets. Excluding large landslides, on the ice-free earth the land-level change is smaller in vertical magnitude and thus requires more accurate DEMs for differencing and identification of change. Previously, this has required meter to submeter data at small spatial scales. Following careful corrections, we are able to measure land-level changes in gravel-bed channels and steep hillslopes in the south-central Andes using the SRTM-C (collected in 2000) and the TanDEM-X (collected from 2010 to 2015) near-global 12-30m DEMs. Long-standing errors in the SRTM-C are corrected using the TanDEM-X as a control surface and applying cosine-fit co-registration to remove ∼ 1∕10 pixel (∼ 3m) shifts, fast Fourier transform (FFT) and filtering to remove SRTM-C short- and long-wavelength stripes, and blocked shifting to remove remaining complex biases. The datasets are then differenced and outlier pixels are identified as a potential signal for the case of gravel-bed channels and hillslopes. We are able to identify signals of incision and aggradation (with magnitudes down to ∼ 3m in the best case) in two  > 100km river reaches, with increased geomorphic activity downstream of knickpoints. Anthropogenic gravel excavation and piling is prominently measured, with magnitudes exceeding ±5m (up to  > 10m for large piles). These values correspond to conservative average rates of 0.2 to > 0.5myr-1 for vertical changes in gravel-bed rivers. For hillslopes, since we require stricter cutoffs for noise, we are only able to identify one major landslide in the study area with a deposit volume of 16±0.15×106m3. Additional signals of change can be garnered from TanDEM-X auxiliary layers; however, these are more difficult to quantify. The methods presented can be extended to any region of the world with SRTM-C and TanDEM-X coverage where vertical land-level changes are of interest, with the caveat that remaining vertical uncertainties in primarily the SRTM-C limit detection in steep and complex topography.},
  language = {en}
}

@misc{CrisologoWarrenMuehlbaueretal.2018,
  author   = {Crisologo, Irene and Warren, Robert A. and M{\"u}hlbauer, Kai and Heistermann, Maik},
  title    = {Enhancing the consistency of spaceborne and ground-based radar comparisons by using beam blockage fraction as a quality filter},
  series   = {Atmospheric Measurement Techniques},
  journal  = {Atmospheric Measurement Techniques},
  url      = {http://nbn-resolving.de/urn:nbn:de:kobv:517-opus4-418198},
  pages    = {14},
  year     = {2018},
  abstract = {We explore the potential of spaceborne radar (SR) observations from the Ku-band precipitation radars onboard the Tropical Rainfall Measuring Mission (TRMM) and Global Precipitation Measurement (GPM) satellites as a reference to quantify the ground radar (GR) reflectivity bias. To this end, the 3-D volume-matching algorithm proposed by Schwaller and Morris (2011) is implemented and applied to 5 years (2012-2016) of observations. We further extend the procedure by a framework to take into account the data quality of each ground radar bin. Through these methods, we are able to assign a quality index to each matching SR-GR volume, and thus compute the GR calibration bias as a quality-weighted average of reflectivity differences in any sample of matching GR-SR volumes. We exemplify the idea of quality-weighted averaging by using the beam blockage fraction as the basis of a quality index. As a result, we can increase the consistency of SR and GR observations, and thus the precision of calibration bias estimates. The remaining scatter between GR and SR reflectivity as well as the variability of bias estimates between overpass events indicate, however, that other error sources are not yet fully addressed. Still, our study provides a framework to introduce any other quality variables that are considered relevant in a specific context. The code that implements our analysis is based on the wradlib open-source software library, and is, together with the data, publicly available to monitor radar calibration or to scrutinize long series of archived radar data back to December 1997, when TRMM became operational.},
  language = {en}
}

@misc{OlenBookhagen2018,
  author   = {Olen, Stephanie M. and Bookhagen, Bodo},
  title    = {Mapping Damage-Affected Areas after Natural Hazard Events Using {Sentinel-1} Coherence Time Series},
  series   = {Remote Sensing},
  journal  = {Remote Sensing},
  url      = {http://nbn-resolving.de/urn:nbn:de:kobv:517-opus4-417766},
  pages    = {19},
  year     = {2018},
  abstract = {The emergence of the Sentinel-1A and 1B satellites now offers freely available and widely accessible Synthetic Aperture Radar (SAR) data. Near-global coverage and rapid repeat time (6-12 days) gives Sentinel-1 data the potential to be widely used for monitoring the Earth's surface. Subtle land-cover and land surface changes can affect the phase and amplitude of the C-band SAR signal, and thus the coherence between two images collected before and after such changes. Analysis of SAR coherence therefore serves as a rapidly deployable and powerful tool to track both seasonal changes and rapid surface disturbances following natural disasters. An advantage of using Sentinel-1 C-band radar data is the ability to easily construct time series of coherence for a region of interest at low cost. In this paper, we propose a new method for Potentially Affected Area (PAA) detection following a natural hazard event. Based on the coherence time series, the proposed method (1) determines the natural variability of coherence within each pixel in the region of interest, accounting for factors such as seasonality and the inherent noise of variable surfaces; and (2) compares pixel-by-pixel syn-event coherence to temporal coherence distributions to determine where statistically significant coherence loss has occurred. The user can determine to what degree the syn-event coherence value (e.g., 1st, 5th percentile of pre-event distribution) constitutes a PAA, and integrate pertinent regional data, such as population density, to rank and prioritise PAAs. We apply the method to two case studies, Sarpol-e, Iran following the 2017 Iran-Iraq earthquake, and a landslide-prone region of NW Argentina, to demonstrate how rapid identification and interpretation of potentially affected areas can be performed shortly following a natural hazard event.},
  language = {en}
}

@misc{SiegmundSandersHeinrichetal.2016,
  author   = {Siegmund, Jonatan Frederik and Sanders, Tanja G. M. and Heinrich, Ingo and van der Maaten, Ernst and Simard, Sonia and Helle, Gerhard and Donner, Reik Volker},
  title    = {Meteorological drivers of extremes in daily stem radius variations of beech, oak, and pine in {Northeastern Germany}},
  series   = {Frontiers in Plant Science},
  journal  = {Frontiers in Plant Science},
  url      = {http://nbn-resolving.de/urn:nbn:de:kobv:517-opus4-407943},
  pages    = {14},
  year     = {2016},
  abstract = {Observed recent and expected future increases in frequency and intensity of climatic extremes in central Europe may pose critical challenges for domestic tree species. Continuous dendrometer recordings provide a valuable source of information on tree stem radius variations, offering the possibility to study a tree's response to environmental influences at a high temporal resolution. In this study, we analyze stem radius variations (SRV) of three domestic tree species (beech, oak, and pine) from 2012 to 2014. We use the novel statistical approach of event coincidence analysis (ECA) to investigate the simultaneous occurrence of extreme daily weather conditions and extreme SRVs, where extremes are defined with respect to the common values at a given phase of the annual growth period. Besides defining extreme events based on individual meteorological variables, we additionally introduce conditional and joint ECA as new multivariate extensions of the original methodology and apply them for testing 105 different combinations of variables regarding their impact on SRV extremes. Our results reveal a strong susceptibility of all three species to the extremes of several meteorological variables. Yet, the inter-species differences regarding their response to the meteorological extremes are comparatively low. The obtained results provide a thorough extension of previous correlation-based studies by emphasizing on the timings of climatic extremes only. We suggest that the employed methodological approach should be further promoted in forest research regarding the investigation of tree responses to changing environmental conditions.},
  language = {en}
}

@misc{HollsteinSeglGuanteretal.2016,
  author   = {Hollstein, Andr{\'e} and Segl, Karl and Guanter, Luis and Brell, Maximilian and Enesco, Marta},
  title    = {Ready-to-Use methods for the detection of clouds, cirrus, snow, shadow, water and clear sky pixels in {Sentinel-2} {MSI} images},
  series   = {Remote Sensing},
  journal  = {Remote Sensing},
  url      = {http://nbn-resolving.de/urn:nbn:de:kobv:517-opus4-407938},
  pages    = {18},
  year     = {2016},
  abstract = {Classification of clouds, cirrus, snow, shadows and clear sky areas is a crucial step in the pre-processing of optical remote sensing images and is a valuable input for their atmospheric correction. The Multi-Spectral Imager on board the Sentinel-2's of the Copernicus program offers optimized bands for this task and delivers unprecedented amounts of data regarding spatial sampling, global coverage, spectral coverage, and repetition rate. Efficient algorithms are needed to process, or possibly reprocess, those big amounts of data. Techniques based on top-of-atmosphere reflectance spectra for single-pixels without exploitation of external data or spatial context offer the largest potential for parallel data processing and highly optimized processing throughput. Such algorithms can be seen as a baseline for possible trade-offs in processing performance when the application of more sophisticated methods is discussed. We present several ready-to-use classification algorithms which are all based on a publicly available database of manually classified Sentinel-2A images. These algorithms are based on commonly used and newly developed machine learning techniques which drastically reduce the amount of time needed to update the algorithms when new images are added to the database. Several ready-to-use decision trees are presented which allow to correctly label about 91\% of the spectra within a validation dataset. While decision trees are simple to implement and easy to understand, they offer only limited classification skill. It improves to 98\% when the presented algorithm based on the classical Bayesian method is applied. This method has only recently been used for this task and shows excellent performance concerning classification skill and processing performance. A comparison of the presented algorithms with other commonly used techniques such as random forests, stochastic gradient descent, or support vector machines is also given. Especially random forests and support vector machines show similar classification skill as the classical Bayesian method.},
  language = {en}
}