@article{BoersGoswamiRheinwaltetal.2019, author = {Boers, Niklas and Goswami, Bedartha and Rheinwalt, Aljoscha and Bookhagen, Bodo and Hoskins, Brian and Kurths, J{\"u}rgen}, title = {Complex networks reveal global pattern of extreme-rainfall teleconnections}, series = {Nature : the international weekly journal of science}, volume = {566}, journal = {Nature : the international weekly journal of science}, number = {7744}, publisher = {Nature Publ. Group}, address = {London}, issn = {0028-0836}, doi = {10.1038/s41586-018-0872-x}, pages = {373 -- 377}, year = {2019}, abstract = {Climatic observables are often correlated across long spatial distances, and extreme events, such as heatwaves or floods, are typically assumed to be related to such teleconnections(1,2). Revealing atmospheric teleconnection patterns and understanding their underlying mechanisms is of great importance for weather forecasting in general and extreme-event prediction in particular(3,4), especially considering that the characteristics of extreme events have been suggested to change under ongoing anthropogenic climate change(5-8). Here we reveal the global coupling pattern of extreme-rainfall events by applying complex-network methodology to high-resolution satellite data and introducing a technique that corrects for multiple-comparison bias in functional networks. We find that the distance distribution of significant connections (P < 0.005) around the globe decays according to a power law up to distances of about 2,500 kilometres. For longer distances, the probability of significant connections is much higher than expected from the scaling of the power law. We attribute the shorter, power-law-distributed connections to regional weather systems. The longer, super-power-law-distributed connections form a global rainfall teleconnection pattern that is probably controlled by upper-level Rossby waves. We show that extreme-rainfall events in the monsoon systems of south-central Asia, east Asia and Africa are significantly synchronized. Moreover, we uncover concise links between south-central Asia and the European and North American extratropics, as well as the Southern Hemisphere extratropics. Analysis of the atmospheric conditions that lead to these teleconnections confirms Rossby waves as the physical mechanism underlying these global teleconnection patterns and emphasizes their crucial role in dynamical tropical-extratropical couplings. Our results provide insights into the function of Rossby waves in creating stable, global-scale dependencies of extreme-rainfall events, and into the potential predictability of associated natural hazards.}, language = {en} } @article{BrellSeglGuanteretal.2019, author = {Brell, Maximilian and Segl, Karl and Guanter, Luis and Bookhagen, Bodo}, title = {3D hyperspectral point cloud generation}, series = {ISPRS journal of photogrammetry and remote sensing : official publication of the International Society for Photogrammetry and Remote Sensing}, volume = {149}, journal = {ISPRS journal of photogrammetry and remote sensing : official publication of the International Society for Photogrammetry and Remote Sensing}, publisher = {Elsevier}, address = {Amsterdam}, issn = {0924-2716}, doi = {10.1016/j.isprsjprs.2019.01.022}, pages = {200 -- 214}, year = {2019}, abstract = {Remote sensing technologies allow the mapping of biophysical, biochemical, and earth surface parameters of the land surface.
Of special interest for various applications in environmental and urban sciences is the combination of spectral and 3D elevation information. However, those two data streams are provided separately by different instruments, namely an airborne laser scanner (ALS) for elevation and a hyperspectral imager (HSI) for high spectral resolution data. The fusion of ALS and HSI data can thus lead to a single data entity consistently featuring rich structural and spectral information. In this study, we present the application of fusing the first pulse return information from ALS data at a sub-decimeter spatial resolution with the lower-spatial-resolution hyperspectral information available from the HSI into a hyperspectral point cloud (HSPC). During the processing, a plausible hyperspectral spectrum is assigned to every first-return ALS point. We show that the complementary implementation of spectral and 3D information at the point-cloud scale improves object-based classification and information extraction schemes. These improvements have great potential for numerous land cover mapping and environmental applications.}, language = {en} } @misc{SmithRheinwaltBookhagen2019, author = {Smith, Taylor and Rheinwalt, Aljoscha and Bookhagen, Bodo}, title = {Determining the optimal grid resolution for topographic analysis on an airborne lidar dataset}, series = {Postprints der Universit{\"a}t Potsdam Mathematisch-Naturwissenschaftliche Reihe}, journal = {Postprints der Universit{\"a}t Potsdam Mathematisch-Naturwissenschaftliche Reihe}, number = {725}, issn = {1866-8372}, doi = {10.25932/publishup-43016}, url = {http://nbn-resolving.de/urn:nbn:de:kobv:517-opus4-430165}, pages = {475 -- 489}, year = {2019}, abstract = {Digital elevation models (DEMs) are a gridded representation of the surface of the Earth and typically contain uncertainties due to data collection and processing. Slope and aspect estimates on a DEM contain errors and uncertainties inherited from the representation of a continuous surface as a grid (referred to as truncation error; TE) and from any DEM uncertainty. We analyze in detail the impacts of TE and propagated elevation uncertainty (PEU) on slope and aspect. Using synthetic data as a control, we define functions to quantify both TE and PEU for arbitrary grids. We then develop a quality metric which captures the combined impact of both TE and PEU on the calculation of topographic metrics. Our quality metric allows us to examine the spatial patterns of error and uncertainty in topographic metrics and to compare calculations on DEMs of different sizes and accuracies. Using lidar data with a point density of ∼10 pts m⁻² covering Santa Cruz Island (SCI) in southern California, we are able to generate DEMs and uncertainty estimates at several grid resolutions. Slope (aspect) errors on the 1 m dataset are on average 0.3° (0.9°) from TE and 5.5° (14.5°) from PEU. We calculate an optimal DEM resolution of 4 m for our SCI lidar dataset that minimizes the error bounds on topographic metric calculations due to the combined influence of TE and PEU for both slope and aspect calculations over the entire SCI. Average slope (aspect) errors from the 4 m DEM are 0.25° (0.75°) from TE and 5° (12.5°) from PEU.
While the smallest grid resolution possible from the high-density SCI lidar is not necessarily optimal for calculating topographic metrics, high point-density data are essential for measuring DEM uncertainty across a range of resolutions.}, language = {en} } @article{PurintonBookhagen2019, author = {Purinton, Benjamin and Bookhagen, Bodo}, title = {Introducing PebbleCounts}, series = {Earth Surface Dynamics}, volume = {7}, journal = {Earth Surface Dynamics}, publisher = {Copernicus Publ}, address = {G{\"o}ttingen}, issn = {2196-6311}, doi = {10.5194/esurf-7-859-2019}, pages = {859 -- 877}, year = {2019}, abstract = {Grain-size distributions are a key geomorphic metric of gravel-bed rivers. Traditional measurement methods include manual counting or photo sieving, but these are achievable only at the 1-10 ㎡ scale. With the advent of drones and increasingly high-resolution cameras, we can now generate orthoimagery over hectares at millimeter to centimeter resolution. These scales, along with the complexity of high-mountain rivers, necessitate different approaches for photo sieving. As opposed to other image segmentation methods that use a watershed approach, our open-source algorithm, PebbleCounts, relies on k-means clustering in the spatial and spectral domain and rapid manual selection of well-delineated grains. This improves grain-size estimates for complex riverbed imagery, without post-processing. We also develop a fully automated method, PebbleCountsAuto, that relies on edge detection and filtering suspect grains, without the k-means clustering or manual selection steps. The algorithms are tested in controlled indoor conditions on three arrays of pebbles and then applied to 12 × 1 ㎡ orthomosaic clips of high-energy mountain rivers collected with a camera-on-mast setup (akin to a low-flying drone). A 20-pixel b-axis length lower truncation is necessary for attaining accurate grain-size distributions. For the k-means PebbleCounts approach, average percentile bias and precision are 0.03 and 0.09 ψ, respectively, for ∼1.16 mm pixel⁻¹ images, and 0.07 and 0.05 ψ for one 0.32 mm pixel⁻¹ image. The automatic approach has higher bias and precision of 0.13 and 0.15 ψ, respectively, for ∼1.16 mm pixel⁻¹ images, but similar values of -0.06 and 0.05 ψ for one 0.32 mm pixel⁻¹ image. For the automatic approach, at best only 70 \% of the grains are correctly identified, and typically around 50 \%. PebbleCounts operates most effectively at the 1 ㎡ patch scale, where it can be applied in ∼5-10 min on many patches to acquire accurate grain-size data over 10-100 ㎡ areas. These data can be used to validate PebbleCountsAuto, which may be applied at the scale of entire survey sites (10²-10⁴ ㎡). We synthesize results and recommend best practices for image collection, orthomosaic generation, and grain-size measurement using both algorithms.}, language = {en} } @article{SmithRheinwaltBookhagen2019, author = {Smith, Taylor and Rheinwalt, Aljoscha and Bookhagen, Bodo}, title = {Determining the optimal grid resolution for topographic analysis on an airborne lidar dataset}, series = {Earth Surface Dynamics}, volume = {7}, journal = {Earth Surface Dynamics}, publisher = {Copernicus Publ.}, address = {G{\"o}ttingen}, issn = {2196-6311}, doi = {10.5194/esurf-7-475-2019}, pages = {475 -- 489}, year = {2019}, abstract = {Digital elevation models (DEMs) are a gridded representation of the surface of the Earth and typically contain uncertainties due to data collection and processing.
Slope and aspect estimates on a DEM contain errors and uncertainties inherited from the representation of a continuous surface as a grid (referred to as truncation error; TE) and from any DEM uncertainty. We analyze in detail the impacts of TE and propagated elevation uncertainty (PEU) on slope and aspect. Using synthetic data as a control, we define functions to quantify both TE and PEU for arbitrary grids. We then develop a quality metric which captures the combined impact of both TE and PEU on the calculation of topographic metrics. Our quality metric allows us to examine the spatial patterns of error and uncertainty in topographic metrics and to compare calculations on DEMs of different sizes and accuracies. Using lidar data with a point density of ∼10 pts m⁻² covering Santa Cruz Island (SCI) in southern California, we are able to generate DEMs and uncertainty estimates at several grid resolutions. Slope (aspect) errors on the 1 m dataset are on average 0.3° (0.9°) from TE and 5.5° (14.5°) from PEU. We calculate an optimal DEM resolution of 4 m for our SCI lidar dataset that minimizes the error bounds on topographic metric calculations due to the combined influence of TE and PEU for both slope and aspect calculations over the entire SCI. Average slope (aspect) errors from the 4 m DEM are 0.25° (0.75°) from TE and 5° (12.5°) from PEU. While the smallest grid resolution possible from the high-density SCI lidar is not necessarily optimal for calculating topographic metrics, high point-density data are essential for measuring DEM uncertainty across a range of resolutions.}, language = {en} } @misc{PurintonBookhagen2019, author = {Purinton, Benjamin and Bookhagen, Bodo}, title = {Introducing PebbleCounts}, series = {Postprints der Universit{\"a}t Potsdam Mathematisch-Naturwissenschaftliche Reihe}, journal = {Postprints der Universit{\"a}t Potsdam Mathematisch-Naturwissenschaftliche Reihe}, number = {783}, issn = {1866-8372}, doi = {10.25932/publishup-43946}, url = {http://nbn-resolving.de/urn:nbn:de:kobv:517-opus4-439468}, pages = {21}, year = {2019}, abstract = {Grain-size distributions are a key geomorphic metric of gravel-bed rivers. Traditional measurement methods include manual counting or photo sieving, but these are achievable only at the 1-10 ㎡ scale. With the advent of drones and increasingly high-resolution cameras, we can now generate orthoimagery over hectares at millimeter to centimeter resolution. These scales, along with the complexity of high-mountain rivers, necessitate different approaches for photo sieving. As opposed to other image segmentation methods that use a watershed approach, our open-source algorithm, PebbleCounts, relies on k-means clustering in the spatial and spectral domain and rapid manual selection of well-delineated grains. This improves grain-size estimates for complex riverbed imagery, without post-processing. We also develop a fully automated method, PebbleCountsAuto, that relies on edge detection and filtering suspect grains, without the k-means clustering or manual selection steps. The algorithms are tested in controlled indoor conditions on three arrays of pebbles and then applied to 12 × 1 ㎡ orthomosaic clips of high-energy mountain rivers collected with a camera-on-mast setup (akin to a low-flying drone). A 20-pixel b-axis length lower truncation is necessary for attaining accurate grain-size distributions.
For the k-means PebbleCounts approach, average percentile bias and precision are 0.03 and 0.09 ψ, respectively, for ∼1.16 mm pixel⁻¹ images, and 0.07 and 0.05 ψ for one 0.32 mm pixel⁻¹ image. The automatic approach has higher bias and precision of 0.13 and 0.15 ψ, respectively, for ∼1.16 mm pixel⁻¹ images, but similar values of -0.06 and 0.05 ψ for one 0.32 mm pixel⁻¹ image. For the automatic approach, at best only 70 \% of the grains are correctly identified, and typically around 50 \%. PebbleCounts operates most effectively at the 1 ㎡ patch scale, where it can be applied in ∼5-10 min on many patches to acquire accurate grain-size data over 10-100 ㎡ areas. These data can be used to validate PebbleCountsAuto, which may be applied at the scale of entire survey sites (10²-10⁴ ㎡). We synthesize results and recommend best practices for image collection, orthomosaic generation, and grain-size measurement using both algorithms.}, language = {en} } @misc{BriegerHerzschuhPestryakovaetal.2019, author = {Brieger, Frederic and Herzschuh, Ulrike and Pestryakova, Luidmila Agafyevna and Bookhagen, Bodo and Zakharov, Evgenii S. and Kruse, Stefan}, title = {Advances in the derivation of Northeast Siberian forest metrics using high-resolution UAV-based photogrammetric point clouds}, series = {Zweitver{\"o}ffentlichungen der Universit{\"a}t Potsdam : Mathematisch-Naturwissenschaftliche Reihe}, journal = {Zweitver{\"o}ffentlichungen der Universit{\"a}t Potsdam : Mathematisch-Naturwissenschaftliche Reihe}, number = {1337}, issn = {1866-8372}, doi = {10.25932/publishup-47331}, url = {http://nbn-resolving.de/urn:nbn:de:kobv:517-opus4-473318}, pages = {24}, year = {2019}, abstract = {Forest structure is a crucial component in the assessment of whether a forest is likely to act as a carbon sink under changing climate. Detailed 3D structural information about the tundra-taiga ecotone of Siberia is mostly missing and still underrepresented in current research due to the remoteness and restricted accessibility. Field-based, high-resolution remote sensing can provide important knowledge for the understanding of vegetation properties and dynamics. In this study, we test the applicability of consumer-grade Unmanned Aerial Vehicles (UAVs) for rapid calculation of stand metrics in treeline forests. We reconstructed high-resolution photogrammetric point clouds and derived canopy height models for 10 study sites from NE Chukotka and SW Yakutia. Subsequently, we detected individual tree tops using a variable-window size local maximum filter and applied a marker-controlled watershed segmentation for the delineation of tree crowns. With this, we successfully detected 67.1\% of the validation individuals. Simple linear regressions of observed and detected metrics show a better correlation (R²) and lower relative root mean square percentage error (RMSE\%) for tree heights (mean R² = 0.77, mean RMSE\% = 18.46\%) than for crown diameters (mean R² = 0.46, mean RMSE\% = 24.9\%). The comparison between detected and observed tree height distributions revealed that our tree detection method was unable to representatively identify trees <2 m. Our results show that plot sizes for vegetation surveys in the tundra-taiga ecotone should be adapted to the forest structure and have a radius of >15-20 m to capture homogeneous and representative forest stands. Additionally, we identify sources of omission and commission errors and give recommendations for their mitigation.
In summary, the efficiency of the method used depends on the complexity of the forest's stand structure.}, language = {en} } @article{LoiblBookhagenValadeetal.2019, author = {Loibl, David and Bookhagen, Bodo and Valade, Sebastien and Schneider, Christoph}, title = {OSARIS, the "Open Source SAR Investigation System" for Automatized Parallel InSAR Processing of Sentinel-1 Time Series Data With Special Emphasis on Cryosphere Applications}, series = {Frontiers in Earth Science}, volume = {7}, journal = {Frontiers in Earth Science}, publisher = {Frontiers Media}, address = {Lausanne}, issn = {2296-6463}, doi = {10.3389/feart.2019.00172}, pages = {20}, year = {2019}, abstract = {With the advent of the two Sentinel-1 (S1) satellites, Synthetic Aperture Radar (SAR) data with high temporal and spatial resolution are freely available. This provides a promising framework to facilitate detailed investigations of surface instabilities and movements on large scales with high temporal resolution, but also poses substantial processing challenges because of storage and computation requirements. Methods are needed to efficiently detect short-term changes in dynamic environments. Approaches considering pair-wise processing of a series of consecutive scenes to retain maximum temporal resolution in conjunction with time series analyses are required. Here we present OSARIS, the "Open Source SAR Investigation System," as a framework to process large stacks of S1 data on high-performance computing clusters. Based on Generic Mapping Tools SAR, shell scripts, and the workload manager Slurm, OSARIS provides an open and modular framework combining parallelization of high-performance C programs, flexible processing schemes, convenient configuration, and generation of geocoded stacks of analysis-ready base data, including amplitude, phase, coherence, and unwrapped interferograms. Time series analyses can be conducted by applying automated modules to the data stacks. The capabilities of OSARIS are demonstrated in a case study from the northwestern Tien Shan, Central Asia. After merging of slices, a total of 80 scene pairs were processed from 174 total input scenes. The coherence time series exhibits pronounced seasonal variability, with relatively high coherence values prevailing during the summer months in the nival zone. As an example of a time series analysis module, we present OSARIS' "Unstable Coherence Metric" which identifies pixels affected by significant drops from high to low coherence values. Measurements of motion provided by line-of-sight displacement (LOSD) require careful evaluation because interferometric phase unwrapping is prone to errors. Here, OSARIS provides a series of modules to detect and mask unwrapping errors, correct for atmospheric disturbances, and remove large-scale trends. Wall clock processing time for the case study (area ~9,000 km²) was ~12 h 4 min on a machine with 400 cores and 2 TB RAM. In total, ~12 d 10 h 44 min (~96\%) were saved through parallelization. A comparison of selected OSARIS datasets to results from two state-of-the-art SAR processing suites, ISCE and SNAP, shows that OSARIS provides products of competitive quality despite its high level of automatization. OSARIS thus facilitates efficient S1-based region-wide investigations of surface movement events over multiple years.}, language = {en} } @article{BriegerHerzschuhPestryakovaetal.2019, author = {Brieger, Frederic and Herzschuh, Ulrike and Pestryakova, Luidmila Agafyevna and Bookhagen, Bodo and Zakharov, Evgenii S.
and Kruse, Stefan}, title = {Advances in the Derivation of Northeast Siberian Forest Metrics Using High-Resolution UAV-Based Photogrammetric Point Clouds}, series = {Remote sensing}, volume = {11}, journal = {Remote sensing}, number = {12}, publisher = {MDPI}, address = {Basel}, issn = {2072-4292}, doi = {10.3390/rs11121447}, pages = {24}, year = {2019}, abstract = {Forest structure is a crucial component in the assessment of whether a forest is likely to act as a carbon sink under changing climate. Detailed 3D structural information about the tundra-taiga ecotone of Siberia is mostly missing and still underrepresented in current research due to the remoteness and restricted accessibility. Field-based, high-resolution remote sensing can provide important knowledge for the understanding of vegetation properties and dynamics. In this study, we test the applicability of consumer-grade Unmanned Aerial Vehicles (UAVs) for rapid calculation of stand metrics in treeline forests. We reconstructed high-resolution photogrammetric point clouds and derived canopy height models for 10 study sites from NE Chukotka and SW Yakutia. Subsequently, we detected individual tree tops using a variable-window size local maximum filter and applied a marker-controlled watershed segmentation for the delineation of tree crowns. With this, we successfully detected 67.1\% of the validation individuals. Simple linear regressions of observed and detected metrics show a better correlation (R²) and lower relative root mean square percentage error (RMSE\%) for tree heights (mean R² = 0.77, mean RMSE\% = 18.46\%) than for crown diameters (mean R² = 0.46, mean RMSE\% = 24.9\%). The comparison between detected and observed tree height distributions revealed that our tree detection method was unable to representatively identify trees <2 m. Our results show that plot sizes for vegetation surveys in the tundra-taiga ecotone should be adapted to the forest structure and have a radius of >15-20 m to capture homogeneous and representative forest stands. Additionally, we identify sources of omission and commission errors and give recommendations for their mitigation. In summary, the efficiency of the method used depends on the complexity of the forest's stand structure.}, language = {en} } @article{WeldeabRuehlemannBookhagenetal.2019, author = {Weldeab, Syee and R{\"u}hlemann, Carsten and Bookhagen, Bodo and Pausata, Francesco S. R. and Perez-Lua, Fabiola M.}, title = {Enhanced Himalayan glacial melting during YD and H1 recorded in the Northern Bay of Bengal}, series = {Geochemistry, geophysics, geosystems}, volume = {20}, journal = {Geochemistry, geophysics, geosystems}, number = {5}, publisher = {American Geophysical Union}, address = {Washington}, issn = {1525-2027}, doi = {10.1029/2018GC008065}, pages = {2449 -- 2461}, year = {2019}, abstract = {Ocean-land thermal feedback mechanisms in the Indian Summer Monsoon (ISM) domain are an important but not well understood component of regional climate dynamics. Here we present a δ¹⁸O record analyzed in the mixed-layer dwelling planktonic foraminifer Globigerinoides ruber (sensu stricto) from the northernmost Bay of Bengal (BoB). The δ¹⁸O time series provides a spatially integrated measure of monsoonal precipitation and Himalayan meltwater runoff into the northern BoB and reveals two brief episodes of anomalously low δ¹⁸O values between 16.3 ± 0.4 and 16 ± 0.5 and 12.6 ± 0.4 and 12.3 ± 0.4 thousand years before present.
The timing of these events is centered at Heinrich event 1 and the Younger Dryas, well-known phases of weak Northern Hemisphere monsoon systems. Numerical climate model experiments, simulating Heinrich event-like conditions, suggest a surface warming over the monsoon-dominated Himalaya and foreland in response to ISM weakening. Corroborating the simulation results, our analysis of published moraine exposure ages in the monsoon-dominated Himalaya indicates enhanced glacier retreats that, considering age model uncertainties, coincide and overlap with the episodes of anomalously low δ¹⁸O values in the northernmost BoB. Our climate proxy and simulation results provide insights into past regional climate dynamics, suggesting reduced cloud cover, increased solar radiation, and air warming of the Himalaya and foreland areas and, as a result, glacier mass losses in response to weakened ISM. Plain Language Summary: Indian Summer Monsoon rainfall and Himalayan glacier/snow melts constitute the main water source for the densely populated Indian subcontinent. Better understanding of how future climate changes will affect the monsoon rainfall and Himalayan glaciers requires a long climate record. In this study, we create a 13,000-year-long climate record that allows us to better understand the response of Indian Summer Monsoon rainfall and Himalayan glaciers to past climate changes. The focus of our study is the time window between 9,000 and 22,000 years ago, an episode where the global climate experienced large and rapid changes. Our sediment record from the northern Bay of Bengal and climate change simulation indicate that during episodes of weak monsoon, the melting of the Himalayan glaciers increases substantially. This is because the weakening of the monsoon results in less cloud cover and, as a result, the surface receives more sunlight, which enhances glacier melting.}, language = {en} }