@article{DuethmannBolchFarinottietal.2015,
  author    = {Duethmann, Doris and Bolch, Tobias and Farinotti, Daniel and Kriegel, David and Vorogushyn, Sergiy and Merz, Bruno and Pieczonka, Tino and Jiang, Tong and Su, Buda and G{\"u}ntner, Andreas},
  title     = {Attribution of streamflow trends in snow and glacier melt-dominated catchments of the {Tarim River}, {Central Asia}},
  journal   = {Water Resources Research},
  volume    = {51},
  number    = {6},
  pages     = {4727--4750},
  publisher = {American Geophysical Union},
  address   = {Washington},
  issn      = {0043-1397},
  doi       = {10.1002/2014WR016716},
  year      = {2015},
  abstract  = {Observed streamflow of headwater catchments of the Tarim River (Central Asia) increased by about 30\% over the period 1957-2004. This study aims at assessing to which extent these streamflow trends can be attributed to changes in air temperature or precipitation. The analysis includes a data-based approach using multiple linear regression and a simulation-based approach using a hydrological model. The hydrological model considers changes in both glacier area and surface elevation. It was calibrated using a multiobjective optimization algorithm with calibration criteria based on glacier mass balance and daily and interannual variations of discharge. The individual contributions to the overall streamflow trends from changes in glacier geometry, temperature, and precipitation were assessed using simulation experiments with a constant glacier geometry and with detrended temperature and precipitation time series. The results showed that the observed changes in streamflow were consistent with the changes in temperature and precipitation. In the Sari-Djaz catchment, increasing temperatures and related increase of glacier melt were identified as the dominant driver, while in the Kakshaal catchment, both increasing temperatures and increasing precipitation played a major role. Comparing the two approaches, an advantage of the simulation-based approach is the fact that it is based on process-based relationships implemented in the hydrological model instead of statistical links in the regression model. However, data-based approaches are less affected by model parameter and structural uncertainties and typically fast to apply. A complementary application of both approaches is recommended.},
  language  = {en}
}

@article{NguyenVietDungMerzBardossyetal.2015,
  author    = {{Nguyen Viet Dung} and Merz, Bruno and B{\'a}rdossy, Andr{\'a}s and Apel, Heiko},
  title     = {Handling uncertainty in bivariate quantile estimation -- An application to flood hazard analysis in the {Mekong Delta}},
  journal   = {Journal of Hydrology},
  volume    = {527},
  pages     = {704--717},
  publisher = {Elsevier},
  address   = {Amsterdam},
  issn      = {0022-1694},
  doi       = {10.1016/j.jhydrol.2015.05.033},
  year      = {2015},
  abstract  = {The hydrological load causing flood hazard is in many instances not only determined by peak discharge, but is a multidimensional problem. While the methodology for multivariate frequency analysis is well established, the estimation of the associated uncertainty is rarely studied. In this paper, a method is developed to quantify the different sources of uncertainty for a bivariate flood frequency analysis. The method is exemplarily developed for the Mekong Delta (MD), one of the largest and most densely populated river deltas worldwide. Floods in the MD are the basis for the livelihoods of the local population, but they are also the major hazard. This hazard has, however, not been studied within the frame of a probabilistic flood hazard analysis. The nature of the floods in the MD suggests a bivariate approach, because the societal flood severity is determined by both peak discharge and flood volume. The uncertainty caused by selection of statistical models and parameter estimation procedures are analyzed by applying different models and methods. For the quantification of the sampling uncertainty two bootstrapping methods were applied. The developed bootstrapping-based uncertainty estimation method shows that large uncertainties are associated with the estimation of bivariate flood quantiles. This uncertainty is much larger than the model selection and fitting uncertainty. Given the rather long data series of 88 years, it is concluded that bivariate flood frequency analysis is expected to carry significant uncertainty and that the quantification and reduction of uncertainty merit greater attention. But despite this uncertainty the proposed approach has certainly major advantages compared to a univariate approach, because (a) it reflects the two essential aspects of floods in this region, (b) the uncertainties are inherent for every bivariate frequency analysis in hydrology due to the general limited length of observations and can hardly be avoided, and (c) a framework for the quantification of the uncertainties is given, which can be used and interpreted in the hazard assessment. In addition it is shown by a parametric bootstrapping experiment how longer observation time series can reduce the sampling uncertainty. Based on this finding it is concluded that bivariate frequency analyses in hydrology would greatly benefit from discharge time series augmented by proxy or historical data, or by causal hydrologic expansion of time series. (C) 2015 Elsevier B.V. All rights reserved.},
  language  = {en}
}

@article{SunLallMerzetal.2015,
  author    = {Sun, Xun and Lall, Upmanu and Merz, Bruno and {Nguyen Viet Dung}},
  title     = {Hierarchical {Bayesian} clustering for nonstationary flood frequency analysis: Application to trends of annual maximum flow in {Germany}},
  journal   = {Water Resources Research},
  volume    = {51},
  number    = {8},
  pages     = {6586--6601},
  publisher = {American Geophysical Union},
  address   = {Washington},
  issn      = {0043-1397},
  doi       = {10.1002/2015WR017117},
  year      = {2015},
  abstract  = {Especially for extreme precipitation or floods, there is considerable spatial and temporal variability in long term trends or in the response of station time series to large-scale climate indices. Consequently, identifying trends or sensitivity of these extremes to climate parameters can be marked by high uncertainty. When one develops a nonstationary frequency analysis model, a key step is the identification of potential trends or effects of climate indices on the station series. An automatic clustering procedure that effectively pools stations where there are similar responses is desirable to reduce the estimation variance, thus improving the identification of trends or responses, and accounting for spatial dependence. This paper presents a new hierarchical Bayesian approach for exploring homogeneity of response in large area data sets, through a multicomponent mixture model. The approach allows the reduction of uncertainties through both full pooling and partial pooling of stations across automatically chosen subsets of the data. We apply the model to study the trends in annual maximum daily stream flow at 68 gauges over Germany. The effects of changing the number of clusters and the parameters used for clustering are demonstrated. The results show that there are large, mainly upward trends in the gauges of the River Rhine Basin in Western Germany and along the main stream of the Danube River in the south, while there are also some small upward trends at gauges in Central and Northern Germany.},
  language  = {en}
}

@article{MerzVorogushynLalletal.2015,
  author    = {Merz, Bruno and Vorogushyn, Sergiy and Lall, Upmanu and Viglione, Alberto and Bl{\"o}schl, G{\"u}nter},
  title     = {Charting unknown waters---On the role of surprise in flood risk assessment and management},
  journal   = {Water Resources Research},
  volume    = {51},
  number    = {8},
  pages     = {6399--6416},
  publisher = {American Geophysical Union},
  address   = {Washington},
  issn      = {0043-1397},
  doi       = {10.1002/2015WR017464},
  year      = {2015},
  abstract  = {Unexpected incidents, failures, and disasters are abundant in the history of flooding events. In this paper, we introduce the metaphors of terra incognita and terra maligna to illustrate unknown and wicked flood situations, respectively. We argue that surprise is a neglected element in flood risk assessment and management. Two sources of surprise are identified: (1) the complexity of flood risk systems, represented by nonlinearities, interdependencies, and nonstationarities and (2) cognitive biases in human perception and decision making. Flood risk assessment and management are particularly prone to cognitive biases due to the rarity and uniqueness of extremes, and the nature of human risk perception. We reflect on possible approaches to better understanding and reducing the potential for surprise and its adverse consequences which may be supported by conceptually charting maps that separate terra incognita from terra cognita, and terra maligna from terra benigna. We conclude that flood risk assessment and management should account for the potential for surprise and devastating consequences which will require a shift in thinking.},
  language  = {en}
}

@article{ThiekenApelMerz2015,
  author    = {Thieken, Annegret and Apel, Heiko and Merz, Bruno},
  title     = {Assessing the probability of large-scale flood loss events: a case study for the river {Rhine}, {Germany}},
  journal   = {Journal of Flood Risk Management},
  volume    = {8},
  number    = {3},
  pages     = {247--262},
  publisher = {Wiley-Blackwell},
  address   = {Hoboken},
  issn      = {1753-318X},
  doi       = {10.1111/jfr3.12091},
  year      = {2015},
  abstract  = {Flood risk analyses are often estimated assuming the same flood intensity along the river reach under study, i.e. discharges are calculated for a number of return periods T, e.g. 10 or 100 years, at several streamflow gauges. T-year discharges are regionalised and then transferred into T-year water levels, inundated areas and impacts. This approach assumes that (1) flood scenarios are homogeneous throughout a river basin, and (2) the T-year damage corresponds to the T-year discharge. Using a reach at the river Rhine, this homogeneous approach is compared with an approach that is based on four flood types with different spatial discharge patterns. For each type, a regression model was created and used in a Monte-Carlo framework to derive heterogeneous scenarios. Per scenario, four cumulative impact indicators were calculated: (1) the total inundated area, (2) the exposed settlement and industrial areas, (3) the exposed population and (4) the potential building loss. Their frequency curves were used to establish a ranking of eight past flood events according to their severity. The investigation revealed that the two assumptions of the homogeneous approach do not hold. It tends to overestimate event probabilities in large areas. Therefore, the generation of heterogeneous scenarios should receive more attention.},
  language  = {en}
}