@article{NguyenVietDungMerzBardossyetal.2015,
  author    = {Nguyen, Viet Dung and Merz, Bruno and Bardossy, Andras and Apel, Heiko},
  title     = {Handling uncertainty in bivariate quantile estimation - An application to flood hazard analysis in the Mekong Delta},
  series    = {Journal of Hydrology},
  volume    = {527},
  journal   = {Journal of Hydrology},
  publisher = {Elsevier},
  address   = {Amsterdam},
  issn      = {0022-1694},
  doi       = {10.1016/j.jhydrol.2015.05.033},
  pages     = {704--717},
  year      = {2015},
  abstract  = {The hydrological load causing flood hazard is in many instances not determined by peak discharge alone, but is a multidimensional problem. While the methodology for multivariate frequency analysis is well established, the estimation of the associated uncertainty is rarely studied. In this paper, a method is developed to quantify the different sources of uncertainty in a bivariate flood frequency analysis. The method is exemplarily developed for the Mekong Delta (MD), one of the largest and most densely populated river deltas worldwide. Floods in the MD are the basis for the livelihoods of the local population, but they are also the major hazard. This hazard has, however, not been studied within the frame of a probabilistic flood hazard analysis. The nature of the floods in the MD suggests a bivariate approach, because the societal flood severity is determined by both peak discharge and flood volume. The uncertainty caused by the selection of statistical models and parameter estimation procedures is analyzed by applying different models and methods. For the quantification of the sampling uncertainty, two bootstrapping methods were applied. The developed bootstrapping-based uncertainty estimation method shows that large uncertainties are associated with the estimation of bivariate flood quantiles. This uncertainty is much larger than the model selection and fitting uncertainty. Given the rather long data series of 88 years, it is concluded that bivariate flood frequency analysis must be expected to carry significant uncertainty and that the quantification and reduction of uncertainty merit greater attention. Despite this uncertainty, the proposed approach has major advantages compared to a univariate approach, because (a) it reflects the two essential aspects of floods in this region, (b) the uncertainties are inherent to every bivariate frequency analysis in hydrology due to the generally limited length of observations and can hardly be avoided, and (c) a framework for the quantification of the uncertainties is given, which can be used and interpreted in the hazard assessment. In addition, a parametric bootstrapping experiment shows how longer observation time series can reduce the sampling uncertainty. Based on this finding, it is concluded that bivariate frequency analyses in hydrology would greatly benefit from discharge time series augmented by proxy or historical data, or by causal hydrologic expansion of time series.},
  language  = {en}
}
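Illustrative Python sketch (not the authors' code, kept outside the entries as a comment): a minimal parametric bootstrap for the sampling uncertainty of a bivariate design quantity, assuming Gumbel margins, a Gaussian copula, and synthetic data standing in for the 88-year Mekong series. The study compares several statistical models and two bootstrapping schemes; none of those choices is reproduced here.

import numpy as np
from scipy import stats

rng = np.random.default_rng(1)
n_years, T = 88, 100                       # record length, target return period

# Synthetic "observed" annual maxima: peak discharge Q and flood volume V
# (hypothetical parameters, only for illustration).
z = rng.multivariate_normal([0, 0], [[1, 0.7], [0.7, 1]], size=n_years)
q_obs = stats.gumbel_r.ppf(stats.norm.cdf(z[:, 0]), loc=40000, scale=5000)
v_obs = stats.gumbel_r.ppf(stats.norm.cdf(z[:, 1]), loc=300, scale=60)

def fit_model(q, v):
    """Fit Gumbel margins and the Gaussian-copula correlation of the normal scores."""
    mq, mv = stats.gumbel_r.fit(q), stats.gumbel_r.fit(v)
    zq = stats.norm.ppf(stats.gumbel_r.cdf(q, *mq))
    zv = stats.norm.ppf(stats.gumbel_r.cdf(v, *mv))
    return mq, mv, np.corrcoef(zq, zv)[0, 1]

def joint_and_T(rho, T):
    """Return period of exceeding BOTH marginal T-year quantiles ('AND' scenario)."""
    u = 1.0 - 1.0 / T
    x = stats.norm.ppf(u)
    c = stats.multivariate_normal.cdf([x, x], mean=[0.0, 0.0],
                                      cov=[[1.0, rho], [rho, 1.0]])
    return 1.0 / (1.0 - 2.0 * u + c)       # 1 / P(Q > q_T and V > v_T)

mq, mv, rho = fit_model(q_obs, v_obs)
print("point estimate of the 'AND' joint return period:",
      round(joint_and_T(rho, T)))

# Parametric bootstrap: resample 88-year records from the fitted model,
# refit, and collect the joint return period to see the sampling spread.
reps = []
for _ in range(200):
    zb = rng.multivariate_normal([0, 0], [[1, rho], [rho, 1]], size=n_years)
    qb = stats.gumbel_r.ppf(stats.norm.cdf(zb[:, 0]), *mq)
    vb = stats.gumbel_r.ppf(stats.norm.cdf(zb[:, 1]), *mv)
    reps.append(joint_and_T(fit_model(qb, vb)[2], T))
print("bootstrap 90% interval:", np.percentile(reps, [5, 95]).round())

The width of the bootstrap interval relative to the point estimate is the kind of sampling uncertainty the paper quantifies; with fewer than 88 years it widens further, which is the motivation for extending series with proxy or historical data.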
@article{ThiekenApelMerz2015,
  author    = {Thieken, Annegret and Apel, Heiko and Merz, Bruno},
  title     = {Assessing the probability of large-scale flood loss events: a case study for the river Rhine, Germany},
  series    = {Journal of Flood Risk Management},
  volume    = {8},
  journal   = {Journal of Flood Risk Management},
  number    = {3},
  publisher = {Wiley-Blackwell},
  address   = {Hoboken},
  issn      = {1753-318X},
  doi       = {10.1111/jfr3.12091},
  pages     = {247--262},
  year      = {2015},
  abstract  = {Flood risk analyses often assume the same flood intensity along the river reach under study, i.e. discharges are calculated for a number of return periods T, e.g. 10 or 100 years, at several streamflow gauges. T-year discharges are regionalised and then transferred into T-year water levels, inundated areas and impacts. This approach assumes that (1) flood scenarios are homogeneous throughout a river basin, and (2) the T-year damage corresponds to the T-year discharge. Using a reach of the river Rhine, this homogeneous approach is compared with an approach that is based on four flood types with different spatial discharge patterns. For each type, a regression model was created and used in a Monte-Carlo framework to derive heterogeneous scenarios. Per scenario, four cumulative impact indicators were calculated: (1) the total inundated area, (2) the exposed settlement and industrial areas, (3) the exposed population, and (4) the potential building loss. Their frequency curves were used to establish a ranking of eight past flood events according to their severity. The investigation revealed that the two assumptions of the homogeneous approach do not hold; in particular, it tends to overestimate the probability of large-scale events. Therefore, the generation of heterogeneous scenarios should receive more attention.},
  language  = {en}
}
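Illustrative Python sketch (hypothetical numbers and damage function, not the study's regression models): Monte Carlo generation of heterogeneous flood scenarios from typed spatial discharge patterns, and the resulting frequency curve of a basin-wide cumulative impact indicator.

import numpy as np
from scipy import stats

rng = np.random.default_rng(0)
n_sim, n_reaches = 20000, 5                # one synthetic flood event per year

# Assumed flood types with occurrence probabilities and spatial scaling
# patterns (stand-ins for the type-specific regression models of the study).
type_prob = np.array([0.4, 0.3, 0.2, 0.1])
pattern = np.array([                       # relative discharge per reach and type
    [1.0, 0.9, 0.8, 0.7, 0.6],
    [0.6, 0.8, 1.0, 0.9, 0.7],
    [0.8, 1.0, 0.9, 0.8, 1.0],
    [1.0, 1.0, 1.0, 0.9, 0.9],
])

def loss(discharge):
    """Hypothetical damage function: loss per reach grows with discharge above a threshold."""
    return np.maximum(discharge - 6000.0, 0.0) ** 1.2 / 1e3

types = rng.choice(4, size=n_sim, p=type_prob)
peak_ref = stats.gumbel_r.ppf(rng.uniform(size=n_sim), loc=7000, scale=1500)
noise = rng.normal(1.0, 0.05, size=(n_sim, n_reaches))    # regression scatter
q = peak_ref[:, None] * pattern[types] * noise            # discharge per reach
total_loss = loss(q).sum(axis=1)                          # cumulative indicator

# Empirical exceedance frequency curve of the basin-wide loss.
sorted_loss = np.sort(total_loss)[::-1]
exceed_prob = np.arange(1, n_sim + 1) / (n_sim + 1)
print("loss exceeded on average once in 100 years:",
      round(np.interp(0.01, exceed_prob, sorted_loss), 1))

Ranking observed events against such a frequency curve of a cumulative indicator, rather than against a single regionalised T-year discharge, is what allows the heterogeneous approach to avoid overstating the probability of basin-wide loss events.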