@article{DoerrKrejca2020,
  author    = {Doerr, Benjamin and Krejca, Martin S.},
  title     = {Significance-based estimation-of-distribution algorithms},
  journal   = {IEEE Transactions on Evolutionary Computation},
  volume    = {24},
  number    = {6},
  publisher = {Institute of Electrical and Electronics Engineers},
  address   = {New York, NY},
  issn      = {1089-778X},
  doi       = {10.1109/TEVC.2019.2956633},
  pages     = {1025--1034},
  year      = {2020},
  abstract  = {Estimation-of-distribution algorithms (EDAs) are randomized search heuristics that create a probabilistic model of the solution space, which is updated iteratively based on the quality of the solutions sampled according to the model. As previous works show, this iteration-based perspective can lead to erratic updates of the model, in particular to bit frequencies approaching a random boundary value. To overcome this problem, we propose a new EDA based on the classic compact genetic algorithm (cGA) that takes a longer history of samples into account and updates its model only with respect to information it classifies as statistically significant. We prove that this significance-based cGA (sig-cGA) optimizes the commonly regarded benchmark functions OneMax (OM), LeadingOnes, and BinVal, all in quasilinear time, a result shown for no other EDA or evolutionary algorithm so far. For the recently proposed stable compact genetic algorithm, an EDA that tries to prevent erratic model updates by imposing a bias toward the uniformly distributed model, we prove that it optimizes OM only in time exponential in its hypothetical population size. Similarly, we show that the convex search algorithm cannot optimize OM in polynomial time.},
  language  = {en}
}

@article{KreibichBottoMerzetal.2016,
  author    = {Kreibich, Heidi and Botto, Anna and Merz, Bruno and Schr{\"o}ter, Kai},
  title     = {Probabilistic, Multivariable Flood Loss Modeling on the Mesoscale with BT-FLEMO},
  journal   = {Risk Analysis},
  volume    = {37},
  number    = {4},
  publisher = {Wiley},
  address   = {Hoboken},
  issn      = {0272-4332},
  doi       = {10.1111/risa.12650},
  pages     = {774--787},
  year      = {2016},
  abstract  = {Flood loss modeling is an important component of risk analyses and decision support in flood risk management. Commonly, flood loss models describe complex damaging processes by simple, deterministic approaches such as depth-damage functions and are associated with large uncertainty. To improve flood loss estimation and to provide quantitative information about the uncertainty associated with loss modeling, a probabilistic, multivariable Bagging decision Tree Flood Loss Estimation MOdel (BT-FLEMO) for residential buildings was developed. The application of BT-FLEMO provides a probability distribution of estimated losses to residential buildings per municipality. BT-FLEMO was applied and validated at the mesoscale in 19 municipalities that were affected by the 2002 flood of the River Mulde in Saxony, Germany. Validation was undertaken on the one hand via a comparison with six deterministic loss models, including both depth-damage functions and multivariable models, and on the other hand by comparing the results with official loss data. BT-FLEMO outperforms deterministic, univariable, and multivariable models with regard to model accuracy, although the prediction uncertainty remains high. An important advantage of BT-FLEMO is the quantification of prediction uncertainty.
The probability distribution of loss estimates produced by BT-FLEMO captures well the range of loss estimates of the other models in the case study.},
  language  = {en}
}

@article{RoezerKreibichSchroeteretal.2019,
  author    = {R{\"o}zer, Viktor and Kreibich, Heidi and Schr{\"o}ter, Kai and M{\"u}ller, Meike and Sairam, Nivedita and Doss-Gollin, James and Lall, Upmanu and Merz, Bruno},
  title     = {Probabilistic Models Significantly Reduce Uncertainty in Hurricane Harvey Pluvial Flood Loss Estimates},
  journal   = {Earth's Future},
  volume    = {7},
  number    = {4},
  publisher = {American Geophysical Union},
  address   = {Washington},
  issn      = {2328-4277},
  doi       = {10.1029/2018EF001074},
  pages     = {384--394},
  year      = {2019},
  abstract  = {Pluvial flood risk is mostly excluded from urban flood risk assessment. However, the risk of pluvial flooding is a growing challenge, with a projected increase in extreme rainstorms compounding with ongoing global urbanization. Although pluvial flooding, which occurs when rainfall rates exceed the capacity of urban drainage systems, is often considered a flood type with minimal impacts, the aftermath of rainfall-triggered flooding during Hurricane Harvey and other events shows the urgent need to assess the risk of pluvial flooding. Due to its local extent and small-scale variations, the quantification of pluvial flood risk requires risk assessments at high spatial resolution. While flood hazard and exposure information is becoming increasingly accurate, the estimation of losses is still a poorly understood component of pluvial flood risk quantification. We use a new probabilistic multivariable modeling approach to estimate pluvial flood losses of individual buildings, explicitly accounting for the associated uncertainties. Except for water depth, the most important predictor in both cases, we identified different drivers for whether a building suffers any loss and for the degree of loss. Applying this approach to estimate and validate building structure losses during Hurricane Harvey using a property-level data set, we find that the reliability and dispersion of predictive loss distributions vary widely depending on the model and the aggregation level of property-level loss estimates. Our results show that the use of multivariable zero-inflated beta models reduces the 90\% prediction intervals for Hurricane Harvey building structure loss estimates on average by 78\% (totaling U.S.\$3.8 billion) compared with commonly used models.},
  language  = {en}
}