@article{GomezZapataZafrirPittoreetal.2022,
  author = {Gomez Zapata, Juan Camilo and Zafrir, Raquel and Pittore, Massimiliano and Merino, Yvonne},
  title = {Towards a sensitivity analysis in seismic risk with probabilistic building exposure models},
  series = {ISPRS International Journal of Geo-Information},
  volume = {11},
  journal = {ISPRS International Journal of Geo-Information},
  number = {2},
  publisher = {MDPI},
  address = {Basel},
  issn = {2220-9964},
  doi = {10.3390/ijgi11020113},
  pages = {38},
  year = {2022},
  abstract = {Efforts have been made in the past to enhance building exposure models on a regional scale with increasing spatial resolution by integrating different data sources. This work follows a similar path and focuses on downscaling the existing SARA exposure model proposed for the residential building stock of the communes of Valparaíso and Viña del Mar (Chile). Although this model allowed great progress in harmonising building classes and characterising their differential physical vulnerabilities, it is now outdated and, in any case, spatially aggregated over large administrative units. Hence, to more accurately consider the impact of future earthquakes on these cities, it is necessary to employ more reliable exposure models. For this purpose, we propose updating the existing model through a Bayesian approach by integrating ancillary data that has become increasingly available from Volunteered Geo-Information (VGI) activities. Its spatial representation is also optimised in higher-resolution aggregation units that avoid the inconvenience of incomplete building-by-building footprints. A worst-case earthquake scenario is presented to calculate direct economic losses and to highlight, within a sensitivity analysis, the degree of uncertainty imposed by exposure models in comparison with other parameters used to generate the seismic ground motions. This example study shows the great potential of using increasingly available VGI to update worldwide building exposure models, as well as its importance in scenario-based seismic risk assessment.},
  language = {en}
}

@article{MarkovicCarrizoKaercheretal.2017,
  author = {Markovic, Danijela and Carrizo, Savrina F. and Kaercher, Oskar and Walz, Ariane and David, Jonathan N. W.},
  title = {Vulnerability of European freshwater catchments to climate change},
  series = {Global Change Biology},
  volume = {23},
  journal = {Global Change Biology},
  publisher = {Wiley},
  address = {Hoboken},
  issn = {1354-1013},
  doi = {10.1111/gcb.13657},
  pages = {3567--3580},
  year = {2017},
  abstract = {Climate change is expected to exacerbate the current threats to freshwater ecosystems, yet multifaceted studies on the potential impacts of climate change on freshwater biodiversity at scales that inform management planning are lacking. The aim of this study was to fill this void through the development of a novel framework for assessing climate change vulnerability tailored to freshwater ecosystems. The three dimensions of climate change vulnerability are as follows: (i) exposure to climate change, (ii) sensitivity to altered environmental conditions and (iii) resilience potential. Our vulnerability framework includes 1685 freshwater species of plants, fishes, molluscs, odonates, amphibians, crayfish and turtles alongside key features within and between catchments, such as topography and connectivity. Several methodologies were used to combine these dimensions across a variety of future climate change models and scenarios.
The resulting indices were overlaid to assess the vulnerability of European freshwater ecosystems at the catchment scale (18 783 catchments). The Balkan lakes Ohrid and Prespa and the Mediterranean islands emerge as the most vulnerable to climate change. For the 2030s, we showed a consensus among the applied methods whereby up to 573 lake and river catchments are highly vulnerable to climate change. The anthropogenic disruption of hydrological habitat connectivity by dams is the major factor reducing climate change resilience. A gap analysis demonstrated that the current European protected area network covers <25\% of the most vulnerable catchments. Practical steps need to be taken to ensure the persistence of freshwater biodiversity under climate change. Priority should be placed on enhancing stakeholder cooperation at the major basin scale towards preventing further degradation of freshwater ecosystems and maintaining connectivity among catchments. The catchments identified as most vulnerable to climate change provide preliminary targets for the development of climate change conservation management and mitigation strategies.},
  language = {en}
}

@article{ThiekenApelMerz2015,
  author = {Thieken, Annegret and Apel, Heiko and Merz, Bruno},
  title = {Assessing the probability of large-scale flood loss events: a case study for the river Rhine, Germany},
  series = {Journal of Flood Risk Management},
  volume = {8},
  journal = {Journal of Flood Risk Management},
  number = {3},
  publisher = {Wiley-Blackwell},
  address = {Hoboken},
  issn = {1753-318X},
  doi = {10.1111/jfr3.12091},
  pages = {247--262},
  year = {2015},
  abstract = {Flood risk analyses often assume the same flood intensity along the river reach under study, i.e. discharges are calculated for a number of return periods T, e.g. 10 or 100 years, at several streamflow gauges. T-year discharges are regionalised and then transferred into T-year water levels, inundated areas and impacts. This approach assumes that (1) flood scenarios are homogeneous throughout a river basin, and (2) the T-year damage corresponds to the T-year discharge. Using a reach of the river Rhine, this homogeneous approach is compared with an approach based on four flood types with different spatial discharge patterns. For each type, a regression model was created and used in a Monte Carlo framework to derive heterogeneous scenarios. Per scenario, four cumulative impact indicators were calculated: (1) the total inundated area, (2) the exposed settlement and industrial areas, (3) the exposed population and (4) the potential building loss. Their frequency curves were used to establish a ranking of eight past flood events according to their severity. The investigation revealed that the two assumptions of the homogeneous approach do not hold; the approach tends to overestimate event probabilities in large areas. Therefore, the generation of heterogeneous scenarios should receive more attention.},
  language = {en}
}