@article{BelaidRabusKrestel2021,
  author    = {Belaid, Mohamed Karim and Rabus, Maximilian and Krestel, Ralf},
  title     = {{CrashNet}},
  journal   = {Data mining and knowledge discovery},
  volume    = {35},
  number    = {4},
  publisher = {Springer},
  address   = {Dordrecht},
  issn      = {1384-5810},
  doi       = {10.1007/s10618-021-00761-9},
  pages     = {1688--1709},
  year      = {2021},
  abstract  = {Destructive car crash tests are an elaborate, time-consuming, and expensive necessity of the automotive development process. Today, finite element method (FEM) simulations are used to reduce costs by simulating car crashes computationally. We propose CrashNet, an encoder-decoder deep neural network architecture that reduces costs further and models specific outcomes of car crashes very accurately. We achieve this by formulating car crash events as time series prediction enriched with a set of scalar features. Traditional sequence-to-sequence models are usually composed of convolutional neural network (CNN) and CNN transpose layers. We propose to concatenate those with an MLP capable of learning how to inject the given scalars into the output time series. In addition, we replace the CNN transpose with 2D CNN transpose layers in order to force the model to process the hidden state of the set of scalars as one time series. The proposed CrashNet model can be trained efficiently and is able to process scalars and time series as input in order to infer the results of crash tests. CrashNet produces results faster and at a lower cost compared to destructive tests and FEM simulations. Moreover, it represents a novel approach in the car safety management domain.},
  language  = {en},
}

@article{UllrichHegnauerNguyenetal.2021,
  author    = {Ullrich, Sophie Louise and Hegnauer, Mark and Nguyen, Dung Viet and Merz, Bruno and Kwadijk, Jaap and Vorogushyn, Sergiy},
  title     = {Comparative evaluation of two types of stochastic weather generators for synthetic precipitation in the {Rhine} basin},
  journal   = {Journal of hydrology},
  volume    = {601},
  publisher = {Elsevier},
  address   = {Amsterdam},
  issn      = {0022-1694},
  doi       = {10.1016/j.jhydrol.2021.126544},
  pages     = {16},
  year      = {2021},
  abstract  = {Stochastic modeling of precipitation for estimation of hydrological extremes is an important element of flood risk assessment and management. The spatially consistent estimation of rainfall fields and their temporal variability remains challenging and is addressed by various stochastic weather generators. In this study, two types of weather generators are evaluated against observed data and benchmarked regarding their ability to simulate spatio-temporal precipitation fields in the Rhine catchment. A multi-site station-based weather generator uses an auto-regressive model and estimates the spatial correlation structure between stations. Another weather generator is raster-based and uses the nearest-neighbor resampling technique for reshuffling daily patterns while preserving the correlation structure between the observations. Both weather generators perform well and are comparable at the point (station) scale with regards to daily mean and 99.9th percentile precipitation as well as concerning wet/dry frequencies and transition probabilities. The areal extreme precipitation at the sub-basin scale is however overestimated in the station-based weather generator due to an overestimation of the correlation structure between individual stations. The auto-regressive model tends to generate larger rainfall fields in space for extreme precipitation than observed, particularly in summer. The weather generator based on nearest-neighbor resampling reproduces the observed daily and multiday (5, 10 and 20) extreme events in a similar magnitude. Improvements in performance regarding wet frequencies and transition probabilities are recommended for both models.},
  language  = {en},
}

@article{BoettcherMerzLischeidetal.2014,
  author    = {B{\"o}ttcher, Steven and Merz, Christoph and Lischeid, Gunnar and Dannowski, Ralf},
  title     = {Using {Isomap} to differentiate between anthropogenic and natural effects on groundwater dynamics in a complex geological setting},
  journal   = {Journal of hydrology},
  volume    = {519},
  publisher = {Elsevier},
  address   = {Amsterdam},
  issn      = {0022-1694},
  doi       = {10.1016/j.jhydrol.2014.09.048},
  pages     = {1634--1641},
  year      = {2014},
  abstract  = {Due to increasing demands and competition for high quality groundwater resources in many parts of the world, there is an urgent need for efficient methods that shed light on the interplay between complex natural settings and anthropogenic impacts. Thus a new approach is introduced, that aims to identify and quantify the predominant processes or factors of influence that drive groundwater and lake water dynamics on a catchment scale. The approach involves a non-linear dimension reduction method called Isometric feature mapping (Isomap). This method is applied to time series of groundwater head and lake water level data from a complex geological setting in Northeastern Germany. Two factors explaining more than 95\% of the observed spatial variations are identified: (1) the anthropogenic impact of a waterworks in the study area and (2) natural groundwater recharge with different degrees of dampening at the respective sites of observation. The approach enables a presumption-free assessment to be made of the existing geological conception in the catchment, leading to an extension of the conception. Previously unknown hydraulic connections between two aquifers are identified, and connections revealed between surface water bodies and groundwater. (C) 2014 Elsevier B.V. All rights reserved.},
  language  = {en},
}

@article{DiGiacomoBindiParolaietal.2011,
  author    = {Di Giacomo, Domenico and Bindi, Dino and Parolai, Stefano and Oth, Adrien},
  title     = {Residual analysis of teleseismic {P-wave} energy magnitude estimates: inter- and intrastation variability},
  journal   = {Geophysical journal international},
  volume    = {185},
  number    = {3},
  publisher = {Wiley-Blackwell},
  address   = {Malden},
  issn      = {0956-540X},
  doi       = {10.1111/j.1365-246X.2011.05019.x},
  pages     = {1444--1454},
  year      = {2011},
  abstract  = {Computing the magnitude of an earthquake requires correcting for the propagation effects from the source to the receivers. This is often accomplished by performing numerical simulations using a suitable Earth model. In this work, the energy magnitude M(e) is considered and its determination is performed using theoretical spectral amplitude decay functions over teleseismic distances based on the global Earth model AK135Q. Since the high frequency part (above the corner frequency) of the source spectrum has to be considered in computing M(e), the influence of propagation and site effects may not be negligible and they could bias the single station M(e) estimations. Therefore, in this study we assess the inter- and intrastation distributions of errors by considering the M(e) residuals computed for a large data set of earthquakes recorded at teleseismic distances by seismic stations deployed worldwide. To separate the inter- and intrastation contribution of errors, we apply a maximum likelihood approach to the M(e) residuals. We show that the interstation errors (describing a sort of site effect for a station) are within +/- 0.2 magnitude units for most stations and their spatial distribution reflects the expected lateral variation affecting the velocity and attenuation of the Earth's structure in the uppermost layers, not accounted for by the 1-D AK135Q model. The variance of the intrastation error distribution (describing the record-to-record component of variability) is larger than the interstation one (0.240 against 0.159), and the spatial distribution of the errors is not random but shows specific patterns depending on the source-to-station paths. The set of coefficients empirically determined may be used in the future to account for the heterogeneities of the real Earth not considered in the theoretical calculations of the spectral amplitude decay functions used to correct the recorded data for propagation effects.},
  language  = {en},
}

@article{HammerOhrnbergerFaeh2013,
  author    = {Hammer, Conny and Ohrnberger, Matthias and Faeh, Donat},
  title     = {Classifying seismic waveforms from scratch: a case study in the alpine environment},
  journal   = {Geophysical journal international},
  volume    = {192},
  number    = {1},
  publisher = {Oxford University Press},
  address   = {Oxford},
  issn      = {0956-540X},
  doi       = {10.1093/gji/ggs036},
  pages     = {425--439},
  year      = {2013},
  abstract  = {Nowadays, an increasing amount of seismic data is collected by daily observatory routines. The basic step for successfully analyzing those data is the correct detection of various event types. However, the visually scanning process is a time-consuming task. Applying standard techniques for detection like the STA/LTA trigger still requires the manual control for classification. Here, we present a useful alternative. The incoming data stream is scanned automatically for events of interest. A stochastic classifier, called hidden Markov model, is learned for each class of interest enabling the recognition of highly variable waveforms. In contrast to other automatic techniques as neural networks or support vector machines the algorithm allows to start the classification from scratch as soon as interesting events are identified. Neither the tedious process of collecting training samples nor a time-consuming configuration of the classifier is required. An approach originally introduced for the volcanic task force action allows to learn classifier properties from a single waveform example and some hours of background recording. Besides a reduction of required workload this also enables to detect very rare events. Especially the latter feature provides a milestone point for the use of seismic devices in alpine warning systems. Furthermore, the system offers the opportunity to flag new signal classes that have not been defined before. We demonstrate the application of the classification system using a data set from the Swiss Seismological Survey achieving very high recognition rates. In detail we document all refinements of the classifier providing a step-by-step guide for the fast set up of a well-working classification system.},
  language  = {en},
}