@article{BeauvalHainzlScherbaum2006, author = {Beauval, C{\'e}line and Hainzl, Sebastian and Scherbaum, Frank}, title = {The impact of the spatial uniform distribution of seismicity on probabilistic seismic-hazard estimation}, series = {Bulletin of the Seismological Society of America}, volume = {96}, journal = {Bulletin of the Seismological Society of America}, number = {6}, publisher = {GeoScienceWorld}, address = {Alexandria, Va.}, issn = {0037-1106}, doi = {10.1785/0120060073}, pages = {2465 -- 2471}, year = {2006}, abstract = {The first step in the estimation of probabilistic seismic hazard in a region commonly consists of the definition and characterization of the relevant seismic sources. Because in low-seismicity regions seismicity is often rather diffuse and faults are difficult to identify, large areal source zones are mostly used. The corresponding hypothesis is that seismicity is uniformly distributed inside each areal seismic source zone. In this study, the impact of this hypothesis on the probabilistic hazard estimation is quantified through the generation of synthetic spatial seismicity distributions. Fractal seismicity distributions are generated inside a given source zone and probabilistic hazard is computed for a set of sites located inside this zone. In our study, the impact of the spatial seismicity distribution is defined as the deviation from the hazard value obtained for a spatially uniform seismicity distribution. From the generation of a large number of synthetic distributions, the correlation between the fractal dimension D and the impact is derived. The results show that the assumption of spatially uniform seismicity tends to bias the hazard to higher values. The correlation can be used to determine the systematic biases and uncertainties for hazard estimations in real cases, where the fractal dimension has been determined. We apply the technique in Germany (Cologne area) and in France (Alps).}, language = {en} }

@article{RichterHainzlDahmetal.2020, author = {Richter, Gudrun and Hainzl, Sebastian and Dahm, Torsten and Z{\"o}ller, Gert}, title = {Stress-based, statistical modeling of the induced seismicity at the Groningen gas field}, series = {Environmental earth sciences}, volume = {79}, journal = {Environmental earth sciences}, number = {11}, publisher = {Springer}, address = {New York}, issn = {1866-6280}, doi = {10.1007/s12665-020-08941-4}, pages = {15}, year = {2020}, abstract = {Groningen is the largest onshore gas field under production in Europe. The pressure depletion of the gas field started in 1963. In 1991, the first induced micro-earthquakes were located at reservoir level, with rates increasing in the following decades. Most of these events are of magnitude less than 2.0 and cannot be felt. However, the maximum observed magnitudes continuously increased over the years until the largest, significant event with ML=3.6 was recorded in 2014, which finally led to the decision to reduce the production. This causal sequence highlights the crucial role of understanding and modeling the relation between production and induced seismicity for economic planning and hazard assessment. Here we test whether the induced seismicity related to gas exploration can be modeled by the statistical response of fault networks with rate-and-state-dependent frictional behavior. We use the long and complete local seismic catalog and additionally detailed information on production-induced changes at the reservoir level to test different seismicity models.
Both the changes of the fluid pressure and of the reservoir compaction are tested as input to approximate the Coulomb stress changes. We find that the rate-and-state model with a constant tectonic background seismicity rate can reproduce the observed long delay of the seismicity onset. In contrast, so-called Coulomb failure models with instantaneous earthquake nucleation need to assume that all faults are initially far from a critical state of stress to explain the delay. Our rate-and-state model based on the fluid pore pressure fits the spatiotemporal pattern of the seismicity best, and the fit further improves when the fault density and orientation are taken into account. Despite its simplicity with only three free parameters, the rate-and-state model can reproduce the main statistical features of the observed activity.}, language = {en} }

@article{FaenzaHainzlScherbaum2009, author = {Faenza, Licia and Hainzl, Sebastian and Scherbaum, Frank}, title = {Statistical analysis of the Central-Europe seismicity}, issn = {0040-1951}, doi = {10.1016/j.tecto.2008.04.030}, year = {2009}, abstract = {The aim of this paper is to characterize the spatio-temporal distribution of Central-Europe seismicity. Specifically, by using a non-parametric statistical approach, the proportional hazard model, leading to an empirical estimation of the hazard function, we provide some constraints on the time behavior of earthquake generation mechanisms. The results indicate that the most conspicuous characteristic of Mw 4.0+ earthquakes is a temporal clustering lasting a couple of years. This suggests that the probability of occurrence increases immediately after a previous event. After a few years, the process becomes almost time independent. Furthermore, we investigate the cluster properties of the seismicity of Central Europe by comparing the obtained result with those of synthetic catalogs generated by the epidemic-type aftershock sequence (ETAS) model, which has previously been applied successfully to short-term clustering. Our results indicate that the ETAS model is not well suited to describe the seismicity as a whole, while it is able to capture the features of the short-term behaviour. Remarkably, similar results have been previously found for Italy using a higher magnitude threshold.}, language = {en} }

@article{Hainzl2004, author = {Hainzl, Sebastian}, title = {Seismicity patterns of earthquake swarms due to fluid intrusion and stress triggering}, issn = {0956-540X}, year = {2004}, abstract = {Earthquake swarms are often assumed to result from an intrusion of fluids into the seismogenic zone, causing seismicity patterns which significantly differ from aftershock sequences. But neither the temporal evolution nor the energy release of earthquake swarms is generally well understood. Because of the lack of descriptive empirical laws, the comparison with model simulations is typically restricted to aspects of the overall behaviour such as the frequency-magnitude distribution. However, previous investigations into a large earthquake swarm which occurred in the year 2000 in Vogtland/northwest Bohemia, Central Europe, revealed some well-defined characteristics which allow a rigorous test of model assumptions. In this study, simulations are performed of a discretized fault plane embedded in a 3-D elastic half-space. Earthquakes are triggered by fluid intrusion as well as by co-seismic and post-seismic stress changes.
The model is able to reproduce the main observations, such as the fractal temporal occurrence of earthquakes, embedded aftershock sequences, and a power-law increase of the average seismic moment release. All these characteristics are found to result from stress triggering, whereas fluid diffusion is manifested in the spatiotemporal spreading of the hypocentres.}, language = {en} }

@misc{WoithPetersenHainzletal.2018, author = {Woith, Heiko and Petersen, Gesa Maria and Hainzl, Sebastian and Dahm, Torsten}, title = {Review: Can Animals Predict Earthquakes?}, series = {Bulletin of the Seismological Society of America}, volume = {108}, journal = {Bulletin of the Seismological Society of America}, number = {3A}, publisher = {Seismological Society of America}, address = {Albany}, issn = {0037-1106}, doi = {10.1785/0120170313}, pages = {1031 -- 1045}, year = {2018}, abstract = {In public perception, abnormal animal behavior is widely assumed to be a potential earthquake precursor, in strong contrast to the viewpoint in the natural sciences. Proponents of earthquake prediction via animals claim that animals feel and react abnormally to small changes in environmental and physico-chemical parameters related to the earthquake preparation process. In seismology, however, observational evidence for changes of physical parameters before earthquakes is very weak. In this study, we review 180 publications regarding abnormal animal behavior before earthquakes and analyze and discuss them with respect to (1) magnitude-distance relations, (2) foreshock activity, and (3) the quality and length of the published observations. More than 700 records of claimed animal precursors related to 160 earthquakes are reviewed, covering unusual behavior of more than 130 species. The precursor times range from months to seconds prior to the earthquakes, and the distances from a few to hundreds of kilometers. However, only 14 time series were published, whereas all other records are single observations. The time series are often short (the longest is 1 yr), or only small excerpts of the full data set are shown. The probability density of foreshocks and the occurrence of animal precursors are strikingly similar, suggesting that at least parts of the reported animal precursors are in fact related to foreshocks. Another major difficulty for a systematic and statistical analysis is the high diversity of the data, which are often only anecdotal and retrospective. The study clearly demonstrates strong weaknesses or even deficits in many of the published reports on possible abnormal animal behavior. To improve the research on precursors, we suggest a scheme of yes and no questions to be assessed to ensure the quality of such claims.}, language = {en} }

@misc{DahmBeckerBischoffetal.2013, author = {Dahm, Torsten and Becker, Dirk and Bischoff, Monika and Cesca, Simone and Dost, B. and Fritschen, R. and Hainzl, Sebastian and Klose, C. D. and Kuhn, D. and Lasocki, S.
and Meier, Thomas and Ohrnberger, Matthias and Rivalta, Eleonora and Wegler, Ulrich and Husen, Stephan}, title = {Recommendation for the discrimination of human-related and natural seismicity}, series = {Journal of seismology}, volume = {17}, journal = {Journal of seismology}, number = {1}, publisher = {Springer}, address = {Dordrecht}, issn = {1383-4649}, doi = {10.1007/s10950-012-9295-6}, pages = {197 -- 202}, year = {2013}, abstract = {Various techniques are utilized by the seismological community, extractive industries, energy and geoengineering companies to identify earthquake nucleation processes in close proximity to engineering operation points. These operations may comprise fluid extraction or injection, artificial water reservoir impoundments, open-pit and deep mining, deep geothermal power generation, or carbon sequestration. In this letter to the editor, we outline several lines of investigation that we suggest following to address the discrimination problem between natural seismicity and seismic events induced or triggered by geoengineering activities. These suggestions have been developed by a group of experts during several meetings and workshops, and we feel that their publication as a summary report is helpful for the geoscientific community. Specific investigation procedures and discrimination approaches, on which our recommendations are based, are also published in this Special Issue (SI) of the Journal of Seismology.}, language = {en} }

@article{BeauvalHainzlScherbaum2006a, author = {Beauval, C{\'e}line and Hainzl, Sebastian and Scherbaum, Frank}, title = {Probabilistic seismic hazard estimation in low-seismicity regions considering non-Poissonian seismic occurrence}, issn = {0956-540X}, doi = {10.1111/j.1365-246X.2006.02863.x}, year = {2006}, abstract = {In low-seismicity regions, such as France or Germany, the estimation of probabilistic seismic hazard must cope with the difficult identification of active faults and with the low amount of seismic data available. Since the probabilistic hazard method was initiated, most studies assume a Poissonian occurrence of earthquakes. Here we propose a method that enables the inclusion of time and space dependences between earthquakes into the probabilistic estimation of hazard. Combining the Epidemic Type Aftershock Sequence (ETAS) seismicity model with a Monte Carlo technique, aftershocks are naturally accounted for in the hazard determination. The method is applied to the Pyrenees region in Southern France. The impact on hazard of declustering and of the usual assumption that earthquakes occur according to a Poisson process is quantified, showing that aftershocks contribute on average less than 5 per cent to the probabilistic hazard, with an upper bound around 18 per cent.}, language = {en} }

@article{SchmedesHainzlReameretal.2005, author = {Schmedes, J. and Hainzl, Sebastian and Reamer, S. K. and Scherbaum, Frank and Hinzen, K. G.}, title = {Moment release in the Lower Rhine Embayment, Germany : seismological perspective of the deformation process}, issn = {0956-540X}, year = {2005}, abstract = {An important task of seismic hazard assessment consists of estimating the rate of seismic moment release, which is correlated with the rate of tectonic deformation and the seismic coupling. However, the estimates of deformation depend on the type of information utilized (e.g. geodetic, geological, seismic) and include large uncertainties.
We therefore estimate the deformation rate in the Lower Rhine Embayment (LRE), Germany, using an integrated approach in which the uncertainties have been systematically incorporated. On the basis of a new homogeneous earthquake catalogue, we initially determine the frequency-magnitude distribution by statistical methods. In particular, we focus on an adequate estimation of the upper bound of the Gutenberg-Richter relation and demonstrate the importance of additional palaeoseismological information. The integration of seismological and geological information yields a probability distribution of the upper bound magnitude. Using this distribution together with the distribution of Gutenberg-Richter a and b values, we perform Monte Carlo simulations to derive the seismic moment release as a function of the observation time. The seismic moment release estimated from synthetic earthquake catalogues of short catalogue length is found to systematically underestimate the long-term moment rate, which can be determined analytically. The moment release recorded in the LRE over the last 250 yr is found to be in good agreement with the probability distribution resulting from the Monte Carlo simulations. Furthermore, the long-term distribution is, within its uncertainties, consistent with the moment rate derived from geological measurements, indicating an almost complete seismic coupling in this region. By means of Kostrov's formula, we additionally calculate the full deformation rate tensor using the distribution of known focal mechanisms in the LRE. Finally, we use the same approach to calculate the seismic moment and the deformation rate for two subsets of the catalogue corresponding to the east- and west-dipping faults, respectively.}, language = {en} }

@article{SharmaHainzlZoelleretal.2020, author = {Sharma, Shubham and Hainzl, Sebastian and Z{\"o}ller, Gert and Holschneider, Matthias}, title = {Is Coulomb stress the best choice for aftershock forecasting?}, series = {Journal of geophysical research : Solid earth}, volume = {125}, journal = {Journal of geophysical research : Solid earth}, number = {9}, publisher = {American Geophysical Union}, address = {Washington}, issn = {2169-9313}, doi = {10.1029/2020JB019553}, pages = {12}, year = {2020}, abstract = {The Coulomb failure stress (CFS) criterion is the most commonly used method for predicting spatial distributions of aftershocks following large earthquakes. However, large uncertainties are always associated with the calculation of Coulomb stress change. The uncertainties mainly arise due to nonunique slip inversions and unknown receiver faults; especially for the latter, results are highly dependent on the choice of the assumed receiver mechanism. Based on binary tests (aftershocks yes/no), recent studies suggest that alternative stress quantities, a distance-slip probabilistic model, and deep neural network (DNN) approaches are all superior to CFS with a predefined receiver mechanism. To challenge this conclusion, which might have large implications, we use 289 slip inversions from the SRCMOD database to calculate more realistic CFS values for a layered half-space and variable receiver mechanisms. We also analyze the effect of the magnitude cutoff, grid size variation, and aftershock duration to verify the use of receiver operating characteristic (ROC) analysis for the ranking of stress metrics. The observations suggest that introducing a layered half-space does not improve the stress maps and ROC curves.
However, results improve significantly for larger aftershocks and shorter time periods, but without changing the ranking. We also go beyond binary testing and apply alternative statistics to test the ability to estimate aftershock numbers, which confirm that simple stress metrics perform better than the classic Coulomb failure stress calculations and are also better than the distance-slip probabilistic model.}, language = {en} }

@article{HainzlZoellerMain2006, author = {Hainzl, Sebastian and Z{\"o}ller, Gert and Main, Ian}, title = {Introduction to special issue: Dynamics of seismicity patterns and earthquake triggering - Preface}, series = {Tectonophysics : international journal of geotectonics and the geology and physics of the interior of the earth}, volume = {424}, journal = {Tectonophysics : international journal of geotectonics and the geology and physics of the interior of the earth}, number = {Special issue}, publisher = {Elsevier}, address = {Amsterdam}, issn = {0040-1951}, doi = {10.1016/j.tecto.2006.03.034}, pages = {135 -- 138}, year = {2006}, language = {en} }