@article{ZoellerHolschneiderHainzl2013,
  author    = {Z{\"o}ller, Gert and Holschneider, Matthias and Hainzl, Sebastian},
  title     = {The Maximum Earthquake Magnitude in a Time Horizon: Theory and Case Studies},
  series    = {Bulletin of the Seismological Society of America},
  volume    = {103},
  journal   = {Bulletin of the Seismological Society of America},
  number    = {2A},
  publisher = {Seismological Society of America},
  address   = {Albany},
  issn      = {0037-1106},
  doi       = {10.1785/0120120013},
  pages     = {860--875},
  year      = {2013},
  abstract  = {We show how the maximum magnitude within a predefined future time horizon may be estimated from an earthquake catalog within the context of Gutenberg-Richter statistics. The aim is to carry out a rigorous uncertainty assessment and to calculate precise confidence intervals based on an imposed level of confidence alpha. In detail, we present a model for the estimation of the maximum magnitude to occur in a time interval T_f in the future, given a complete earthquake catalog for a time period T in the past and, if available, paleoseismic events. For this goal, we solely assume that earthquakes follow a stationary Poisson process in time with unknown productivity Lambda and obey the Gutenberg-Richter law in the magnitude domain with unknown b-value. The random variables Lambda and b are estimated by means of Bayes' theorem with noninformative prior distributions. Results based on synthetic catalogs and on retrospective calculations for historic catalogs from the highly active area of Japan and the low-seismicity but high-risk Lower Rhine Embayment (LRE) in Germany indicate that the estimated magnitudes are close to the true values. Finally, we discuss whether the techniques can be extended to meet the safety requirements for critical facilities such as nuclear power plants. For this aim, the maximum magnitude for all times has to be considered. In agreement with earlier work, we find that this parameter is not a useful quantity from the viewpoint of statistical inference.},
  language  = {en}
}

@article{HainzlZoellerBrietzkeetal.2013,
  author    = {Hainzl, Sebastian and Z{\"o}ller, Gert and Brietzke, Gilbert B. and Hinzen, Klaus-G.},
  title     = {Comparison of deterministic and stochastic earthquake simulators for fault interactions in the Lower Rhine Embayment, Germany},
  series    = {Geophysical Journal International},
  volume    = {195},
  journal   = {Geophysical Journal International},
  number    = {1},
  publisher = {Oxford University Press},
  address   = {Oxford},
  issn      = {0956-540X},
  doi       = {10.1093/gji/ggt271},
  pages     = {684--694},
  year      = {2013},
  abstract  = {Time-dependent probabilistic seismic hazard assessment requires a stochastic description of earthquake occurrences. While short-term seismicity models are well constrained by observations, the recurrences of characteristic on-fault earthquakes are only derived from theoretical considerations, uncertain palaeo-events or proxy data. Despite the involved uncertainties and complexity, simple statistical models for a quasi-periodic recurrence of on-fault events are implemented in seismic hazard assessments. To test the applicability of statistical models, such as the Brownian relaxation oscillator or the stress release model, we perform a systematic comparison with deterministic simulations based on rate- and state-dependent friction, high-resolution representations of fault systems and quasi-dynamic rupture propagation.
For the specific fault network of the Lower Rhine Embayment, Germany, we run both stochastic and deterministic model simulations based on the same fault geometries and stress interactions. Our results indicate that the stochastic simulators are able to reproduce the first-order characteristics of the major earthquakes on isolated faults as well as on coupled faults with moderate stress interactions. However, we find that all tested statistical models fail to reproduce the characteristics of strongly coupled faults, because multisegment rupturing resulting from a spatiotemporally correlated stress field is underestimated in the stochastic simulators. Our results suggest that stochastic models have to be extended by multirupture probability distributions to provide more reliable results.},
  language  = {en}
}

@article{Zoeller2013,
  author    = {Z{\"o}ller, Gert},
  title     = {Convergence of the frequency-magnitude distribution of global earthquakes - maybe in 200 years},
  series    = {Geophysical Research Letters},
  volume    = {40},
  journal   = {Geophysical Research Letters},
  number    = {15},
  publisher = {American Geophysical Union},
  address   = {Washington},
  issn      = {0094-8276},
  doi       = {10.1002/grl.50779},
  pages     = {3873--3877},
  year      = {2013},
  abstract  = {I study the ability to estimate the tail of the frequency-magnitude distribution of global earthquakes. While power-law scaling for small earthquakes is well supported by data, the tail remains speculative. In a recent study, Bell et al. (2013) claim that the frequency-magnitude distribution of global earthquakes converges to a tapered Pareto distribution. I show that this finding results from data-fitting errors, namely from the biased maximum likelihood estimation of the corner magnitude theta in strongly undersampled models. In particular, the estimation of theta depends solely on the few largest events in the catalog. Taking this into account, I compare various state-of-the-art models for the global frequency-magnitude distribution. After discarding undersampled models, the remaining ones, including the unbounded Gutenberg-Richter distribution, all perform equally well and are therefore indistinguishable. Convergence to a specific distribution, if it ever takes place, requires at least about 200 years of homogeneous recording of global seismicity.},
  language  = {en}
}