@phdthesis{Zoeller2005,
  author   = {Z{\"o}ller, Gert},
  title    = {Critical states of seismicity: modeling and data analysis},
  url      = {http://nbn-resolving.de/urn:nbn:de:kobv:517-opus-7427},
  school   = {Universit{\"a}t Potsdam},
  year     = {2005},
  abstract = {The occurrence of earthquakes is characterized by a high degree of spatiotemporal complexity. Although numerous patterns, e.g. fore- and aftershock sequences, are well-known, the underlying mechanisms are not observable and thus not understood. Because the recurrence times of large earthquakes are usually decades or centuries, the number of such events in corresponding data sets is too small to draw conclusions with reasonable statistical significance. Therefore, the present study combines both numerical modeling and analysis of real data in order to unveil the relationships between physical mechanisms and observational quantities. The key hypothesis is the validity of the so-called ``critical point concept'' for earthquakes, which assumes large earthquakes to occur as phase transitions in a spatially extended many-particle system, similar to percolation models. New concepts are developed to detect critical states in simulated and in natural data sets. The results indicate that important features of seismicity like the frequency-size distribution and the temporal clustering of earthquakes depend on frictional and structural fault parameters. In particular, the degree of quenched spatial disorder (the ``roughness'') of a fault zone determines whether large earthquakes occur quasiperiodically or more clustered. This illustrates the power of numerical models in order to identify regions in parameter space, which are relevant for natural seismicity. The critical point concept is verified for both synthetic and natural seismicity, in terms of a critical state which precedes a large earthquake: a gradual roughening of the (unobservable) stress field leads to a scale-free (observable) frequency-size distribution. Furthermore, the growth of the spatial correlation length and the acceleration of the seismic energy release prior to large events is found. The predictive power of these precursors is, however, limited. Instead of forecasting time, location, and magnitude of individual events, a contribution to a broad multiparameter approach is encouraging.},
  subject  = {Seismizit{\"a}t},
  language = {en},
}

@phdthesis{Cattania2015,
  author   = {Cattania, Camilla},
  title    = {Improvement of aftershock models based on {Coulomb} stress changes and rate-and-state dependent friction},
  url      = {http://nbn-resolving.de/urn:nbn:de:kobv:517-opus4-87097},
  school   = {Universit{\"a}t Potsdam},
  pages    = {xvi, 123},
  year     = {2015},
  abstract = {Earthquake clustering has proven the most useful tool to forecast changes in seismicity rates in the short and medium term (hours to months), and efforts are currently being made to extend the scope of such models to operational earthquake forecasting. The overarching goal of the research presented in this thesis is to improve physics-based earthquake forecasts, with a focus on aftershock sequences. Physical models of triggered seismicity are based on the redistribution of stresses in the crust, coupled with the rate-and-state constitutive law proposed by Dieterich to calculate changes in seismicity rate. This type of model is known as Coulomb rate-and-state (CRS) models. In spite of the success of the Coulomb hypothesis, CRS models typically performed poorly in comparison to statistical ones, and they have been underrepresented in the operational forecasting context. In this thesis, I address some of these issues, and in particular these questions: (1) How can we realistically model the uncertainties and heterogeneity of the mainshock stress field? (2) What is the effect of time dependent stresses in the postseismic phase on seismicity? I focus on two case studies from different tectonic settings: the Mw 9.0 Tohoku megathrust and the Mw 6.0 Parkfield strike-slip earthquake. I study aleatoric uncertainties using a Monte Carlo method. I find that the existence of multiple receiver faults is the most important source of intrinsic stress heterogeneity, and CRS models perform better when this variability is taken into account. Epistemic uncertainties inherited from the slip models also have a significant impact on the forecast, and I find that an ensemble model based on several slip distributions outperforms most individual models. I address the role of postseismic stresses due to aseismic slip on the mainshock fault (afterslip) and to the redistribution of stresses by previous aftershocks (secondary triggering). I find that modeling secondary triggering improves model performance. The effect of afterslip is less clear, and difficult to assess for near-fault aftershocks due to the large uncertainties of the afterslip models. Off-fault events, on the other hand, are less sensitive to the details of the slip distribution: I find that following the Tohoku earthquake, afterslip promotes seismicity in the Fukushima region. To evaluate the performance of the improved CRS models in a pseudo-operational context, I submitted them for independent testing to a collaborative experiment carried out by CSEP for the 2010-2012 Canterbury sequence. Preliminary results indicate that physical models generally perform well compared to statistical ones, suggesting that CRS models may have a role to play in the future of operational forecasting. To facilitate efforts in this direction, and to enable future studies of earthquake triggering by time dependent processes, I have made the code open source. In the final part of this thesis I summarize the capabilities of the program and outline technical aspects regarding performance and parallelization strategies.},
  language = {en},
}

@phdthesis{BayonaViveros2021,
  author   = {Bayona Viveros, Jose},
  title    = {Constructing global stationary seismicity models from the long-term balance of interseismic strain measurements and earthquake-catalog data},
  doi      = {10.25932/publishup-50927},
  url      = {http://nbn-resolving.de/urn:nbn:de:kobv:517-opus4-509270},
  school   = {Universit{\"a}t Potsdam},
  pages    = {ix, 83},
  year     = {2021},
  abstract = {One third of the world's population lives in areas where earthquakes causing at least slight damage are frequently expected. Thus, the development and testing of global seismicity models are essential to improving seismic hazard estimates and earthquake-preparedness protocols for effective disaster-risk mitigation. Currently, the availability and quality of geodetic data along plate-boundary regions provides the opportunity to construct global models of plate motion and strain rate, which can be translated into global maps of forecasted seismicity. Moreover, the broad coverage of existing earthquake catalogs facilitates the present-day calibration and testing of global seismicity models. As a result, modern global seismicity models can integrate two independent factors necessary for physics-based, long-term earthquake forecasting, namely interseismic crustal strain accumulation and sudden lithospheric stress release. In this dissertation, I present the construction of and testing results for two global ensemble seismicity models, aimed at providing mean rates of shallow (0-70 km) earthquake activity for seismic hazard assessment. These models depend on the Subduction Megathrust Earthquake Rate Forecast (SMERF2), a stationary seismicity approach for subduction zones, based on the conservation of moment principle and the use of regional ``geodesy-to-seismicity'' parameters, such as corner magnitudes, seismogenic thicknesses and subduction dip angles. Specifically, this interface-earthquake model combines geodetic strain rates with instrumentally-recorded seismicity to compute long-term rates of seismic and geodetic moment. Based on this, I derive analytical solutions for seismic coupling and earthquake activity, which provide this earthquake model with the initial abilities to properly forecast interface seismicity. Then, I integrate SMERF2 interface-seismicity estimates with earthquake computations in non-subduction zones provided by the Seismic Hazard Inferred From Tectonics based on the second iteration of the Global Strain Rate Map seismicity approach to construct the global Tectonic Earthquake Activity Model (TEAM). Thus, TEAM is designed to reduce number, and potentially spatial, earthquake inconsistencies of its predecessor tectonic earthquake model during the 2015-2017 period. Also, I combine this new geodetic-based earthquake approach with a global smoothed-seismicity model to create the World Hybrid Earthquake Estimates based on Likelihood scores (WHEEL) model. This updated hybrid model serves as an alternative earthquake-rate approach to the Global Earthquake Activity Rate model for forecasting long-term rates of shallow seismicity everywhere on Earth. Global seismicity models provide scientific hypotheses about when and where earthquakes may occur, and how big they might be. Nonetheless, the veracity of these hypotheses can only be either confirmed or rejected after prospective forecast evaluation. Therefore, I finally test the consistency and relative performance of these global seismicity models with independent observations recorded during the 2014-2019 pseudo-prospective evaluation period. As a result, hybrid earthquake models based on both geodesy and seismicity are the most informative seismicity models during the testing time frame, as they obtain higher information scores than their constituent model components. These results support the combination of interseismic strain measurements with earthquake-catalog data for improved seismicity modeling. However, further prospective evaluations are required to more accurately describe the capacities of these global ensemble seismicity models to forecast longer-term earthquake activity.},
  language = {en},
}