@article{ShebalinNarteauZecharetal.2014,
  author        = {Shebalin, Peter N. and Narteau, Cl{\'e}ment and Zechar, Jeremy Douglas and Holschneider, Matthias},
  title         = {Combining earthquake forecasts using differential probability gains},
  series        = {Earth, Planets and Space},
  volume        = {66},
  journal       = {Earth, Planets and Space},
  publisher     = {Springer},
  address       = {Heidelberg},
  issn          = {1880-5981},
  doi           = {10.1186/1880-5981-66-37},
  pages         = {14},
  internal-note = {pages looks like a page count from the repository export; DOI suggests article number 37 -- verify},
  year          = {2014},
  abstract      = {We describe an iterative method to combine seismicity forecasts. With this method, we produce the next generation of a starting forecast by incorporating predictive skill from one or more input forecasts. For a single iteration, we use the differential probability gain of an input forecast relative to the starting forecast. At each point in space and time, the rate in the next-generation forecast is the product of the starting rate and the local differential probability gain. The main advantage of this method is that it can produce high forecast rates using all types of numerical forecast models, even those that are not rate-based. Naturally, a limitation of this method is that the input forecast must have some information not already contained in the starting forecast. We illustrate this method using the Every Earthquake a Precursor According to Scale (EEPAS) and Early Aftershocks Statistics (EAST) models, which are currently being evaluated at the US testing center of the Collaboratory for the Study of Earthquake Predictability. During a testing period from July 2009 to December 2011 (with 19 target earthquakes), the combined model we produce has better predictive performance - in terms of Molchan diagrams and likelihood - than the starting model (EEPAS) and the input model (EAST). Many of the target earthquakes occur in regions where the combined model has high forecast rates. Most importantly, the rates in these regions are substantially higher than if we had simply averaged the models.},
  language      = {en},
}

@article{BlaserOhrnbergerKruegeretal.2012,
  author    = {Blaser, Lilian and Ohrnberger, Matthias and Kr{\"u}ger, Frank and Scherbaum, Frank},
  title     = {Probabilistic tsunami threat assessment of 10 recent earthquakes offshore {Sumatra}},
  series    = {Geophysical Journal International},
  volume    = {188},
  journal   = {Geophysical Journal International},
  number    = {3},
  publisher = {Wiley-Blackwell},
  address   = {Malden},
  issn      = {0956-540X},
  doi       = {10.1111/j.1365-246X.2011.05324.x},
  pages     = {1273--1284},
  year      = {2012},
  abstract  = {Tsunami early warning (TEW) is a challenging task as a decision has to be made within few minutes on the basis of incomplete and error-prone data. Deterministic warning systems have difficulties in integrating and quantifying the intrinsic uncertainties. In contrast, probabilistic approaches provide a framework that handles uncertainties in a natural way. Recently, we have proposed a method using Bayesian networks (BNs) that takes into account the uncertainties of seismic source parameter estimates in TEW. In this follow-up study, the method is applied to 10 recent large earthquakes offshore Sumatra and tested for its performance. We have evaluated both the general model performance given the best knowledge we have today about the source parameters of the 10 events and the corresponding response on seismic source information evaluated in real-time. We find that the resulting site-specific warning level probabilities represent well the available tsunami wave measurements and observations. Difficulties occur in the real-time tsunami assessment if the moment magnitude estimate is severely over- or underestimated. In general, the probabilistic analysis reveals a considerably large range of uncertainties in the near-field TEW. By quantifying the uncertainties the BN analysis provides important additional information to a decision maker in a warning centre to deal with the complexity in TEW and to reason under uncertainty.},
  language  = {en},
}

@article{StraderSchneiderSchorlemmer2017,
  author    = {Strader, Anne and Schneider, Max and Schorlemmer, Danijel},
  title     = {Prospective and retrospective evaluation of five-year earthquake forecast models for {California}},
  series    = {Geophysical Journal International},
  volume    = {211},
  journal   = {Geophysical Journal International},
  publisher = {Oxford Univ. Press},
  address   = {Oxford},
  issn      = {0956-540X},
  doi       = {10.1093/gji/ggx268},
  pages     = {239--251},
  year      = {2017},
  language  = {en},
}

@article{SanchezWichtBaerenzungetal.2019,
  author    = {Sanchez, S. and Wicht, J. and Baerenzung, Julien and Holschneider, Matthias},
  title     = {Sequential assimilation of geomagnetic observations},
  series    = {Geophysical Journal International},
  volume    = {217},
  journal   = {Geophysical Journal International},
  number    = {2},
  publisher = {Oxford Univ. Press},
  address   = {Oxford},
  issn      = {0956-540X},
  doi       = {10.1093/gji/ggz090},
  pages     = {1434--1450},
  year      = {2019},
  abstract  = {High-precision observations of the present-day geomagnetic field by ground-based observatories and satellites provide unprecedented conditions for unveiling the dynamics of the Earth's core. Combining geomagnetic observations with dynamo simulations in a data assimilation (DA) framework allows the reconstruction of past and present states of the internal core dynamics. The essential information that couples the internal state to the observations is provided by the statistical correlations from a numerical dynamo model in the form of a model covariance matrix. Here we test a sequential DA framework, working through a succession of forecast and analysis steps, that extracts the correlations from an ensemble of dynamo models. The primary correlations couple variables of the same azimuthal wave number, reflecting the predominant axial symmetry of the magnetic field. Synthetic tests show that the scheme becomes unstable when confronted with high-precision geomagnetic observations. Our study has identified spurious secondary correlations as the origin of the problem. Keeping only the primary correlations by localizing the covariance matrix with respect to the azimuthal wave number suffices to stabilize the assimilation. While the first analysis step is fundamental in constraining the large-scale interior state, further assimilation steps refine the smaller and more dynamical scales. This refinement turns out to be critical for long-term geomagnetic predictions. Increasing the assimilation steps from one to 18 roughly doubles the prediction horizon for the dipole from about three to six centuries, and from 30 to about 60 yr for smaller observable scales. This improvement is also reflected on the predictability of surface intensity features such as the South Atlantic Anomaly. Intensity prediction errors are decreased roughly by a half when assimilating long observation sequences.},
  language  = {en},
}