@phdthesis{Zhelavskaya2020, author = {Zhelavskaya, Irina}, title = {Modeling of the Plasmasphere Dynamics}, doi = {10.25932/publishup-48243}, url = {http://nbn-resolving.de/urn:nbn:de:kobv:517-opus4-482433}, school = {Universit{\"a}t Potsdam}, pages = {xlii, 256}, year = {2020}, abstract = {The plasmasphere is a dynamic region of cold, dense plasma surrounding the Earth. Its shape and size are highly susceptible to variations in solar and geomagnetic conditions. Having an accurate model of plasma density in the plasmasphere is important for GNSS navigation and for predicting hazardous effects of radiation in space on spacecraft. The distribution of cold plasma and its dynamic dependence on solar wind and geomagnetic conditions remain, however, poorly quantified. Existing empirical models of plasma density tend to be oversimplified, as they are based on statistical averages over static parameters. Understanding the global dynamics of the plasmasphere using observations from space remains a challenge, as existing density measurements are sparse and limited to locations where satellites can provide in-situ observations. In this dissertation, we demonstrate how such sparse electron density measurements can be used to reconstruct the global electron density distribution in the plasmasphere and capture its dynamic dependence on solar wind and geomagnetic conditions. First, we develop an automated algorithm to determine the electron density from in-situ measurements of the electric field on the Van Allen Probes spacecraft. In particular, we design a neural network to infer the upper hybrid resonance frequency from the dynamic spectrograms obtained with the Electric and Magnetic Field Instrument Suite and Integrated Science (EMFISIS) instrumentation suite, which is then used to calculate the electron number density. The developed Neural-network-based Upper hybrid Resonance Determination (NURD) algorithm is applied to more than four years of EMFISIS measurements to produce the publicly available electron density data set. We utilize the obtained electron density data set to develop a new global model of plasma density by employing a neural network-based modeling approach. In addition to the location, the model takes the time history of geomagnetic indices as inputs, and produces electron density in the equatorial plane as an output. It is extensively validated using in-situ density measurements from the Van Allen Probes mission, and also by comparing the predicted global evolution of the plasmasphere with the global IMAGE EUV images of He+ distribution. The model successfully reproduces erosion of the plasmasphere on the night side as well as plume formation and evolution, and agrees well with data. The performance of neural networks strongly depends on the availability of training data, which is limited during intervals of high geomagnetic activity. In order to provide reliable density predictions during such intervals, we can employ physics-based modeling. We develop a new approach for optimally combining the neural network- and physics-based models of the plasmasphere by means of data assimilation. The developed approach utilizes advantages of both neural network- and physics-based modeling and produces reliable global plasma density reconstructions for quiet, disturbed, and extreme geomagnetic conditions. Finally, we extend the developed machine learning-based tools and apply them to another important problem in the field of space weather, the prediction of the geomagnetic index Kp. 
The Kp index is one of the most widely used indicators for space weather alerts and serves as input to various models, such as those for the thermosphere, the radiation belts, and the plasmasphere. It is therefore crucial to predict the Kp index accurately. Previous work in this area has mostly employed artificial neural networks to nowcast and make short-term predictions of Kp, basing its inferences on the recent history of Kp and solar wind measurements at L1. We analyze how the performance of neural networks compares to other machine learning algorithms for nowcasting and forecasting Kp for up to 12 hours ahead. Additionally, we investigate several machine learning and information theory methods for selecting the optimal inputs to a predictive model of Kp. The developed tools for feature selection can also be applied to other problems in space physics in order to reduce the input dimensionality and identify the most important drivers. Research outlined in this dissertation clearly demonstrates that machine learning tools can be used to develop empirical models from sparse data and to understand the underlying physical processes. Combining machine learning, physics-based modeling, and data assimilation allows us to develop novel methods benefiting from these different approaches.}, language = {en} } @article{SanchezWichtBaerenzung2020, author = {Sanchez, Sabrina and Wicht, Johannes and B{\"a}renzung, Julien}, title = {Predictions of the geomagnetic secular variation based on the ensemble sequential assimilation of geomagnetic field models by dynamo simulations}, series = {Earth, planets and space}, volume = {72}, journal = {Earth, planets and space}, number = {1}, publisher = {Springer}, address = {New York}, issn = {1880-5981}, doi = {10.1186/s40623-020-01279-y}, pages = {20}, year = {2020}, abstract = {The IGRF offers an important incentive for testing algorithms predicting the Earth's magnetic field changes, known as secular variation (SV), in a 5-year range. Here, we present an SV candidate model for the 13th IGRF that stems from a sequential ensemble data assimilation approach (EnKF). The ensemble consists of a number of parallel-running 3D-dynamo simulations. The assimilated data are geomagnetic field snapshots covering the years 1840 to 2000 from the COV-OBS.x1 model and 2001 to 2020 from the Kalmag model. A spectral covariance localization method, considering the couplings between spherical harmonics of the same equatorial symmetry and same azimuthal wave number, allows decreasing the ensemble size to about 100 while maintaining the stability of the assimilation. The quality of 5-year predictions is tested for the past two decades. These tests show that the assimilation scheme is able to reconstruct the overall SV evolution. They also suggest that a better 5-year forecast is obtained by keeping the SV constant than by using the dynamically evolving SV. However, the quality of the dynamical forecast steadily improves over the full assimilation window (180 years). We therefore propose the instantaneous SV estimate for 2020 from our assimilation as a candidate model for the IGRF-13. The ensemble approach provides uncertainty estimates, which closely match the residual differences with respect to the IGRF-13. Longer-term predictions for the evolution of the main magnetic field features over a 50-year range are also presented. We observe the further decrease of the axial dipole at a mean rate of 8 nT/year as well as a deepening and broadening of the South Atlantic Anomaly. 
The magnetic dip poles are seen to approach an eccentric dipole configuration.}, language = {en} } @article{Reich2011, author = {Reich, Sebastian}, title = {A dynamical systems framework for intermittent data assimilation}, series = {BIT : numerical mathematics ; the leading applied mathematics journal for all computational mathematicians}, volume = {51}, journal = {BIT : numerical mathematics ; the leading applied mathematics journal for all computational mathematicians}, number = {1}, publisher = {Springer}, address = {Dordrecht}, issn = {0006-3835}, doi = {10.1007/s10543-010-0302-4}, pages = {235 -- 249}, year = {2011}, abstract = {We consider the problem of discrete time filtering (intermittent data assimilation) for differential equation models and discuss methods for its numerical approximation. The focus is on methods based on ensemble/particle techniques and on the ensemble Kalman filter technique in particular. We summarize as well as extend recent work on continuous ensemble Kalman filter formulations, which provide a concise dynamical systems formulation of the combined dynamics-assimilation problem. Possible extensions to fully nonlinear ensemble/particle-based filters are also outlined using the framework of optimal transportation theory.}, language = {en} } @article{GottwaldReich2021, author = {Gottwald, Georg A. and Reich, Sebastian}, title = {Supervised learning from noisy observations}, series = {Physica : D, Nonlinear phenomena}, volume = {423}, journal = {Physica : D, Nonlinear phenomena}, publisher = {Elsevier}, address = {Amsterdam}, issn = {0167-2789}, doi = {10.1016/j.physd.2021.132911}, pages = {15}, year = {2021}, abstract = {Data-driven prediction and physics-agnostic machine-learning methods have attracted increased interest in recent years, achieving forecast horizons going well beyond those to be expected for chaotic dynamical systems. In a separate strand of research, data assimilation has been successfully used to optimally combine forecast models and their inherent uncertainty with incoming noisy observations. The key idea in our work here is to achieve increased forecast capabilities by judiciously combining machine-learning algorithms and data assimilation. We combine the physics-agnostic data-driven approach of random feature maps as a forecast model within an ensemble Kalman filter data assimilation procedure. The machine-learning model is learned sequentially by incorporating incoming noisy observations. We show that the obtained forecast model has remarkably good forecast skill while being computationally cheap once trained. Going beyond the task of forecasting, we show that our method can be used to generate reliable ensembles for probabilistic forecasting as well as to learn effective model closure in multi-scale systems.}, language = {en} } @article{CucchiHesseKawaetal.2019, author = {Cucchi, Karma and Hesse, Falk and Kawa, Nura and Wang, Changhong and Rubin, Yoram}, title = {Ex-situ priors: A Bayesian hierarchical framework for defining informative prior distributions in hydrogeology}, series = {Advances in water resources}, volume = {126}, journal = {Advances in water resources}, publisher = {Elsevier}, address = {Oxford}, issn = {0309-1708}, doi = {10.1016/j.advwatres.2019.02.003}, pages = {65 -- 78}, year = {2019}, abstract = {Stochastic modeling is a common practice for modeling uncertainty in hydrogeology. 
In stochastic modeling, aquifer properties are characterized by their probability density functions (PDFs). The Bayesian approach for inverse modeling is often used to assimilate information from field measurements collected at a site into properties' posterior PDFs. This necessitates the definition of a prior PDF, which characterizes the knowledge of hydrological properties before undertaking any investigation at the site and usually comes from previous studies at similar sites. In this paper, we introduce a Bayesian hierarchical algorithm capable of assimilating various types of information, such as point measurements, bounds, and moments, into a single, informative PDF that we call the ex-situ prior. This informative PDF summarizes the ex-situ information available about a hydrogeological parameter at a site of interest, which can then be used as a prior PDF in future studies at the site. We demonstrate the behavior of the algorithm on several synthetic case studies, compare it to other methods described in the literature, and illustrate the approach by applying it to a public open-access hydrogeological dataset.}, language = {en} } @article{AcevedoReichCubasch2016, author = {Acevedo, Walter and Reich, Sebastian and Cubasch, Ulrich}, title = {Towards the assimilation of tree-ring-width records using ensemble Kalman filtering techniques}, series = {Climate dynamics : observational, theoretical and computational research on the climate system}, volume = {46}, journal = {Climate dynamics : observational, theoretical and computational research on the climate system}, publisher = {Springer}, address = {New York}, issn = {0930-7575}, doi = {10.1007/s00382-015-2683-1}, pages = {1909 -- 1920}, year = {2016}, abstract = {This paper investigates the applicability of the Vaganov-Shashkin-Lite (VSL) forward model for tree-ring-width chronologies as an observation operator within a proxy data assimilation (DA) setting. Based on the principle of limiting factors, VSL combines temperature and moisture time series in a nonlinear fashion to obtain simulated TRW chronologies. When used as an observation operator, this modelling approach implies three compounding, challenging features: (1) time averaging, (2) "switching recording" of two variables, and (3) bounded response windows leading to a "thresholded response". We generate pseudo-TRW observations from a chaotic two-scale dynamical system, used as a cartoon of the atmosphere-land system, and attempt to assimilate them via ensemble Kalman filtering techniques. Results within our simplified setting reveal that VSL's nonlinearities may lead to a considerable loss of assimilation skill compared to the utilization of a time-averaged (TA) linear observation operator. In order to understand this undesired effect, we embed VSL's formulation into the framework of fuzzy logic (FL) theory, which thereby exposes multiple representations of the principle of limiting factors. DA experiments employing three alternative growth rate functions disclose a strong link between the lack of smoothness of the growth rate function and the loss of optimality in the estimate of the TA state. Accordingly, VSL's performance as an observation operator can be enhanced by resorting to smoother FL representations of the principle of limiting factors. This finding fosters new interpretations of tree-ring-growth limitation processes.}, language = {en} }