@article{VitaliSposiniSliusarenkoetal.2018,
  author = {Vitali, Silvia and Sposini, Vittoria and Sliusarenko, Oleksii and Paradisi, Paolo and Castellani, Gastone and Pagnini, Gianni},
  title = {Langevin equation in complex media and anomalous diffusion},
  series = {Interface : journal of the Royal Society},
  volume = {15},
  journal = {Interface : journal of the Royal Society},
  number = {145},
  publisher = {Royal Society},
  address = {London},
  issn = {1742-5689},
  doi = {10.1098/rsif.2018.0282},
  pages = {10},
  year = {2018},
  abstract = {The problem of biological motion is a very intriguing and topical issue. Many efforts are being focused on the development of novel modelling approaches for the description of anomalous diffusion in biological systems, such as the very complex and heterogeneous cell environment. Nevertheless, many questions are still open, such as the joint manifestation of statistical features in agreement with different models that can also be somewhat alternative to each other, e.g. continuous time random walk and fractional Brownian motion. To overcome these limitations, we propose a stochastic diffusion model with additive noise and linear friction force (linear Langevin equation), thus involving the explicit modelling of velocity dynamics. The complexity of the medium is parametrized via a population of intensity parameters (relaxation time and diffusivity of velocity), thus introducing a further source of randomness, in addition to the white noise, in the particle's dynamics. We prove that, for proper distributions of these parameters, we can obtain Gaussian anomalous diffusion, fractional diffusion and its generalizations.},
  language = {en}
}

@article{SliusarenkoVitaliSposinietal.2019,
  author = {Sliusarenko, Oleksii Yu and Vitali, Silvia and Sposini, Vittoria and Paradisi, Paolo and Chechkin, Aleksei V. and Castellani, Gastone and Pagnini, Gianni},
  title = {Finite-energy L{\'e}vy-type motion through heterogeneous ensemble of Brownian particles},
  series = {Journal of physics : A, Mathematical and theoretical},
  volume = {52},
  journal = {Journal of physics : A, Mathematical and theoretical},
  number = {9},
  publisher = {IOP Publ. Ltd.},
  address = {Bristol},
  issn = {1751-8113},
  doi = {10.1088/1751-8121/aafe90},
  pages = {27},
  year = {2019},
  abstract = {Complex systems are known to display anomalous diffusion, whose signature is a space/time scaling $x \sim t^{\delta}$ with $\delta \neq 1/2$ in the probability density function (PDF). Anomalous diffusion can emerge jointly with both Gaussian, e.g. fractional Brownian motion, and power-law decaying distributions, e.g. L{\'e}vy flights or L{\'e}vy walks (LWs). L{\'e}vy flights show anomalous scaling, but, since jumps of any size are allowed even at short times, they have infinite position variance, infinite energy and discontinuous paths. LWs, which are based on random trapping events, overcome these limitations: they resemble a L{\'e}vy-type power-law distribution that is truncated in the large displacement range, they have finite moments and finite energy and, even though their velocity is discontinuous, their paths are continuous. However, LWs do not take into account the role of strong heterogeneity in many complex systems, such as biological transport in the crowded cell environment. In this work we propose and discuss a model describing a heterogeneous ensemble of Brownian particles (HEBP). The velocity of each particle obeys a standard underdamped Langevin equation with a linear friction term and additive Gaussian noise.
Each particle is characterized by its own relaxation time and velocity diffusivity. We show that, for proper distributions of relaxation time and velocity diffusivity, the HEBP resembles some LW statistical features, in particular power-law decaying PDF, long-range correlations and anomalous diffusion, while at the same time keeping finite position moments and finite energy. The main differences between the HEBP model and two different LWs are investigated, finding that, even when both velocity and position PDFs are similar, they differ in four main aspects: (i) LWs are biscaling, while the HEBP is monoscaling; (ii) a transition from anomalous ($\delta \neq 1/2$) to normal ($\delta = 1/2$) diffusion in the long-time regime is seen in the HEBP and not in LWs; (iii) the power-law index of the position PDF and the space/time diffusion scaling are independent in the HEBP, while they both depend on the scaling of the interevent time PDF in LWs; (iv) at variance with LWs, our HEBP model obeys a fluctuation-dissipation theorem.},
  language = {en}
}

@phdthesis{Schanner2022,
  author = {Schanner, Maximilian Arthus},
  title = {Correlation based modeling of the archeomagnetic field},
  doi = {10.25932/publishup-55587},
  url = {http://nbn-resolving.de/urn:nbn:de:kobv:517-opus4-555875},
  school = {Universit{\"a}t Potsdam},
  pages = {vii, 146},
  year = {2022},
  abstract = {The geomagnetic main field is vital for life on Earth, as it shields our habitat against the solar wind and cosmic rays. It is generated by the geodynamo in the Earth's outer core and has rich dynamics on various timescales. Global models of the field are used to study the interaction of the field and incoming charged particles, but also to infer core dynamics and to feed numerical simulations of the geodynamo. Modern satellite missions, such as the SWARM or the CHAMP mission, support high resolution reconstructions of the global field. From the 19th century on, a global network of magnetic observatories has been established. It has been growing ever since, and global models can be constructed from the data it provides. Geomagnetic field models that extend further back in time rely on indirect observations of the field, i.e. thermoremanent records such as burnt clay or volcanic rocks and sediment records from lakes and seas. These indirect records come with (partially very large) uncertainties, introduced by the complex measurement methods and the dating procedure. Focusing on thermoremanent records only, the aim of this thesis is the development of a new modeling strategy for the global geomagnetic field during the Holocene, which takes the uncertainties into account and produces realistic estimates of the reliability of the model. This aim is approached by first considering snapshot models, in order to address the irregular spatial distribution of the records and the non-linear relation of the indirect observations to the field itself. In a Bayesian setting, a modeling algorithm based on Gaussian process regression is developed and applied to binned data. The modeling algorithm is then extended to the temporal domain and expanded to incorporate dating uncertainties. Finally, the algorithm is sequentialized to deal with numerical challenges arising from the size of the Holocene dataset. The central result of this thesis, including all of the aspects mentioned, is a new global geomagnetic field model. It covers the whole Holocene, back until 12000 BCE, and we call it ArchKalmag14k.
When considering the uncertainties that are produced together with the model, it is evident that before 6000 BCE the thermoremanent database is not sufficient to support global models. For more recent times, ArchKalmag14k can be used to analyze features of the field under consideration of posterior uncertainties. The algorithm for generating ArchKalmag14k can be applied to different datasets and is provided to the community as an open-source Python package.},
  language = {en}
}

@phdthesis{Lindinger2023,
  author = {Lindinger, Jakob},
  title = {Variational inference for composite Gaussian process models},
  doi = {10.25932/publishup-60444},
  url = {http://nbn-resolving.de/urn:nbn:de:kobv:517-opus4-604441},
  school = {Universit{\"a}t Potsdam},
  pages = {xi, 122},
  year = {2023},
  abstract = {Most machine learning methods provide only point estimates when being queried to predict on new data. This is problematic when the data is corrupted by noise, e.g. from imperfect measurements, or when the queried data point is very different to the data that the machine learning model has been trained with. Probabilistic modelling in machine learning naturally equips predictions with corresponding uncertainty estimates, which allows a practitioner to incorporate information about measurement noise into the modelling process and to know when not to trust the predictions. A well-understood, flexible probabilistic framework is provided by Gaussian processes, which are ideal as building blocks of probabilistic models. They lend themselves naturally to the problem of regression, i.e., being given a set of inputs and corresponding observations and then predicting likely observations for new unseen inputs, and can also be adapted to many more machine learning tasks. However, exactly inferring the optimal parameters of such a Gaussian process model (in a computationally tractable manner) is only possible for regression tasks in small data regimes. Otherwise, approximate inference methods are needed, the most prominent of which is variational inference. In this dissertation we study models that are composed of Gaussian processes embedded in other models in order to make those more flexible and/or probabilistic. The first example is deep Gaussian processes, which can be thought of as a small network of Gaussian processes and which can be employed for flexible regression. The second model class that we study is Gaussian process state-space models. These can be used for time-series modelling, i.e., the task of being given a stream of data ordered by time and then predicting future observations. For both model classes the state-of-the-art approaches offer a trade-off between expressive models and computational properties (e.g. speed or convergence properties) and mostly employ variational inference. Our goal is to improve inference in both models by first getting a deep understanding of the existing methods and then, based on this, designing better inference methods. We achieve this by either exploring the existing trade-offs or by providing general improvements applicable to multiple methods. We first provide an extensive background, introducing Gaussian processes and their sparse (approximate and efficient) variants. We continue with a description of the models under consideration in this thesis, deep Gaussian processes and Gaussian process state-space models, including detailed derivations and a theoretical comparison of existing methods.
Then we start analysing deep Gaussian processes more closely: trading off the properties (good optimisation versus expressivity) of state-of-the-art methods in this field, we propose a new variational-inference-based approach. We then demonstrate experimentally that our new algorithm leads to better calibrated uncertainty estimates than existing methods. Next, we turn our attention to Gaussian process state-space models, where we closely analyse the theoretical properties of existing methods. The understanding gained in this process leads us to propose a new inference scheme for general Gaussian process state-space models that incorporates effects on multiple time scales. This method is more efficient than previous approaches for long time series and outperforms its comparison partners on data sets in which effects on multiple time scales (fast and slowly varying dynamics) are present. Finally, we propose a new inference approach for Gaussian process state-space models that trades off the properties of state-of-the-art methods in this field. By combining variational inference with another approximate inference method, the Laplace approximation, we design an efficient algorithm that outperforms its comparison partners since it achieves better calibrated uncertainties.},
  language = {en}
}
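A minimal simulation sketch (an illustration, not code from the two articles above) of the heterogeneous ensemble of Brownian particles (HEBP): each particle obeys an underdamped Langevin equation with a linear friction term and additive Gaussian noise, and carries its own relaxation time and velocity diffusivity. The gamma parameter distributions, step size and ensemble size below are placeholder assumptions; the papers derive specific parameter distributions to obtain Levy-like statistics with finite energy. The exact Ornstein-Uhlenbeck update is used instead of plain Euler-Maruyama so that particles with very small relaxation times remain numerically stable.

import numpy as np

rng = np.random.default_rng(0)

n_particles = 10_000   # size of the heterogeneous ensemble
n_steps = 2_000
dt = 1e-2

# Hypothetical population of intensity parameters (one pair per particle);
# these gamma distributions are placeholders, not the ones used in the papers.
tau = rng.gamma(shape=0.5, scale=2.0, size=n_particles)   # relaxation times
D_v = rng.gamma(shape=0.5, scale=1.0, size=n_particles)   # velocity diffusivities

v = np.zeros(n_particles)   # velocities
x = np.zeros(n_particles)   # positions

# Exact one-step update of the Ornstein-Uhlenbeck velocity process
#   dv = -(v / tau) dt + sqrt(2 D_v) dW,
# followed by a simple position update x += v dt.
decay = np.exp(-dt / tau)
kick = np.sqrt(D_v * tau * (1.0 - decay**2))
for _ in range(n_steps):
    v = v * decay + kick * rng.standard_normal(n_particles)
    x += v * dt

# Ensemble statistics: the superposition of ordinary Gaussian particles with
# heterogeneous (tau, D_v) can show heavy tails and anomalous scaling.
print("mean squared displacement:", np.mean(x**2))
print("excess kurtosis of positions:", np.mean(x**4) / np.mean(x**2) ** 2 - 3.0)

Each individual trajectory here is an ordinary Gaussian Ornstein-Uhlenbeck walk; any heavy-tailed or anomalous behaviour of the ensemble comes from the spread of the parameters, which is the mechanism the two articles analyse.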
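A minimal sketch of exact Gaussian process regression (an illustration, not code from the theses above), showing the predictive mean and uncertainty referred to in the Lindinger abstract; the squared-exponential kernel, its hyperparameters and the toy data are assumptions. Exact inference of this kind is tractable only for relatively small datasets, which motivates the sparse approximations and variational inference studied in the thesis.

import numpy as np

def rbf_kernel(a, b, lengthscale=1.0, variance=1.0):
    """Squared-exponential covariance between two sets of 1-D inputs."""
    sq_dist = (a[:, None] - b[None, :]) ** 2
    return variance * np.exp(-0.5 * sq_dist / lengthscale**2)

def gp_predict(X, y, X_new, noise_var=0.1):
    """Exact GP posterior mean and variance (tractable only in small data regimes)."""
    K = rbf_kernel(X, X) + noise_var * np.eye(len(X))
    K_s = rbf_kernel(X, X_new)
    K_ss = rbf_kernel(X_new, X_new)
    alpha = np.linalg.solve(K, y)
    mean = K_s.T @ alpha
    cov = K_ss - K_s.T @ np.linalg.solve(K, K_s)
    return mean, np.diag(cov)

# Toy usage: a noisy sine wave observed at a few points, predicted on a grid
# that extends slightly beyond the training inputs (where variance grows).
X = np.linspace(0, 5, 20)
y = np.sin(X) + 0.1 * np.random.default_rng(1).standard_normal(20)
mean, var = gp_predict(X, y, np.linspace(0, 6, 50))
print(mean[:3], var[:3])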