@article{WessigGerngrossPapeetal.2014, author = {Wessig, Pablo and Gerngroß, Maik and Pape, Simon and Bruhns, Philipp and Weber, Jens}, title = {Novel porous materials based on oligospiroketals (OSK)}, series = {RSC Advances : an international journal to further the chemical sciences}, volume = {4}, journal = {RSC Advances : an international journal to further the chemical sciences}, number = {4}, issn = {2046-2069}, doi = {10.1039/c4ra04437a}, pages = {31123 -- 31129}, year = {2014}, abstract = {New porous materials based on covalently connected monomers are presented. The key step of the synthesis is an acetalisation reaction. In previous years we used acetalisation reactions extensively to build up various molecular rods. Based on this approach, investigations towards porous polymeric materials were conducted by us. Here we wish to present the results of these studies in the synthesis of 1D polyacetals and porous 3D polyacetals. By scrambling experiments with 1D acetals we could prove that exchange reactions occur between different building blocks (evidenced by MALDI-TOF mass spectrometry). Based on these results we synthesized porous 3D polyacetals under the same mild conditions.}, language = {en} } @article{SecklerMetzler2022, author = {Seckler, Henrik and Metzler, Ralf}, title = {Bayesian deep learning for error estimation in the analysis of anomalous diffusion}, series = {Nature Communications}, volume = {13}, journal = {Nature Communications}, publisher = {Nature Publishing Group UK}, address = {London}, issn = {2041-1723}, doi = {10.1038/s41467-022-34305-6}, pages = {13}, year = {2022}, abstract = {Modern single-particle-tracking techniques produce extensive time-series of diffusive motion in a wide variety of systems, from single-molecule motion in living-cells to movement ecology. The quest is to decipher the physical mechanisms encoded in the data and thus to better understand the probed systems. 
We here augment recently proposed machine-learning techniques for decoding anomalous-diffusion data to include an uncertainty estimate in addition to the predicted output. To avoid the Black-Box-Problem a Bayesian-Deep-Learning technique named Stochastic-Weight-Averaging-Gaussian is used to train models for both the classification of the diffusion model and the regression of the anomalous diffusion exponent of single-particle-trajectories. Evaluating their performance, we find that these models can achieve a well-calibrated error estimate while maintaining high prediction accuracies. In the analysis of the output uncertainty predictions we relate these to properties of the underlying diffusion models, thus providing insights into the learning process of the machine and the relevance of the output.}, language = {en} } @article{SchoppaSiegVogeletal.2020, author = {Schoppa, Lukas and Sieg, Tobias and Vogel, Kristin and Z{\"o}ller, Gert and Kreibich, Heidi}, title = {Probabilistic flood loss models for companies}, series = {Water resources research}, volume = {56}, journal = {Water resources research}, number = {9}, publisher = {American Geophysical Union}, address = {Washington}, issn = {0043-1397}, doi = {10.1029/2020WR027649}, pages = {19}, year = {2020}, abstract = {Flood loss modeling is a central component of flood risk analysis. Conventionally, this involves univariable and deterministic stage-damage functions. Recent advancements in the field promote the use of multivariable and probabilistic loss models, which consider variables beyond inundation depth and account for prediction uncertainty. Although companies contribute significantly to total loss figures, novel modeling approaches for companies are lacking. Scarce data and the heterogeneity among companies impede the development of company flood loss models. 
We present three multivariable flood loss models for companies from the manufacturing, commercial, financial, and service sector that intrinsically quantify prediction uncertainty. Based on object-level loss data (n = 1,306), we comparatively evaluate the predictive capacity of Bayesian networks, Bayesian regression, and random forest in relation to deterministic and probabilistic stage-damage functions, serving as benchmarks. The company loss data stem from four postevent surveys in Germany between 2002 and 2013 and include information on flood intensity, company characteristics, emergency response, private precaution, and resulting loss to building, equipment, and goods and stock. We find that the multivariable probabilistic models successfully identify and reproduce essential relationships of flood damage processes in the data. The assessment of model skill focuses on the precision of the probabilistic predictions and reveals that the candidate models outperform the stage-damage functions, while differences among the proposed models are negligible. Although the combination of multivariable and probabilistic loss estimation improves predictive accuracy over the entire data set, wide predictive distributions stress the necessity for the quantification of uncertainty.}, language = {en} } @article{ReineckeTrautmannWageneretal.2022, author = {Reinecke, Robert and Trautmann, Tim and Wagener, Thorsten and Sch{\"u}ler, Katja}, title = {The critical need to foster computational reproducibility}, series = {Environmental research letters}, volume = {17}, journal = {Environmental research letters}, number = {4}, publisher = {IOP Publ. Ltd.}, address = {Bristol}, issn = {1748-9326}, doi = {10.1088/1748-9326/ac5cf8}, pages = {5}, year = {2022}, language = {en} } @article{NevillNegraMyersetal.2020, author = {Nevill, Alan M. and Negra, Yassine and Myers, Tony D. 
and Sammoud, Senda and Chaabene, Helmi}, title = {Key somatic variables associated with, and differences between the 4 swimming strokes}, series = {Journal of sports sciences}, volume = {38}, journal = {Journal of sports sciences}, number = {7}, publisher = {Routledge, Taylor \& Francis Group}, address = {London}, issn = {0264-0414}, doi = {10.1080/02640414.2020.1734311}, pages = {787 -- 794}, year = {2020}, abstract = {This study identified key somatic and demographic characteristics that benefit all swimmers and, at the same time, identified further characteristics that benefit only specific swimming strokes. Three hundred sixty-three competitive-level swimmers (male [n = 202]; female [n = 161]) participated in the study. We adopted a multiplicative, allometric regression model to identify the key characteristics associated with 100 m swimming speeds (controlling for age). The model was refined using backward elimination. Characteristics that benefited some but not all strokes were identified by introducing stroke-by-predictor variable interactions. The regression analysis revealed 7 "common" characteristics that benefited all swimmers suggesting that all swimmers benefit from having less body fat, broad shoulders and hips, a greater arm span (but shorter lower arms) and greater forearm girths with smaller relaxed arm girths. The 4 stroke-specific characteristics reveal that backstroke swimmers benefit from longer backs, a finding that can be likened to boats with longer hulls also travel faster through the water. Other stroke-by-predictor variable interactions (taken together) identified that butterfly swimmers are characterized by greater muscularity in the lower legs. 
These results highlight the importance of considering somatic and demographic characteristics of young swimmers for talent identification purposes (i.e., to ensure that swimmers realize their most appropriate strokes).}, language = {en} } @article{NaliboffGlerumBruneetal.2020, author = {Naliboff, John B. and Glerum, Anne and Brune, Sascha and P{\'e}ron-Pinvidic, G. and Wrona, Thilo}, title = {Development of 3-D rift heterogeneity through fault network evolution}, series = {Geophysical Research Letters}, volume = {47}, journal = {Geophysical Research Letters}, number = {13}, publisher = {John Wiley \& Sons, Inc.}, address = {New Jersey}, pages = {11}, year = {2020}, abstract = {Observations of rift and rifted margin architecture suggest that significant spatial and temporal structural heterogeneity develops during the multiphase evolution of continental rifting. Inheritance is often invoked to explain this heterogeneity, such as preexisting anisotropies in rock composition, rheology, and deformation. Here, we use high-resolution 3-D thermal-mechanical numerical models of continental extension to demonstrate that rift-parallel heterogeneity may develop solely through fault network evolution during the transition from distributed to localized deformation. In our models, the initial phase of distributed normal faulting is seeded through randomized initial strength perturbations in an otherwise laterally homogeneous lithosphere extending at a constant rate. Continued extension localizes deformation onto lithosphere-scale faults, which are laterally offset by tens of km and discontinuous along-strike. 
These results demonstrate that rift- and margin-parallel heterogeneity of large-scale fault patterns may in-part be a natural byproduct of fault network coalescence.}, language = {en} } @article{Metzler2020, author = {Metzler, Ralf}, title = {Superstatistics and non-Gaussian diffusion}, series = {The European physical journal special topics}, volume = {229}, journal = {The European physical journal special topics}, number = {5}, publisher = {Springer}, address = {Heidelberg}, issn = {1951-6355}, doi = {10.1140/epjst/e2020-900210-x}, pages = {711 -- 728}, year = {2020}, abstract = {Brownian motion and viscoelastic anomalous diffusion in homogeneous environments are intrinsically Gaussian processes. In a growing number of systems, however, non-Gaussian displacement distributions of these processes are being reported. The physical cause of the non-Gaussianity is typically seen in different forms of disorder. These include, for instance, imperfect "ensembles" of tracer particles, the presence of local variations of the tracer mobility in heteroegenous environments, or cases in which the speed or persistence of moving nematodes or cells are distributed. From a theoretical point of view stochastic descriptions based on distributed ("superstatistical") transport coefficients as well as time-dependent generalisations based on stochastic transport parameters with built-in finite correlation time are invoked. 
After a brief review of the history of Brownian motion and the famed Gaussian displacement distribution, we here provide a brief introduction to the phenomenon of non-Gaussianity and the stochastic modelling in terms of superstatistical and diffusing-diffusivity approaches.}, language = {en} } @article{LeungLeutbecherReichetal.2019, author = {Leung, Tsz Yan and Leutbecher, Martin and Reich, Sebastian and Shepherd, Theodore G.}, title = {Atmospheric Predictability: Revisiting the Inherent Finite-Time Barrier}, series = {Journal of the atmospheric sciences}, volume = {76}, journal = {Journal of the atmospheric sciences}, number = {12}, publisher = {American Meteorological Soc.}, address = {Boston}, issn = {0022-4928}, doi = {10.1175/JAS-D-19-0057.1}, pages = {3883 -- 3892}, year = {2019}, abstract = {The accepted idea that there exists an inherent finite-time barrier in deterministically predicting atmospheric flows originates from Edward N. Lorenz's 1969 work based on two-dimensional (2D) turbulence. Yet, known analytic results on the 2D Navier-Stokes (N-S) equations suggest that one can skillfully predict the 2D N-S system indefinitely far ahead should the initial-condition error become sufficiently small, thereby presenting a potential conflict with Lorenz's theory. Aided by numerical simulations, the present work reexamines Lorenz's model and reviews both sides of the argument, paying particular attention to the roles played by the slope of the kinetic energy spectrum. It is found that when this slope is shallower than -3, the Lipschitz continuity of analytic solutions (with respect to initial conditions) breaks down as the model resolution increases, unless the viscous range of the real system is resolved—which remains practically impossible. This breakdown leads to the inherent finite-time limit. If, on the other hand, the spectral slope is steeper than -3, then the breakdown does not occur. 
In this way, the apparent contradiction between the analytic results and Lorenz's theory is reconciled.}, language = {en} } @article{CherstvyChechkinMetzler2014, author = {Cherstvy, Andrey G. and Chechkin, Aleksei V. and Metzler, Ralf}, title = {Particle invasion, survival, and non-ergodicity in 2D diffusion processes with space-dependent diffusivity}, series = {Soft matter}, volume = {2014}, journal = {Soft matter}, number = {10}, publisher = {Royal Society of Chemistry}, issn = {2046-2069}, doi = {10.1039/c3sm52846d}, pages = {1591 -- 1601}, year = {2014}, abstract = {We study the thermal Markovian diffusion of tracer particles in a 2D medium with spatially varying diffusivity D(r), mimicking recently measured, heterogeneous maps of the apparent diffusion coefficient in biological cells. For this heterogeneous diffusion process (HDP) we analyse the mean squared displacement (MSD) of the tracer particles, the time averaged MSD, the spatial probability density function, and the first passage time dynamics from the cell boundary to the nucleus. Moreover we examine the non-ergodic properties of this process which are important for the correct physical interpretation of time averages of observables obtained from single particle tracking experiments. From extensive computer simulations of the 2D stochastic Langevin equation we present an in-depth study of this HDP. In particular, we find that the MSDs along the radial and azimuthal directions in a circular domain obey anomalous and Brownian scaling, respectively. We demonstrate that the time averaged MSD stays linear as a function of the lag time and the system thus reveals a weak ergodicity breaking. 
Our results will enable one to rationalise the diffusive motion of larger tracer particles such as viruses or submicron beads in biological cells.}, language = {en} } @article{AyzelSchefferHeistermann2020, author = {Ayzel, Georgy and Scheffer, Tobias and Heistermann, Maik}, title = {RainNet v1.0}, series = {Geoscientific Model Development}, volume = {13}, journal = {Geoscientific Model Development}, number = {6}, publisher = {Copernicus Publ.}, address = {G{\"o}ttingen}, issn = {1991-959X}, doi = {10.5194/gmd-13-2631-2020}, pages = {2631 -- 2644}, year = {2020}, abstract = {In this study, we present RainNet, a deep convolutional neural network for radar-based precipitation nowcasting. Its design was inspired by the U-Net and SegNet families of deep learning models, which were originally designed for binary segmentation tasks. RainNet was trained to predict continuous precipitation intensities at a lead time of 5min, using several years of quality-controlled weather radar composites provided by the German Weather Service (DWD). That data set covers Germany with a spatial domain of 900km × 900km and has a resolution of 1km in space and 5min in time. Independent verification experiments were carried out on 11 summer precipitation events from 2016 to 2017. In order to achieve a lead time of 1h, a recursive approach was implemented by using RainNet predictions at 5min lead times as model inputs for longer lead times. In the verification experiments, trivial Eulerian persistence and a conventional model based on optical flow served as benchmarks. The latter is available in the rainymotion library and had previously been shown to outperform DWD's operational nowcasting model for the same set of verification events. RainNet significantly outperforms the benchmark models at all lead times up to 60min for the routine verification metrics mean absolute error (MAE) and the critical success index (CSI) at intensity thresholds of 0.125, 1, and 5mm h⁻¹. 
However, rainymotion turned out to be superior in predicting the exceedance of higher intensity thresholds (here 10 and 15mm h⁻¹). The limited ability of RainNet to predict heavy rainfall intensities is an undesirable property which we attribute to a high level of spatial smoothing introduced by the model. At a lead time of 5min, an analysis of power spectral density confirmed a significant loss of spectral power at length scales of 16km and below. Obviously, RainNet had learned an optimal level of smoothing to produce a nowcast at 5min lead time. In that sense, the loss of spectral power at small scales is informative, too, as it reflects the limits of predictability as a function of spatial scale. Beyond the lead time of 5min, however, the increasing level of smoothing is a mere artifact - an analogue to numerical diffusion - that is not a property of RainNet itself but of its recursive application. In the context of early warning, the smoothing is particularly unfavorable since pronounced features of intense precipitation tend to get lost over longer lead times. Hence, we propose several options to address this issue in prospective research, including an adjustment of the loss function for model training, model training for longer lead times, and the prediction of threshold exceedance in terms of a binary segmentation task. Furthermore, we suggest additional input data that could help to better identify situations with imminent precipitation dynamics. The model code, pretrained weights, and training data are provided in open repositories as an input for such future studies.}, language = {en} }