@unpublished{ShlapunovTarkhanov2016,
  author    = {Shlapunov, Alexander and Tarkhanov, Nikolai Nikolaevich},
  title     = {An open mapping theorem for the Navier-Stokes equations},
  volume    = {5},
  number    = {10},
  publisher = {Universit{\"a}tsverlag Potsdam},
  address   = {Potsdam},
  issn      = {2193-6943},
  url       = {http://nbn-resolving.de/urn:nbn:de:kobv:517-opus4-98687},
  pages     = {80},
  year      = {2016},
  abstract  = {We consider the Navier-Stokes equations in the layer R^n x [0,T] over R^n with finite T > 0. Using the standard fundamental solutions of the Laplace operator and the heat operator, we reduce the Navier-Stokes equations to a nonlinear Fredholm equation of the form (I+K) u = f, where K is a compact continuous operator in anisotropic normed H{\"o}lder spaces weighted at the point at infinity with respect to the space variables. In fact, the weight function is included to provide a finite energy estimate for solutions to the Navier-Stokes equations for all t in [0,T]. Using the particular properties of the de Rham complex, we conclude that the Fr{\'e}chet derivative (I+K)' is continuously invertible at each point of the Banach space under consideration and that the map I+K is open and injective in this space. In this way the Navier-Stokes equations prove to induce an open one-to-one mapping in the scale of H{\"o}lder spaces.},
  language  = {en}
}

@unpublished{BlanchardKraemer2016,
  author    = {Blanchard, Gilles and Kr{\"a}mer, Nicole},
  title     = {Convergence rates of kernel conjugate gradient for random design regression},
  volume    = {5},
  number    = {8},
  publisher = {Universit{\"a}tsverlag Potsdam},
  address   = {Potsdam},
  issn      = {2193-6943},
  url       = {http://nbn-resolving.de/urn:nbn:de:kobv:517-opus4-94195},
  pages     = {31},
  year      = {2016},
  abstract  = {We prove statistical rates of convergence for kernel-based least squares regression from i.i.d. data using a conjugate gradient algorithm, where regularization against overfitting is obtained by early stopping. This method is related to Kernel Partial Least Squares, a regression method that combines supervised dimensionality reduction with least squares projection. Following the setting introduced in earlier related literature, we study so-called ``fast convergence rates'' depending on the regularity of the target regression function (measured by a source condition in terms of the kernel integral operator) and on the effective dimensionality of the data mapped into the kernel space. We obtain upper bounds, essentially matching known minimax lower bounds, for the L^2 (prediction) norm as well as for the stronger Hilbert norm, if the true regression function belongs to the reproducing kernel Hilbert space. If the latter assumption is not fulfilled, we obtain similar convergence rates for appropriate norms, provided additional unlabeled data are available.},
  language  = {en}
}

@unpublished{RoellyVallois2016,
  author    = {Roelly, Sylvie and Vallois, Pierre},
  title     = {Convoluted Brownian motion},
  volume    = {5},
  number    = {9},
  publisher = {Universit{\"a}tsverlag Potsdam},
  address   = {Potsdam},
  issn      = {2193-6943},
  url       = {http://nbn-resolving.de/urn:nbn:de:kobv:517-opus4-96339},
  pages     = {37},
  year      = {2016},
  abstract  = {In this paper we analyse semimartingale properties of a class of Gaussian periodic processes, called convoluted Brownian motions, obtained by convolution between a deterministic function and a Brownian motion. A classical example in this class is the periodic Ornstein-Uhlenbeck process.
    We compute their characteristics and show that, in general, they are neither Markovian nor do they satisfy a time-Markov field property. Nevertheless, by enlargement of filtration and/or addition of a one-dimensional component, one can in some cases recover Markovianity. We treat exhaustively the case of the bidimensional trigonometric convoluted Brownian motion and the higher-dimensional monomial convoluted Brownian motion.},
  language  = {en}
}

@unpublished{VasilievTarkhanov2016,
  author    = {Vasiliev, Serguei and Tarkhanov, Nikolai Nikolaevich},
  title     = {Construction of series of perfect lattices by layer superposition},
  volume    = {5},
  number    = {11},
  publisher = {Universit{\"a}tsverlag Potsdam},
  address   = {Potsdam},
  issn      = {2193-6943},
  url       = {http://nbn-resolving.de/urn:nbn:de:kobv:517-opus4-100591},
  pages     = {11},
  year      = {2016},
  abstract  = {We construct a new series of perfect lattices in n dimensions by the layer superposition method of Delaunay-Barnes.},
  language  = {en}
}

@unpublished{GairingHoegeleKosenkova2016,
  author    = {Gairing, Jan and H{\"o}gele, Michael and Kosenkova, Tetiana},
  title     = {Transportation distances and noise sensitivity of multiplicative L{\'e}vy SDE with applications},
  volume    = {5},
  number    = {2},
  publisher = {Universit{\"a}tsverlag Potsdam},
  address   = {Potsdam},
  issn      = {2193-6943},
  url       = {http://nbn-resolving.de/urn:nbn:de:kobv:517-opus4-86693},
  pages     = {24},
  year      = {2016},
  abstract  = {This article assesses the distance between the laws of stochastic differential equations with multiplicative L{\'e}vy noise on path space in terms of their characteristics. The notion of transportation distance on the set of L{\'e}vy kernels introduced by Kosenkova and Kulik yields a natural and statistically tractable upper bound on the noise sensitivity. This extends recent results for the additive case, in terms of coupling distances, to the multiplicative case. The strength of this notion is shown in a statistical implementation for simulations and in the example of a benchmark time series in paleoclimate.},
  language  = {en}
}

@unpublished{FedchenkoTarkhanov2016,
  author    = {Fedchenko, Dmitry and Tarkhanov, Nikolai Nikolaevich},
  title     = {Boundary value problems for elliptic complexes},
  volume    = {5},
  number    = {3},
  publisher = {Universit{\"a}tsverlag Potsdam},
  address   = {Potsdam},
  issn      = {2193-6943},
  url       = {http://nbn-resolving.de/urn:nbn:de:kobv:517-opus4-86705},
  pages     = {12},
  year      = {2016},
  abstract  = {The aim of this paper is to bring together two areas which are of great importance for the study of overdetermined boundary value problems. The first area is homological algebra, which is the main tool in constructing the formal theory of overdetermined problems; the second is the global calculus of pseudodifferential operators, which allows one to develop explicit analysis.},
  language  = {en}
}

@unpublished{AlsaedyTarkhanov2016,
  author    = {Alsaedy, Ammar and Tarkhanov, Nikolai Nikolaevich},
  title     = {A Hilbert boundary value problem for generalised Cauchy-Riemann equations},
  volume    = {5},
  number    = {1},
  publisher = {Universit{\"a}tsverlag Potsdam},
  address   = {Potsdam},
  issn      = {2193-6943},
  url       = {http://nbn-resolving.de/urn:nbn:de:kobv:517-opus4-86109},
  pages     = {21},
  year      = {2016},
  abstract  = {We elaborate a boundary Fourier method for studying an analogue of the Hilbert problem for analytic functions within the framework of generalised Cauchy-Riemann equations. The boundary value problem need not satisfy the Shapiro-Lopatinskij condition and so it fails to be Fredholm in Sobolev spaces.
    We give a solvability condition for the Hilbert problem, which resembles those for ill-posed problems, and construct an explicit formula for approximate solutions.},
  language  = {en}
}

@unpublished{Alsaedy2016,
  author    = {Alsaedy, Ammar},
  title     = {Variational primitive of a differential form},
  volume    = {5},
  number    = {4},
  publisher = {Universit{\"a}tsverlag Potsdam},
  address   = {Potsdam},
  issn      = {2193-6943},
  url       = {http://nbn-resolving.de/urn:nbn:de:kobv:517-opus4-89223},
  pages     = {8},
  year      = {2016},
  abstract  = {In this paper we specify the Dirichlet-to-Neumann operator related to the Cauchy problem for the gradient operator with data on a part of the boundary. To this end, we consider a nonlinear relaxation of this problem, which is a mixed boundary value problem of Zaremba type for the p-Laplace equation.},
  language  = {en}
}

@unpublished{BlanchardMuecke2016,
  author    = {Blanchard, Gilles and M{\"u}cke, Nicole},
  title     = {Optimal rates for regularization of statistical inverse learning problems},
  volume    = {5},
  number    = {5},
  publisher = {Universit{\"a}tsverlag Potsdam},
  address   = {Potsdam},
  issn      = {2193-6943},
  url       = {http://nbn-resolving.de/urn:nbn:de:kobv:517-opus4-89782},
  pages     = {36},
  year      = {2016},
  abstract  = {We consider a statistical inverse learning problem, where we observe the image of a function f through a linear operator A at i.i.d. random design points X_i, superposed with additional noise. The distribution of the design points is unknown and can be very general. We analyze simultaneously the direct (estimation of Af) and the inverse (estimation of f) learning problems. In this general framework, we obtain strong and weak minimax optimal rates of convergence (as the number of observations n grows large) for a large class of spectral regularization methods over regularity classes defined through appropriate source conditions. This improves on or completes previous results obtained in related settings. The optimality of the obtained rates is shown not only in the exponent in n but also in the explicit dependence of the constant factor on the variance of the noise and on the radius of the source condition set.},
  language  = {en}
}

@unpublished{MeraTarkhanov2016,
  author    = {Mera, Azal and Tarkhanov, Nikolai Nikolaevich},
  title     = {The Neumann problem after Spencer},
  volume    = {5},
  number    = {6},
  publisher = {Universit{\"a}tsverlag Potsdam},
  address   = {Potsdam},
  issn      = {2193-6943},
  url       = {http://nbn-resolving.de/urn:nbn:de:kobv:517-opus4-90631},
  pages     = {21},
  year      = {2016},
  abstract  = {When trying to extend the Hodge theory for elliptic complexes on compact closed manifolds to the case of compact manifolds with boundary, one is led to a boundary value problem for the Laplacian of the complex which is usually referred to as the Neumann problem. We study the Neumann problem for a larger class of sequences of differential operators on a compact manifold with boundary. These are sequences of small curvature, i.e., those with the property that the composition of any two neighbouring operators has order less than two.},
  language  = {en}
}