@article{BlanchardMuecke2018,
  author    = {Blanchard, Gilles and M{\"u}cke, Nicole},
  title     = {Optimal rates for regularization of statistical inverse learning problems},
  journal   = {Foundations of Computational Mathematics},
  volume    = {18},
  number    = {4},
  pages     = {971--1013},
  year      = {2018},
  publisher = {Springer},
  address   = {New York},
  issn      = {1615-3375},
  doi       = {10.1007/s10208-017-9359-7},
  language  = {en},
  abstract  = {We consider a statistical inverse learning (also called inverse regression) problem, where we observe the image of a function $f$ through a linear operator $A$ at i.i.d. random design points $X_i$, superposed with additive noise. The distribution of the design points is unknown and can be very general. We analyze simultaneously the direct (estimation of $Af$) and the inverse (estimation of $f$) learning problems. In this general framework, we obtain strong and weak minimax optimal rates of convergence (as the number of observations $n$ grows large) for a large class of spectral regularization methods over regularity classes defined through appropriate source conditions. This improves on or completes previous results obtained in related settings. The optimality of the obtained rates is shown not only in the exponent in $n$ but also in the explicit dependence of the constant factor on the variance of the noise and the radius of the source condition set.}
}

@inproceedings{Rastogi2019,
  author    = {Rastogi, Abhishake},
  title     = {Tikhonov regularization with oversmoothing penalty for linear statistical inverse learning problems},
  booktitle = {Third International Conference of Mathematical Sciences (ICMS 2019)},
  series    = {AIP Conference Proceedings},
  volume    = {2183},
  pages     = {4},
  year      = {2019},
  publisher = {American Institute of Physics},
  address   = {Melville},
  isbn      = {978-0-7354-1930-8},
  issn      = {0094-243X},
  doi       = {10.1063/1.5136221},
  language  = {en},
  abstract  = {In this paper, we consider the linear ill-posed inverse problem with noisy data in the statistical learning setting. The Tikhonov regularization scheme in Hilbert scales is considered in the reproducing kernel Hilbert space framework to reconstruct the estimator from the random noisy data. We discuss rates of convergence for the regularized solution under prior assumptions and a link condition. For regression functions with smoothness given in terms of source conditions, the error bound can be established explicitly.}
}