@article{HartungWahlRastogietal.2021,
  author    = {Hartung, Niklas and Wahl, Martin and Rastogi, Abhishake and Huisinga, Wilhelm},
  title     = {Nonparametric goodness-of-fit testing for parametric covariate models in pharmacometric analyses},
  journal   = {CPT: pharmacometrics \& systems pharmacology},
  volume    = {10},
  number    = {6},
  publisher = {Nature Publ. Group},
  address   = {London},
  issn      = {2163-8306},
  doi       = {10.1002/psp4.12614},
  pages     = {564--576},
  year      = {2021},
  abstract  = {The characterization of covariate effects on model parameters is a crucial step during pharmacokinetic/pharmacodynamic analyses. Although covariate selection criteria have been studied extensively, the choice of the functional relationship between covariates and parameters, however, has received much less attention. Often, a simple particular class of covariate-to-parameter relationships (linear, exponential, etc.) is chosen ad hoc or based on domain knowledge, and a statistical evaluation is limited to the comparison of a small number of such classes. Goodness-of-fit testing against a nonparametric alternative provides a more rigorous approach to covariate model evaluation, but no such test has been proposed so far. In this manuscript, we derive and evaluate nonparametric goodness-of-fit tests for parametric covariate models, the null hypothesis, against a kernelized Tikhonov regularized alternative, transferring concepts from statistical learning to the pharmacological setting. The approach is evaluated in a simulation study on the estimation of the age-dependent maturation effect on the clearance of a monoclonal antibody. Scenarios of varying data sparsity and residual error are considered. The goodness-of-fit test correctly identified misspecified parametric models with high power for relevant scenarios. The case study provides proof-of-concept of the feasibility of the proposed approach, which is envisioned to be beneficial for applications that lack well-founded covariate models.},
  language  = {en},
}

@article{Rastogi2020,
  author    = {Rastogi, Abhishake},
  title     = {{Tikhonov} regularization with oversmoothing penalty for nonlinear statistical inverse problems},
  journal   = {Communications on Pure and Applied Analysis},
  volume    = {19},
  number    = {8},
  publisher = {American Institute of Mathematical Sciences},
  address   = {Springfield},
  issn      = {1534-0392},
  doi       = {10.3934/cpaa.2020183},
  pages     = {4111--4126},
  year      = {2020},
  abstract  = {In this paper, we consider the nonlinear ill-posed inverse problem with noisy data in the statistical learning setting. The Tikhonov regularization scheme in Hilbert scales is considered to reconstruct the estimator from the random noisy data. In this statistical learning setting, we derive the rates of convergence for the regularized solution under certain assumptions on the nonlinear forward operator and the prior assumptions. We discuss estimates of the reconstruction error using the approach of reproducing kernel Hilbert spaces.},
  language  = {en},
}

@inproceedings{Rastogi2019,
  author    = {Rastogi, Abhishake},
  title     = {{Tikhonov} regularization with oversmoothing penalty for linear statistical inverse learning problems},
  booktitle = {AIP Conference Proceedings : third international Conference of mathematical sciences (ICMS 2019)},
  volume    = {2183},
  publisher = {American Institute of Physics},
  address   = {Melville},
  isbn      = {978-0-7354-1930-8},
  issn      = {0094-243X},
  doi       = {10.1063/1.5136221},
  pages     = {4},
  year      = {2019},
  abstract  = {In this paper, we consider the linear ill-posed inverse problem with noisy data in the statistical learning setting. The Tikhonov regularization scheme in Hilbert scales is considered in the reproducing kernel Hilbert space framework to reconstruct the estimator from the random noisy data. We discuss the rates of convergence for the regularized solution under the prior assumptions and link condition. For regression functions with smoothness given in terms of source conditions the error bound can explicitly be established.},
  language  = {en},
}