@article{ZienRaetschMikaetal.2000,
  author   = {Zien, Alexander and R{\"a}tsch, Gunnar and Mika, Sebastian and Sch{\"o}lkopf, Bernhard and Lengauer, Thomas and M{\"u}ller, Klaus-Robert},
  title    = {Engineering support vector machine kernels that recognize translation initiation sites},
  issn     = {1367-4803},
  year     = {2000},
  language = {en},
}

@article{ZieheMuellerNolteetal.2000,
  author   = {Ziehe, Andreas and M{\"u}ller, Klaus-Robert and Nolte, G. and Mackert, B.-M. and Curio, Gabriel},
  title    = {Artifact reduction in magnetoneurography based on time-delayed second-order correlations},
  year     = {2000},
  language = {en},
}

@article{ZieheLaskovNolteetal.2004,
  author   = {Ziehe, Andreas and Laskov, Pavel and Nolte, G. and M{\"u}ller, Klaus-Robert},
  title    = {A fast algorithm for joint diagonalization with non-orthogonal transformations and its application to blind source separation},
  year     = {2004},
  abstract = {A new efficient algorithm is presented for joint diagonalization of several matrices. The algorithm is based on the Frobenius-norm formulation of the joint diagonalization problem, and addresses diagonalization with a general, non-orthogonal transformation. The iterative scheme of the algorithm is based on a multiplicative update which ensures the invertibility of the diagonalizer. The algorithm's efficiency stems from the special approximation of the cost function resulting in a sparse, block-diagonal Hessian to be used in the computation of the quasi-Newton update step. Extensive numerical simulations illustrate the performance of the algorithm and provide a comparison to other leading diagonalization methods. The results of such comparison demonstrate that the proposed algorithm is a viable alternative to existing state-of-the-art joint diagonalization algorithms. The practical use of our algorithm is shown for blind source separation problems},
  language = {en},
}

@article{WuebbelerZieheMackertetal.2000,
  author   = {W{\"u}bbeler, G. and Ziehe, Andreas and Mackert, B.-M. and M{\"u}ller, Klaus-Robert and Trahms, L. and Curio, Gabriel},
  title    = {Independent component analysis of noninvasively recorded cortical magnetic {DC}-fields in humans},
  year     = {2000},
  language = {en},
}

@book{TsudaSugiyamaMueller2000,
  author    = {Tsuda, Koji and Sugiyama, Masashi and M{\"u}ller, Klaus-Robert},
  title     = {Subspace information criterion for non-quadratic regularizers : model selection for sparse regressors},
  series    = {GMD-Report},
  volume    = {120},
  publisher = {GMD-Forschungszentrum Informationstechnik},
  address   = {Sankt Augustin},
  pages     = {36 S.},
  year      = {2000},
  language  = {en},
}

@article{SugiyamaKawanabeMueller2004,
  author   = {Sugiyama, Masashi and Kawanabe, Motoaki and M{\"u}ller, Klaus-Robert},
  title    = {Trading variance reduction with unbiasedness : the regularized subspace information criterion for robust model selection in kernel regression},
  issn     = {0899-7667},
  year     = {2004},
  abstract = {A well-known result by Stein (1956) shows that in particular situations, biased estimators can yield better parameter estimates than their generally preferred unbiased counterparts. This letter follows the same spirit, as we will stabilize the unbiased generalization error estimates by regularization and finally obtain more robust model selection criteria for learning. We trade a small bias against a larger variance reduction, which has the beneficial effect of being more precise on a single training set. We focus on the subspace information criterion (SIC), which is an unbiased estimator of the expected generalization error measured by the reproducing kernel Hilbert space norm. SIC can be applied to the kernel regression, and it was shown in earlier experiments that a small regularization of SIC has a stabilization effect. However, it remained open how to appropriately determine the degree of regularization in SIC. In this article, we derive an unbiased estimator of the expected squared error between SIC and the expected generalization error and propose determining the degree of regularization of SIC such that the estimator of the expected squared error is minimized. Computer simulations with artificial and real data sets illustrate that the proposed method works effectively for improving the precision of SIC, especially in the high-noise-level cases. We furthermore compare the proposed method to the original SIC, the cross-validation, and an empirical Bayesian method in ridge parameter selection, with good results},
  language = {en},
}

@article{ShenoyKrauledatBlankertzetal.2006,
  author   = {Shenoy, Pradeep and Krauledat, Matthias and Blankertz, Benjamin and Rao, Rajesh P. N. and M{\"u}ller, Klaus-Robert},
  title    = {Towards adaptive classification for {BCI}},
  doi      = {10.1088/1741-2560/3/1/R02},
  year     = {2006},
  abstract = {Non-stationarities are ubiquitous in EEG signals. They are especially apparent in the use of EEG-based brain-computer interfaces (BCIs): (a) in the differences between the initial calibration measurement and the online operation of a BCI, or (b) caused by changes in the subject's brain processes during an experiment (e.g. due to fatigue, change of task involvement, etc). In this paper, we quantify for the first time such systematic evidence of statistical differences in data recorded during offline and online sessions. Furthermore, we propose novel techniques of investigating and visualizing data distributions, which are particularly useful for the analysis of (non-) stationarities. Our study shows that the brain signals used for control can change substantially from the offline calibration sessions to online control, and also within a single session. In addition to this general characterization of the signals, we propose several adaptive classification schemes and study their performance on data recorded during online experiments. An encouraging result of our study is that surprisingly simple adaptive methods in combination with an offline feature selection scheme can significantly increase BCI performance},
  language = {en},
}

@article{RaetschSchoelkopfSmolaetal.2000,
  author   = {R{\"a}tsch, Gunnar and Sch{\"o}lkopf, B. and Smola, Alexander J. and M{\"u}ller, Klaus-Robert and Mika, Sebastian},
  title    = {{V-Arc} : ensemble learning in the presence of outliers},
  year     = {2000},
  language = {en},
}

@article{RaetschSchoelkopfSmolaetal.2000a,
  author   = {R{\"a}tsch, Gunnar and Sch{\"o}lkopf, B. and Smola, Alexander J. and Mika, Sebastian and Onoda, T. and M{\"u}ller, Klaus-Robert},
  title    = {Robust ensemble learning},
  isbn     = {0-262-19448-1},
  year     = {2000},
  language = {en},
}

@article{RaetschSchoelkopfSmolaetal.2000b,
  author   = {R{\"a}tsch, Gunnar and Sch{\"o}lkopf, B. and Smola, Alexander J. and Mika, Sebastian and Onoda, T. and M{\"u}ller, Klaus-Robert},
  title    = {Robust ensemble learning for data analysis},
  year     = {2000},
  language = {en},
}