@misc{BridwellCavanaghCollinsetal.2018,
  author   = {Bridwell, David A. and Cavanagh, James F. and Collins, Anne G. E. and Nunez, Michael D. and Srinivasan, Ramesh and Stober, Sebastian and Calhoun, Vince D.},
  title    = {Moving beyond {ERP} components},
  series   = {Postprints der Universit{\"a}t Potsdam : Humanwissenschaftliche Reihe},
  number   = {656},
  issn     = {1866-8364},
  doi      = {10.25932/publishup-45966},
  url      = {http://nbn-resolving.de/urn:nbn:de:kobv:517-opus4-459667},
  pages    = {19},
  year     = {2018},
  abstract = {Relationships between neuroimaging measures and behavior provide important clues about brain function and cognition in healthy and clinical populations. While electroencephalography (EEG) provides a portable, low cost measure of brain dynamics, it has been somewhat underrepresented in the emerging field of model-based inference. We seek to address this gap in this article by highlighting the utility of linking EEG and behavior, with an emphasis on approaches for EEG analysis that move beyond focusing on peaks or ``components'' derived from averaging EEG responses across trials and subjects (generating the event-related potential, ERP). First, we review methods for deriving features from EEG in order to enhance the signal within single-trials. These methods include filtering based on user-defined features (i.e., frequency decomposition, time-frequency decomposition), filtering based on data-driven properties (i.e., blind source separation, BSS), and generating more abstract representations of data (e.g., using deep learning). We then review cognitive models which extract latent variables from experimental tasks, including the drift diffusion model (DDM) and reinforcement learning (RL) approaches. Next, we discuss ways to access associations among these measures, including statistical models, data-driven joint models and cognitive joint modeling using hierarchical Bayesian models (HBMs). We think that these methodological tools are likely to contribute to theoretical advancements, and will help inform our understandings of brain dynamics that contribute to moment-to-moment cognitive function.},
  language = {en},
}

@unpublished{BlanchardKraemer2016,
  author        = {Blanchard, Gilles and Kr{\"a}mer, Nicole},
  title         = {Convergence rates of kernel conjugate gradient for random design regression},
  volume        = {5},
  number        = {8},
  publisher     = {Universit{\"a}tsverlag Potsdam},
  address       = {Potsdam},
  issn          = {2193-6943},
  url           = {http://nbn-resolving.de/urn:nbn:de:kobv:517-opus4-94195},
  pages         = {31},
  year          = {2016},
  note          = {Preprint},
  internal-note = {NOTE(review): note field added to satisfy the @unpublished requirement. Given publisher, volume, number, and ISSN, this looks like a report in a Universit{\"a}tsverlag Potsdam series -- confirm the series name and consider retyping as @techreport.},
  abstract      = {We prove statistical rates of convergence for kernel-based least squares regression from i.i.d. data using a conjugate gradient algorithm, where regularization against overfitting is obtained by early stopping. This method is related to Kernel Partial Least Squares, a regression method that combines supervised dimensionality reduction with least squares projection. Following the setting introduced in earlier related literature, we study so-called ``fast convergence rates'' depending on the regularity of the target regression function (measured by a source condition in terms of the kernel integral operator) and on the effective dimensionality of the data mapped into the kernel space. We obtain upper bounds, essentially matching known minimax lower bounds, for the $L^2$ (prediction) norm as well as for the stronger Hilbert norm, if the true regression function belongs to the reproducing kernel Hilbert space. If the latter assumption is not fulfilled, we obtain similar convergence rates for appropriate norms, provided additional unlabeled data are available.},
  language      = {en},
}