@article{SeeligRabeMalemShinitskietal.2020,
  author    = {Seelig, Stefan A. and Rabe, Maximilian Michael and Malem-Shinitski, Noa and Risse, Sarah and Reich, Sebastian and Engbert, Ralf},
  title     = {{Bayesian} parameter estimation for the {SWIFT} model of eye-movement control during reading},
  journal   = {Journal of Mathematical Psychology},
  volume    = {95},
  publisher = {Elsevier},
  address   = {San Diego},
  issn      = {0022-2496},
  doi       = {10.1016/j.jmp.2019.102313},
  pages     = {32},
  year      = {2020},
  abstract  = {Process-oriented theories of cognition must be evaluated against time-ordered observations. Here we present a representative example for data assimilation of the SWIFT model, a dynamical model of the control of fixation positions and fixation durations during natural reading of single sentences. First, we develop and test an approximate likelihood function of the model, which is a combination of a spatial, pseudo-marginal likelihood and a temporal likelihood obtained by probability density approximation. Second, we implement a Bayesian approach to parameter inference using an adaptive Markov chain Monte Carlo procedure. Our results indicate that model parameters can be estimated reliably for individual subjects. We conclude that approximative Bayesian inference represents a considerable step forward for computational models of eye-movement control, where modeling of individual data on the basis of process-based dynamic models has not been possible so far.},
  language  = {en},
}

@article{KruegelRothkegelEngbert2020,
  author    = {Kr{\"u}gel, Andr{\'e} and Rothkegel, Lars and Engbert, Ralf},
  title     = {No exception from {Bayes'} rule},
  journal   = {Journal of Vision},
  volume    = {20},
  number    = {7},
  publisher = {Association for Research in Vision and Ophthalmology},
  address   = {Rockville},
  issn      = {1534-7362},
  doi       = {10.1167/jov.20.7.15},
  pages     = {14},
  year      = {2020},
  abstract  = {In an influential theoretical model, human sensorimotor control is achieved by a Bayesian decision process, which combines noisy sensory information and learned prior knowledge. A ubiquitous signature of prior knowledge and Bayesian integration in human perception and motor behavior is the frequently observed bias toward an average stimulus magnitude (i.e., a central-tendency bias, range effect, regression-to-the-mean effect). However, in the domain of eye movements, there is a recent controversy about the fundamental existence of a range effect in the saccadic system. Here we argue that the problem of the existence of a range effect is linked to the availability of prior knowledge for saccade control. We present results from two prosaccade experiments that both employ an informative prior structure (i.e., a nonuniform Gaussian distribution of saccade target distances). Our results demonstrate the validity of Bayesian integration in saccade control, which generates a range effect in saccades. According to Bayesian integration principles, the saccadic range effect depends on the availability of prior knowledge and varies in size as a function of the reliability of the prior and the sensory likelihood.},
  language  = {en},
}

@article{EngbertTrukenbrodBarthelmeetal.2015,
  author    = {Engbert, Ralf and Trukenbrod, Hans Arne and Barthelme, Simon and Wichmann, Felix A.},
  title     = {Spatial statistics and attentional dynamics in scene viewing},
  journal   = {Journal of Vision},
  volume    = {15},
  number    = {1},
  publisher = {Association for Research in Vision and Ophthalmology},
  address   = {Rockville},
  issn      = {1534-7362},
  doi       = {10.1167/15.1.14},
  pages     = {17},
  year      = {2015},
  abstract  = {In humans and in foveated animals visual acuity is highly concentrated at the center of gaze, so that choosing where to look next is an important example of online, rapid decision-making. Computational neuroscientists have developed biologically-inspired models of visual attention, termed saliency maps, which successfully predict where people fixate on average. Using point process theory for spatial statistics, we show that scanpaths contain, however, important statistical structure, such as spatial clustering on top of distributions of gaze positions. Here, we develop a dynamical model of saccadic selection that accurately predicts the distribution of gaze positions as well as spatial clustering along individual scanpaths. Our model relies on activation dynamics via spatially-limited (foveated) access to saliency information, and, second, a leaky memory process controlling the re-inspection of target regions. This theoretical framework models a form of context-dependent decision-making, linking neural dynamics of attention to behavioral gaze data.},
  language  = {en},
}

@article{CajarSchneeweissEngbertetal.2016,
  author    = {Cajar, Anke and Schneewei{\ss}, Paul and Engbert, Ralf and Laubrock, Jochen},
  title     = {Coupling of attention and saccades when viewing scenes with central and peripheral degradation},
  journal   = {Journal of Vision},
  volume    = {16},
  number    = {2},
  publisher = {Association for Research in Vision and Ophthalmology},
  address   = {Rockville},
  issn      = {1534-7362},
  doi       = {10.1167/16.2.8},
  pages     = {1--19},
  year      = {2016},
  abstract  = {Degrading real-world scenes in the central or the peripheral visual field yields a characteristic pattern: Mean saccade amplitudes increase with central and decrease with peripheral degradation. Does this pattern reflect corresponding modulations of selective attention? If so, the observed saccade amplitude pattern should reflect more focused attention in the central region with peripheral degradation and an attentional bias toward the periphery with central degradation. To investigate this hypothesis, we measured the detectability of peripheral (Experiment 1) or central targets (Experiment 2) during scene viewing when low or high spatial frequencies were gaze-contingently filtered in the central or the peripheral visual field. Relative to an unfiltered control condition, peripheral filtering induced a decrease of the detection probability for peripheral but not for central targets (tunnel vision). Central filtering decreased the detectability of central but not of peripheral targets. Additional post hoc analyses are compatible with the interpretation that saccade amplitudes and direction are computed in partial independence. Our experimental results indicate that task-induced modulations of saccade amplitudes reflect attentional modulations.},
  language  = {en},
}