@misc{BackhausEngbertRothkegeletal.2020a, author = {Backhaus, Daniel and Engbert, Ralf and Rothkegel, Lars Oliver Martin and Trukenbrod, Hans Arne}, title = {Task-dependence in scene perception: Head unrestrained viewing using mobile eye-tracking}, series = {Zweitver{\"o}ffentlichungen der Universit{\"a}t Potsdam : Humanwissenschaftliche Reihe}, journal = {Zweitver{\"o}ffentlichungen der Universit{\"a}t Potsdam : Humanwissenschaftliche Reihe}, number = {5}, issn = {1866-8364}, doi = {10.25932/publishup-51912}, url = {http://nbn-resolving.de/urn:nbn:de:kobv:517-opus4-519124}, pages = {23}, year = {2020}, abstract = {Real-world scene perception is typically studied in the laboratory using static picture viewing with restrained head position. Consequently, the transfer of results obtained in this paradigm to real-world scenarios has been questioned. The advancement of mobile eye-trackers and the progress in image processing, however, permit a more natural experimental setup that, at the same time, maintains the high experimental control from the standard laboratory setting. We investigated eye movements while participants were standing in front of a projector screen and explored images under four specific task instructions. Eye movements were recorded with a mobile eye-tracking device and raw gaze data were transformed from head-centered into image-centered coordinates. We observed differences between tasks in temporal and spatial eye-movement parameters and found that the bias to fixate images near the center differed between tasks. 
Our results demonstrate that current mobile eye-tracking technology and a highly controlled design support the study of fine-scaled task dependencies in an experimental setting that permits more natural viewing behavior than the static picture viewing paradigm.}, language = {en} } @article{BackhausEngbertRothkegeletal.2020, author = {Backhaus, Daniel and Engbert, Ralf and Rothkegel, Lars Oliver Martin and Trukenbrod, Hans Arne}, title = {Task-dependence in scene perception: Head unrestrained viewing using mobile eye-tracking}, series = {Journal of vision}, volume = {20}, journal = {Journal of vision}, number = {5}, publisher = {Association for Research in Vision and Ophthalmology}, address = {Rockville}, issn = {1534-7362}, doi = {10.1167/jov.20.5.3}, pages = {1--21}, year = {2020}, abstract = {Real-world scene perception is typically studied in the laboratory using static picture viewing with restrained head position. Consequently, the transfer of results obtained in this paradigm to real-world scenarios has been questioned. The advancement of mobile eye-trackers and the progress in image processing, however, permit a more natural experimental setup that, at the same time, maintains the high experimental control from the standard laboratory setting. We investigated eye movements while participants were standing in front of a projector screen and explored images under four specific task instructions. Eye movements were recorded with a mobile eye-tracking device and raw gaze data were transformed from head-centered into image-centered coordinates. We observed differences between tasks in temporal and spatial eye-movement parameters and found that the bias to fixate images near the center differed between tasks. 
Our results demonstrate that current mobile eye-tracking technology and a highly controlled design support the study of fine-scaled task dependencies in an experimental setting that permits more natural viewing behavior than the static picture viewing paradigm.}, language = {en} } @article{BaptistaBohnKliegletal.2008, author = {Baptista, Murilo da Silva and Bohn, Christiane and Kliegl, Reinhold and Engbert, Ralf and Kurths, J{\"u}rgen}, title = {Reconstruction of eye movements during blinks}, issn = {1054-1500}, doi = {10.1063/1.2890843}, year = {2008}, language = {en} } @article{BarthelmeTrukenbrodEngbertetal.2013, author = {Barthelme, Simon and Trukenbrod, Hans Arne and Engbert, Ralf and Wichmann, Felix A.}, title = {Modeling fixation locations using spatial point processes}, series = {Journal of vision}, volume = {13}, journal = {Journal of vision}, number = {12}, publisher = {Association for Research in Vision and Ophthalmology}, address = {Rockville}, issn = {1534-7362}, doi = {10.1167/13.12.1}, pages = {34}, year = {2013}, abstract = {Whenever eye movements are measured, a central part of the analysis has to do with where subjects fixate and why they fixated where they fixated. To a first approximation, a set of fixations can be viewed as a set of points in space; this implies that fixations are spatial data and that the analysis of fixation locations can be beneficially thought of as a spatial statistics problem. We argue that thinking of fixation locations as arising from point processes is a very fruitful framework for eye-movement data, helping turn qualitative questions into quantitative ones. We provide a tutorial introduction to some of the main ideas of the field of spatial statistics, focusing especially on spatial Poisson processes. We show how point processes help relate image properties to fixation locations. 
In particular we show how point processes naturally express the idea that image features' predictability for fixations may vary from one image to another. We review other methods of analysis used in the literature, show how they relate to point process theory, and argue that thinking in terms of point processes substantially extends the range of analyses that can be performed and clarifies their interpretation.}, language = {en} } @article{BettenbuehlRusconiEngbertetal.2012, author = {Bettenb{\"u}hl, Mario and Rusconi, Marco and Engbert, Ralf and Holschneider, Matthias}, title = {Bayesian selection of Markov Models for symbol sequences: application to microsaccadic eye movements}, series = {PLoS one}, volume = {7}, journal = {PLoS one}, number = {9}, publisher = {PLoS}, address = {San Francisco}, issn = {1932-6203}, doi = {10.1371/journal.pone.0043388}, pages = {10}, year = {2012}, abstract = {Complex biological dynamics often generate sequences of discrete events which can be described as a Markov process. The order of the underlying Markovian stochastic process is fundamental for characterizing statistical dependencies within sequences. As an example for this class of biological systems, we investigate the Markov order of sequences of microsaccadic eye movements from human observers. We calculate the integrated likelihood of a given sequence for various orders of the Markov process and use this in a Bayesian framework for statistical inference on the Markov order. Our analysis shows that data from most participants are best explained by a first-order Markov process. This is compatible with recent findings of a statistical coupling of subsequent microsaccade orientations. 
Our method might prove to be useful for a broad class of biological systems.}, language = {en} } @misc{CajarEngbertLaubrock2022a, author = {Cajar, Anke and Engbert, Ralf and Laubrock, Jochen}, title = {Potsdam Eye-Movement Corpus for Scene Memorization and Search With Color and Spatial-Frequency Filtering}, series = {Zweitver{\"o}ffentlichungen der Universit{\"a}t Potsdam : Humanwissenschaftliche Reihe}, journal = {Zweitver{\"o}ffentlichungen der Universit{\"a}t Potsdam : Humanwissenschaftliche Reihe}, publisher = {Universit{\"a}tsverlag Potsdam}, address = {Potsdam}, issn = {1866-8364}, doi = {10.25932/publishup-56318}, url = {http://nbn-resolving.de/urn:nbn:de:kobv:517-opus4-563184}, pages = {1--7}, year = {2022}, language = {en} } @article{CajarEngbertLaubrock2022, author = {Cajar, Anke and Engbert, Ralf and Laubrock, Jochen}, title = {Potsdam Eye-Movement Corpus for Scene Memorization and Search With Color and Spatial-Frequency Filtering}, series = {Frontiers in psychology / Frontiers Research Foundation}, volume = {13}, journal = {Frontiers in psychology / Frontiers Research Foundation}, publisher = {Frontiers Research Foundation}, address = {Lausanne, Schweiz}, issn = {1664-1078}, doi = {10.3389/fpsyg.2022.850482}, pages = {1--7}, year = {2022}, language = {en} } @article{CajarEngbertLaubrock2016, author = {Cajar, Anke and Engbert, Ralf and Laubrock, Jochen}, title = {Spatial frequency processing in the central and peripheral visual field during scene viewing}, series = {Vision research : an international journal for functional aspects of vision.}, volume = {127}, journal = {Vision research : an international journal for functional aspects of vision.}, publisher = {Elsevier}, address = {Oxford}, issn = {0042-6989}, doi = {10.1016/j.visres.2016.05.008}, pages = {186--197}, year = {2016}, abstract = {Visuospatial attention and gaze control depend on the interaction of foveal and peripheral processing. 
The foveal and peripheral regions of the visual field are differentially sensitive to parts of the spatial frequency spectrum. In two experiments, we investigated how the selective attenuation of spatial frequencies in the central or the peripheral visual field affects eye-movement behavior during real-world scene viewing. Gaze-contingent low-pass or high-pass filters with varying filter levels (i.e., cutoff frequencies; Experiment 1) or filter sizes (Experiment 2) were applied. Compared to unfiltered control conditions, mean fixation durations increased most with central high-pass and peripheral low-pass filtering. Increasing filter size prolonged fixation durations with peripheral filtering, but not with central filtering. Increasing filter level prolonged fixation durations with low-pass filtering, but not with high-pass filtering. These effects indicate that fixation durations are not always longer under conditions of increased processing difficulty. Saccade amplitudes largely adapted to processing difficulty: amplitudes increased with central filtering and decreased with peripheral filtering; the effects strengthened with increasing filter size and filter level. In addition, we observed a trade-off between saccade timing and saccadic selection, since saccade amplitudes were modulated when fixation durations were unaffected by the experimental manipulations. We conclude that interactions of perception and gaze control are highly sensitive to experimental manipulations of input images as long as the residual information can still be accessed for gaze control. (C) 2016 Elsevier Ltd. 
All rights reserved.}, language = {en} } @misc{CajarEngbertLaubrock2016a, author = {Cajar, Anke and Engbert, Ralf and Laubrock, Jochen}, title = {Eye movements during gaze-contingent spatial-frequency filtering of real-world scenes: Effects of filter location, cutoff, and size}, series = {Perception}, volume = {45}, journal = {Perception}, publisher = {Sage Publ.}, address = {London}, issn = {0301-0066}, pages = {126--126}, year = {2016}, language = {en} } @article{CajarEngbertLaubrock2020, author = {Cajar, Anke and Engbert, Ralf and Laubrock, Jochen}, title = {How spatial frequencies and color drive object search in real-world scenes}, series = {Journal of vision}, volume = {20}, journal = {Journal of vision}, number = {7}, publisher = {Association for Research in Vision and Ophthalmology}, address = {Rockville}, issn = {1534-7362}, doi = {10.1167/jov.20.7.8}, pages = {16}, year = {2020}, abstract = {When studying how people search for objects in scenes, the inhomogeneity of the visual field is often ignored. Due to physiological limitations, peripheral vision is blurred and mainly uses coarse-grained information (i.e., low spatial frequencies) for selecting saccade targets, whereas high-acuity central vision uses fine-grained information (i.e., high spatial frequencies) for analysis of details. Here we investigated how spatial frequencies and color affect object search in real-world scenes. Using gaze-contingent filters, we attenuated high or low frequencies in central or peripheral vision while viewers searched color or grayscale scenes. Results showed that peripheral filters and central high-pass filters hardly affected search accuracy, whereas accuracy dropped drastically with central low-pass filters. Peripheral filtering increased the time to localize the target by decreasing saccade amplitudes and increasing number and duration of fixations. The use of coarse-grained information in the periphery was limited to color scenes. 
Central filtering increased the time to verify target identity instead, especially with low-pass filters. We conclude that peripheral vision is critical for object localization and central vision is critical for object identification. Visual guidance during peripheral object localization is dominated by low-frequency color information, whereas high-frequency information, relatively independent of color, is most important for object identification in central vision.}, language = {en} }