@misc{BackhausEngbertRothkegeletal.2020a,
  author = {Backhaus, Daniel and Engbert, Ralf and Rothkegel, Lars Oliver Martin and Trukenbrod, Hans Arne},
  title = {Task-dependence in scene perception: Head unrestrained viewing using mobile eye-tracking},
  series = {Zweitver{\"o}ffentlichungen der Universit{\"a}t Potsdam : Humanwissenschaftliche Reihe},
  number = {5},
  issn = {1866-8364},
  doi = {10.25932/publishup-51912},
  url = {http://nbn-resolving.de/urn:nbn:de:kobv:517-opus4-519124},
  pages = {23},
  year = {2020},
  abstract = {Real-world scene perception is typically studied in the laboratory using static picture viewing with restrained head position. Consequently, the transfer of results obtained in this paradigm to real-world scenarios has been questioned. The advancement of mobile eye-trackers and the progress in image processing, however, permit a more natural experimental setup that, at the same time, maintains the high experimental control from the standard laboratory setting. We investigated eye movements while participants were standing in front of a projector screen and explored images under four specific task instructions. Eye movements were recorded with a mobile eye-tracking device and raw gaze data were transformed from head-centered into image-centered coordinates. We observed differences between tasks in temporal and spatial eye-movement parameters and found that the bias to fixate images near the center differed between tasks. Our results demonstrate that current mobile eye-tracking technology and a highly controlled design support the study of fine-scaled task dependencies in an experimental setting that permits more natural viewing behavior than the static picture viewing paradigm.},
  language = {en}
}

@article{BackhausEngbertRothkegeletal.2020,
  author = {Backhaus, Daniel and Engbert, Ralf and Rothkegel, Lars Oliver Martin and Trukenbrod, Hans Arne},
  title = {Task-dependence in scene perception: Head unrestrained viewing using mobile eye-tracking},
  journal = {Journal of Vision},
  volume = {20},
  number = {5},
  publisher = {Association for Research in Vision and Ophthalmology},
  address = {Rockville},
  issn = {1534-7362},
  doi = {10.1167/jov.20.5.3},
  pages = {1--21},
  year = {2020},
  abstract = {Real-world scene perception is typically studied in the laboratory using static picture viewing with restrained head position. Consequently, the transfer of results obtained in this paradigm to real-world scenarios has been questioned. The advancement of mobile eye-trackers and the progress in image processing, however, permit a more natural experimental setup that, at the same time, maintains the high experimental control from the standard laboratory setting. We investigated eye movements while participants were standing in front of a projector screen and explored images under four specific task instructions. Eye movements were recorded with a mobile eye-tracking device and raw gaze data were transformed from head-centered into image-centered coordinates. We observed differences between tasks in temporal and spatial eye-movement parameters and found that the bias to fixate images near the center differed between tasks.
Our results demonstrate that current mobile eye-tracking technology and a highly controlled design support the study of fine-scaled task dependencies in an experimental setting that permits more natural viewing behavior than the static picture viewing paradigm.},
  language = {en}
}

@misc{CajarEngbertLaubrock2022a,
  author = {Cajar, Anke and Engbert, Ralf and Laubrock, Jochen},
  title = {Potsdam Eye-Movement Corpus for Scene Memorization and Search With Color and Spatial-Frequency Filtering},
  series = {Zweitver{\"o}ffentlichungen der Universit{\"a}t Potsdam : Humanwissenschaftliche Reihe},
  publisher = {Universit{\"a}tsverlag Potsdam},
  address = {Potsdam},
  issn = {1866-8364},
  doi = {10.25932/publishup-56318},
  url = {http://nbn-resolving.de/urn:nbn:de:kobv:517-opus4-563184},
  pages = {1--7},
  year = {2022},
  language = {en}
}

@article{CajarEngbertLaubrock2022,
  author = {Cajar, Anke and Engbert, Ralf and Laubrock, Jochen},
  title = {Potsdam Eye-Movement Corpus for Scene Memorization and Search With Color and Spatial-Frequency Filtering},
  journal = {Frontiers in Psychology},
  volume = {13},
  publisher = {Frontiers Research Foundation},
  address = {Lausanne, Switzerland},
  issn = {1664-1078},
  doi = {10.3389/fpsyg.2022.850482},
  pages = {1--7},
  year = {2022},
  language = {en}
}

@article{CajarEngbertLaubrock2020,
  author = {Cajar, Anke and Engbert, Ralf and Laubrock, Jochen},
  title = {How spatial frequencies and color drive object search in real-world scenes},
  journal = {Journal of Vision},
  volume = {20},
  number = {7},
  publisher = {Association for Research in Vision and Ophthalmology},
  address = {Rockville},
  issn = {1534-7362},
  doi = {10.1167/jov.20.7.8},
  pages = {16},
  year = {2020},
  abstract = {When studying how people search for objects in scenes, the inhomogeneity of the visual field is often ignored. Due to physiological limitations, peripheral vision is blurred and mainly uses coarse-grained information (i.e., low spatial frequencies) for selecting saccade targets, whereas high-acuity central vision uses fine-grained information (i.e., high spatial frequencies) for analysis of details. Here we investigated how spatial frequencies and color affect object search in real-world scenes. Using gaze-contingent filters, we attenuated high or low frequencies in central or peripheral vision while viewers searched color or grayscale scenes. Results showed that peripheral filters and central high-pass filters hardly affected search accuracy, whereas accuracy dropped drastically with central low-pass filters. Peripheral filtering increased the time to localize the target by decreasing saccade amplitudes and increasing number and duration of fixations. The use of coarse-grained information in the periphery was limited to color scenes. Central filtering increased the time to verify target identity instead, especially with low-pass filters. We conclude that peripheral vision is critical for object localization and central vision is critical for object identification.
Visual guidance during peripheral object localization is dominated by low-frequency color information, whereas high-frequency information, relatively independent of color, is most important for object identification in central vision.},
  language = {en}
}

@article{CajarSchneeweissEngbertetal.2016,
  author = {Cajar, Anke and Schneewei{\ss}, Paul and Engbert, Ralf and Laubrock, Jochen},
  title = {Coupling of attention and saccades when viewing scenes with central and peripheral degradation},
  journal = {Journal of Vision},
  volume = {16},
  number = {2},
  publisher = {Association for Research in Vision and Ophthalmology},
  address = {Rockville},
  issn = {1534-7362},
  doi = {10.1167/16.2.8},
  pages = {1--19},
  year = {2016},
  abstract = {Degrading real-world scenes in the central or the peripheral visual field yields a characteristic pattern: Mean saccade amplitudes increase with central and decrease with peripheral degradation. Does this pattern reflect corresponding modulations of selective attention? If so, the observed saccade amplitude pattern should reflect more focused attention in the central region with peripheral degradation and an attentional bias toward the periphery with central degradation. To investigate this hypothesis, we measured the detectability of peripheral (Experiment 1) or central targets (Experiment 2) during scene viewing when low or high spatial frequencies were gaze-contingently filtered in the central or the peripheral visual field. Relative to an unfiltered control condition, peripheral filtering induced a decrease of the detection probability for peripheral but not for central targets (tunnel vision). Central filtering decreased the detectability of central but not of peripheral targets.
Additional post hoc analyses are compatible with the interpretation that saccade amplitudes and direction are computed in partial independence. Our experimental results indicate that task-induced modulations of saccade amplitudes reflect attentional modulations.},
  language = {en}
}

@article{TrukenbrodBarthelmeWichmannetal.2019,
  author = {Trukenbrod, Hans Arne and Barthelm{\'e}, Simon and Wichmann, Felix A. and Engbert, Ralf},
  title = {Spatial statistics for gaze patterns in scene viewing},
  journal = {Journal of Vision},
  volume = {19},
  number = {6},
  publisher = {Association for Research in Vision and Ophthalmology},
  address = {Rockville},
  issn = {1534-7362},
  doi = {10.1167/19.6.5},
  pages = {1--19},
  year = {2019},
  abstract = {Scene viewing is used to study attentional selection in complex but still controlled environments. One of the main observations on eye movements during scene viewing is the inhomogeneous distribution of fixation locations: While some parts of an image are fixated by almost all observers and are inspected repeatedly by the same observer, other image parts remain unfixated by observers even after long exploration intervals. Here, we apply spatial point process methods to investigate the relationship between pairs of fixations. More precisely, we use the pair correlation function, a powerful statistical tool, to evaluate dependencies between fixation locations along individual scanpaths. We demonstrate that aggregation of fixation locations within 4 degrees is stronger than expected from chance. Furthermore, the pair correlation function reveals stronger aggregation of fixations when the same image is presented a second time. We use simulations of a dynamical model to show that a narrower spatial attentional span may explain differences in pair correlations between the first and the second inspection of the same image.},
  language = {en}
}