@misc{CajarSchneeweissEngelbertetal.2016,
  author   = {Cajar, Anke and Schneeweiß, Paul and Engbert, Ralf and Laubrock, Jochen},
  title    = {Coupling of attention and saccades when viewing scenes with central and peripheral degradation},
  url      = {http://nbn-resolving.de/urn:nbn:de:kobv:517-opus4-394918},
  pages    = {19},
  year     = {2016},
  abstract = {Degrading real-world scenes in the central or the peripheral visual field yields a characteristic pattern: Mean saccade amplitudes increase with central and decrease with peripheral degradation. Does this pattern reflect corresponding modulations of selective attention? If so, the observed saccade amplitude pattern should reflect more focused attention in the central region with peripheral degradation and an attentional bias toward the periphery with central degradation. To investigate this hypothesis, we measured the detectability of peripheral (Experiment 1) or central targets (Experiment 2) during scene viewing when low or high spatial frequencies were gaze-contingently filtered in the central or the peripheral visual field. Relative to an unfiltered control condition, peripheral filtering induced a decrease of the detection probability for peripheral but not for central targets (tunnel vision). Central filtering decreased the detectability of central but not of peripheral targets. Additional post hoc analyses are compatible with the interpretation that saccade amplitudes and direction are computed in partial independence. Our experimental results indicate that task-induced modulations of saccade amplitudes reflect attentional modulations.},
  language = {en}
}

@article{CajarSchneeweissEngbertetal.2016,
  author    = {Cajar, Anke and Schneeweiß, Paul and Engbert, Ralf and Laubrock, Jochen},
  title     = {Coupling of attention and saccades when viewing scenes with central and peripheral degradation},
  series    = {Journal of Vision},
  volume    = {16},
  journal   = {Journal of Vision},
  number    = {2},
  publisher = {Association for Research in Vision and Ophthalmology},
  address   = {Rockville},
  issn      = {1534-7362},
  doi       = {10.1167/16.2.8},
  pages     = {1--19},
  year      = {2016},
  abstract  = {Degrading real-world scenes in the central or the peripheral visual field yields a characteristic pattern: Mean saccade amplitudes increase with central and decrease with peripheral degradation. Does this pattern reflect corresponding modulations of selective attention? If so, the observed saccade amplitude pattern should reflect more focused attention in the central region with peripheral degradation and an attentional bias toward the periphery with central degradation. To investigate this hypothesis, we measured the detectability of peripheral (Experiment 1) or central targets (Experiment 2) during scene viewing when low or high spatial frequencies were gaze-contingently filtered in the central or the peripheral visual field. Relative to an unfiltered control condition, peripheral filtering induced a decrease of the detection probability for peripheral but not for central targets (tunnel vision). Central filtering decreased the detectability of central but not of peripheral targets. Additional post hoc analyses are compatible with the interpretation that saccade amplitudes and direction are computed in partial independence.
Our experimental results indicate that task-induced modulations of saccade amplitudes reflect attentional modulations.},
  language  = {en}
}

@phdthesis{Rothkegel2018,
  author   = {Rothkegel, Lars Oliver Martin},
  title    = {Human scanpaths in natural scene viewing and natural scene search},
  doi      = {10.25932/publishup-42000},
  url      = {http://nbn-resolving.de/urn:nbn:de:kobv:517-opus4-420005},
  school   = {Universit{\"a}t Potsdam},
  pages    = {xv, 131},
  year     = {2018},
  abstract = {Understanding how humans move their eyes is an important part of understanding the functioning of the visual system. Analyzing eye movements from observations of natural scenes on a computer screen is a step to understand human visual behavior in the real world. When analyzing eye-movement data from scene-viewing experiments, the important questions are where (fixation locations), how long (fixation durations) and when (ordering of fixations) participants fixate on an image. By answering these questions, computational models can be developed which predict human scanpaths. Models serve as a tool to understand the underlying cognitive processes while observing an image, especially the allocation of visual attention. The goal of this thesis is to provide new contributions to characterize and model human scanpaths on natural scenes. The results from this thesis will help to understand and describe certain systematic eye-movement tendencies, which are mostly independent of the image. One eye-movement tendency I focus on throughout this thesis is the tendency to fixate more in the center of an image than on the outer parts, called the central fixation bias. Another tendency, which I will investigate thoroughly, is the characteristic distribution of angles between successive eye movements. The results serve to evaluate and improve a previously published model of scanpath generation from our laboratory, the SceneWalk model. Overall, six experiments were conducted for this thesis which led to the following five core results: i) A spatial inhibition of return can be found in scene-viewing data. This means that locations which have already been fixated are afterwards avoided for a certain time interval (Chapter 2). ii) The initial fixation position when observing an image has a long-lasting influence of up to five seconds on further scanpath progression (Chapter 2 \& 3). iii) The often described central fixation bias on images depends strongly on the duration of the initial fixation. Long-lasting initial fixations lead to a weaker central fixation bias than short fixations (Chapter 2 \& 3). iv) Human observers adjust their basic eye-movement parameters, like fixation durations and saccade amplitudes, to the visual properties of a target they look for in visual search (Chapter 4). v) The angle between two adjacent saccades is an indicator for the selectivity of the upcoming saccade target (Chapter 4). All results emphasize the importance of systematic behavioral eye-movement tendencies and dynamic aspects of human scanpaths in scene viewing.},
  language = {en}
}

@article{TrukenbrodBarthelmeWichmannetal.2019,
  author    = {Trukenbrod, Hans Arne and Barthelme, Simon and Wichmann, Felix A.
               and Engbert, Ralf},
  title     = {Spatial statistics for gaze patterns in scene viewing},
  series    = {Journal of Vision},
  volume    = {19},
  journal   = {Journal of Vision},
  number    = {6},
  publisher = {Association for Research in Vision and Ophthalmology},
  address   = {Rockville},
  issn      = {1534-7362},
  doi       = {10.1167/19.6.5},
  pages     = {1--19},
  year      = {2019},
  abstract  = {Scene viewing is used to study attentional selection in complex but still controlled environments. One of the main observations on eye movements during scene viewing is the inhomogeneous distribution of fixation locations: While some parts of an image are fixated by almost all observers and are inspected repeatedly by the same observer, other image parts remain unfixated by observers even after long exploration intervals. Here, we apply spatial point process methods to investigate the relationship between pairs of fixations. More precisely, we use the pair correlation function, a powerful statistical tool, to evaluate dependencies between fixation locations along individual scanpaths. We demonstrate that aggregation of fixation locations within 4 degrees is stronger than expected from chance. Furthermore, the pair correlation function reveals stronger aggregation of fixations when the same image is presented a second time. We use simulations of a dynamical model to show that a narrower spatial attentional span may explain differences in pair correlations between the first and the second inspection of the same image.},
  language  = {en}
}

@article{CajarEngbertLaubrock2020,
  author    = {Cajar, Anke and Engbert, Ralf and Laubrock, Jochen},
  title     = {How spatial frequencies and color drive object search in real-world scenes},
  series    = {Journal of Vision},
  volume    = {20},
  journal   = {Journal of Vision},
  number    = {7},
  publisher = {Association for Research in Vision and Ophthalmology},
  address   = {Rockville},
  issn      = {1534-7362},
  doi       = {10.1167/jov.20.7.8},
  pages     = {16},
  year      = {2020},
  abstract  = {When studying how people search for objects in scenes, the inhomogeneity of the visual field is often ignored. Due to physiological limitations, peripheral vision is blurred and mainly uses coarse-grained information (i.e., low spatial frequencies) for selecting saccade targets, whereas high-acuity central vision uses fine-grained information (i.e., high spatial frequencies) for analysis of details. Here we investigated how spatial frequencies and color affect object search in real-world scenes. Using gaze-contingent filters, we attenuated high or low frequencies in central or peripheral vision while viewers searched color or grayscale scenes. Results showed that peripheral filters and central high-pass filters hardly affected search accuracy, whereas accuracy dropped drastically with central low-pass filters. Peripheral filtering increased the time to localize the target by decreasing saccade amplitudes and increasing number and duration of fixations. The use of coarse-grained information in the periphery was limited to color scenes. Central filtering increased the time to verify target identity instead, especially with low-pass filters. We conclude that peripheral vision is critical for object localization and central vision is critical for object identification.
Visual guidance during peripheral object localization is dominated by low-frequency color information, whereas high-frequency information, relatively independent of color, is most important for object identification in central vision.},
  language  = {en}
}

@misc{BackhausEngbertRothkegeletal.2020a,
  author   = {Backhaus, Daniel and Engbert, Ralf and Rothkegel, Lars Oliver Martin and Trukenbrod, Hans Arne},
  title    = {Task-dependence in scene perception: Head unrestrained viewing using mobile eye-tracking},
  series   = {Zweitver{\"o}ffentlichungen der Universit{\"a}t Potsdam : Humanwissenschaftliche Reihe},
  journal  = {Zweitver{\"o}ffentlichungen der Universit{\"a}t Potsdam : Humanwissenschaftliche Reihe},
  number   = {5},
  issn     = {1866-8364},
  doi      = {10.25932/publishup-51912},
  url      = {http://nbn-resolving.de/urn:nbn:de:kobv:517-opus4-519124},
  pages    = {23},
  year     = {2020},
  abstract = {Real-world scene perception is typically studied in the laboratory using static picture viewing with restrained head position. Consequently, the transfer of results obtained in this paradigm to real-world scenarios has been questioned. The advancement of mobile eye-trackers and the progress in image processing, however, permit a more natural experimental setup that, at the same time, maintains the high experimental control of the standard laboratory setting. We investigated eye movements while participants were standing in front of a projector screen and explored images under four specific task instructions. Eye movements were recorded with a mobile eye-tracking device, and raw gaze data were transformed from head-centered into image-centered coordinates. We observed differences between tasks in temporal and spatial eye-movement parameters and found that the bias to fixate images near the center differed between tasks. Our results demonstrate that current mobile eye-tracking technology and a highly controlled design support the study of fine-scaled task dependencies in an experimental setting that permits more natural viewing behavior than the static picture viewing paradigm.},
  language = {en}
}

@article{BackhausEngbertRothkegeletal.2020,
  author    = {Backhaus, Daniel and Engbert, Ralf and Rothkegel, Lars Oliver Martin and Trukenbrod, Hans Arne},
  title     = {Task-dependence in scene perception: Head unrestrained viewing using mobile eye-tracking},
  series    = {Journal of Vision},
  volume    = {20},
  journal   = {Journal of Vision},
  number    = {5},
  publisher = {Association for Research in Vision and Ophthalmology},
  address   = {Rockville},
  issn      = {1534-7362},
  doi       = {10.1167/jov.20.5.3},
  pages     = {1--21},
  year      = {2020},
  abstract  = {Real-world scene perception is typically studied in the laboratory using static picture viewing with restrained head position. Consequently, the transfer of results obtained in this paradigm to real-world scenarios has been questioned. The advancement of mobile eye-trackers and the progress in image processing, however, permit a more natural experimental setup that, at the same time, maintains the high experimental control of the standard laboratory setting. We investigated eye movements while participants were standing in front of a projector screen and explored images under four specific task instructions. Eye movements were recorded with a mobile eye-tracking device, and raw gaze data were transformed from head-centered into image-centered coordinates. We observed differences between tasks in temporal and spatial eye-movement parameters and found that the bias to fixate images near the center differed between tasks.
Our results demonstrate that current mobile eye-tracking technology and a highly controlled design support the study of fine-scaled task dependencies in an experimental setting that permits more natural viewing behavior than the static picture viewing paradigm.},
  language  = {en}
}