@misc{ChandraKruegelEngbert2020,
  author   = {Chandra, Johan and Kr{\"u}gel, Andr{\'e} and Engbert, Ralf},
  title    = {Modulation of oculomotor control during reading of mirrored and inverted texts},
  series   = {Postprints der Universit{\"a}t Potsdam : Humanwissenschaftliche Reihe},
  journal  = {Postprints der Universit{\"a}t Potsdam : Humanwissenschaftliche Reihe},
  number   = {659},
  issn     = {1866-8364},
  doi      = {10.25932/publishup-49487},
  url      = {http://nbn-resolving.de/urn:nbn:de:kobv:517-opus4-494879},
  pages    = {17},
  year     = {2020},
  abstract = {The interplay between cognitive and oculomotor processes during reading can be explored when the spatial layout of text deviates from the typical display. In this study, we investigate various eye-movement measures during reading of text with experimentally manipulated layout (word-wise and letter-wise mirrored-reversed text as well as inverted and scrambled text). While typical findings (e.g., longer mean fixation times, shorter mean saccade lengths) in reading manipulated texts compared to normal texts were reported in earlier work, little is known about changes in oculomotor targeting, as observed in within-word landing positions, under the above text layouts. Here we carry out precise analyses of landing positions and find substantial changes in the so-called launch-site effect in addition to the expected overall slow-down of reading performance. Specifically, during reading of our manipulated text conditions with reversed letter order (against the overall reading direction), we find a reduced launch-site effect, while in all other manipulated text conditions we observe an increased launch-site effect. Our results clearly indicate that the oculomotor system is highly adaptive when confronted with unusual reading conditions.},
  language = {en}
}

@misc{BackhausEngbertRothkegeletal.2020,
  author   = {Backhaus, Daniel and Engbert, Ralf and Rothkegel, Lars Oliver Martin and Trukenbrod, Hans Arne},
  title    = {Task-dependence in scene perception: Head unrestrained viewing using mobile eye-tracking},
  series   = {Zweitver{\"o}ffentlichungen der Universit{\"a}t Potsdam : Humanwissenschaftliche Reihe},
  journal  = {Zweitver{\"o}ffentlichungen der Universit{\"a}t Potsdam : Humanwissenschaftliche Reihe},
  number   = {5},
  issn     = {1866-8364},
  doi      = {10.25932/publishup-51912},
  url      = {http://nbn-resolving.de/urn:nbn:de:kobv:517-opus4-519124},
  pages    = {23},
  year     = {2020},
  abstract = {Real-world scene perception is typically studied in the laboratory using static picture viewing with restrained head position. Consequently, the transfer of results obtained in this paradigm to real-world scenarios has been questioned. The advancement of mobile eye-trackers and the progress in image processing, however, permit a more natural experimental setup that, at the same time, maintains the high experimental control of the standard laboratory setting. We investigated eye movements while participants were standing in front of a projector screen and explored images under four specific task instructions. Eye movements were recorded with a mobile eye-tracking device, and raw gaze data were transformed from head-centered into image-centered coordinates. We observed differences between tasks in temporal and spatial eye-movement parameters and found that the bias to fixate images near the center differed between tasks. Our results demonstrate that current mobile eye-tracking technology and a highly controlled design support the study of fine-scaled task dependencies in an experimental setting that permits more natural viewing behavior than the static picture viewing paradigm.},
  language = {en}
}