@article{JannedyMendozaDenton2005,
  author   = {Jannedy, Stefanie and Mendoza-Denton, Norma},
  title    = {Structuring information through gesture and intonation},
  journal  = {Interdisciplinary studies on information structure : ISIS ; working papers of the SFB 632},
  number   = {3},
  issn     = {1866-4725},
  url      = {http://nbn-resolving.de/urn:nbn:de:kobv:517-opus-8774},
  pages    = {199--244},
  year     = {2005},
  abstract = {Face-to-face communication is multimodal. In unscripted spoken discourse we can observe the interaction of several "semiotic layers", modalities of information such as syntax, discourse structure, gesture, and intonation. We explore the role of gesture and intonation in structuring and aligning information in spoken discourse through a study of the co-occurrence of pitch accents and gestural apices. Metaphorical spatialization through gesture also plays a role in conveying the contextual relationships between the speaker, the government, and other external forces in a naturally occurring political speech setting.},
  language = {en}
}

@book{PfauSteinbach2006,
  author    = {Pfau, Roland and Steinbach, Markus},
  title     = {Modality-independent and modality-specific aspects of grammaticalization in sign languages},
  publisher = {Universit{\"a}tsverlag Potsdam},
  address   = {Potsdam},
  isbn      = {978-3-939469-53-7},
  issn      = {1864-1857},
  url       = {http://nbn-resolving.de/urn:nbn:de:kobv:517-opus-10886},
  pages     = {97},
  year      = {2006},
  abstract  = {One type of internal diachronic change that has been extensively studied for spoken languages is grammaticalization, whereby lexical elements develop into free or bound grammatical elements. Based on a wealth of spoken languages, a large number of prototypical grammaticalization pathways have been identified. Moreover, it has been shown that desemanticization, decategorialization, and phonetic erosion are typical characteristics of grammaticalization processes. Not surprisingly, grammaticalization is also responsible for diachronic change in sign languages. Drawing on data from a fair number of sign languages, we show that grammaticalization in visual-gestural languages - as far as the development from lexical to grammatical element is concerned - follows the same developmental pathways as in spoken languages. That is, the proposed pathways are modality-independent. Besides these intriguing parallels, however, sign languages have the possibility of developing grammatical markers from manual and non-manual co-speech gestures. We discuss various instances of grammaticalized gestures and briefly address the issue of the modality-specificity of this phenomenon.},
  language  = {en}
}

@phdthesis{Gustafson2013,
  author   = {Gustafson, Sean},
  title    = {Imaginary Interfaces},
  url      = {http://nbn-resolving.de/urn:nbn:de:kobv:517-opus-68960},
  school   = {Universit{\"a}t Potsdam},
  year     = {2013},
  abstract = {The size of a mobile device is primarily determined by the size of its touchscreen. As such, researchers have found that the way to achieve ultimate mobility is to abandon the screen altogether. These wearable devices are operated using hand gestures, voice commands, or a small number of physical buttons. By abandoning the screen, these devices also abandon the currently dominant spatial interaction style (such as tapping on buttons), because, seemingly, there is nothing to tap on.
Unfortunately, this design prevents users from transferring the interaction knowledge they have gained from traditional touchscreen-based devices. In this dissertation, I present Imaginary Interfaces, which return spatial interaction to screenless mobile devices. With these interfaces, users point and draw in the empty space in front of them or on the palm of their hands. While they cannot see the results of their interaction, they obtain some visual and tactile feedback by watching and feeling their hands interact. After introducing the concept of Imaginary Interfaces, I present two hardware prototypes that showcase two different forms of interaction with an imaginary interface, each with its own advantages: mid-air imaginary interfaces can be large and expressive, while palm-based imaginary interfaces offer an abundance of tactile features that encourage learning. Given that imaginary interfaces offer no visual output, one of the key challenges is to enable users to discover the interface's layout. This dissertation offers three main solutions: offline learning with coordinates, browsing with audio feedback, and learning by transfer. The last of these I demonstrate with the Imaginary Phone, a palm-based imaginary interface that mimics the layout of a physical mobile phone that users are already familiar with. Although these designs enable interaction with Imaginary Interfaces, they tell us little about why this interaction is possible. In the final part of this dissertation, I present an exploration of which human perceptual abilities are used when interacting with a palm-based imaginary interface and how much each contributes to performance with the interface. These findings deepen our understanding of Imaginary Interfaces and suggest that palm-based Imaginary Interfaces can enable stand-alone eyes-free use for many applications, including interfaces for visually impaired users.},
  language = {en}
}