@article{NowickiBindschadlerAbeOuchietal.2013,
  author    = {Nowicki, Sophie and Bindschadler, Robert A. and Abe-Ouchi, Ayako and Aschwanden, Andy and Bueler, Ed and Choi, Hyeungu and Fastook, Jim and Granzow, Glen and Greve, Ralf and Gutowski, Gail and Herzfeld, Ute and Jackson, Charles and Johnson, Jesse and Khroulev, Constantine and Larour, Eric and Levermann, Anders and Lipscomb, William H. and Martin, Maria A. and Morlighem, Mathieu and Parizek, Byron R. and Pollard, David and Price, Stephen F. and Ren, Diandong and Rignot, Eric and Saito, Fuyuki and Sato, Tatsuru and Seddik, Hakime and Seroussi, Helene and Takahashi, Kunio and Walker, Ryan and Wang, Wei Li},
  title     = {Insights into spatial sensitivities of ice mass response to environmental change from the {SeaRISE} ice sheet modeling project {II} {Greenland}},
  journal   = {Journal of Geophysical Research: Earth Surface},
  volume    = {118},
  number    = {2},
  publisher = {American Geophysical Union},
  address   = {Washington},
  issn      = {2169-9003},
  doi       = {10.1002/jgrf.20076},
  pages     = {1025--1044},
  year      = {2013},
  abstract  = {The Sea-level Response to Ice Sheet Evolution (SeaRISE) effort explores the sensitivity of the current generation of ice sheet models to external forcing to gain insight into the potential future contribution to sea level from the Greenland and Antarctic ice sheets. All participating models simulated the ice sheet response to three types of external forcings: a change in oceanic condition, a warmer atmospheric environment, and enhanced basal lubrication. Here an analysis of the spatial response of the Greenland ice sheet is presented, and the impact of model physics and spin-up on the projections is explored. Although the modeled responses are not always homogeneous, consistent spatial trends emerge from the ensemble analysis, indicating distinct vulnerabilities of the Greenland ice sheet. There are clear response patterns associated with each forcing, and a similar mass loss at the full ice sheet scale will result in different mass losses at the regional scale, as well as distinct thickness changes over the ice sheet. All forcings lead to an increased mass loss for the coming centuries, with increased basal lubrication and warmer ocean conditions affecting mainly outlet glaciers, while the impacts of atmospheric forcings affect the whole ice sheet.},
  language  = {en}
}

@phdthesis{Holz2013,
  author   = {Holz, Christian},
  title    = {{3D} from {2D} touch},
  url      = {http://nbn-resolving.de/urn:nbn:de:kobv:517-opus-67796},
  school   = {Universit{\"a}t Potsdam},
  year     = {2013},
  abstract = {While interaction with computers used to be dominated by mice and keyboards, new types of sensors now allow users to interact through touch, speech, or using their whole body in 3D space. These new interaction modalities are often referred to as "natural user interfaces" or "NUIs." While 2D NUIs have experienced major success on billions of mobile touch devices sold, 3D NUI systems have so far been unable to deliver a mobile form factor, mainly due to their use of cameras. The fact that cameras require a certain distance from the capture volume has prevented 3D NUI systems from reaching the flat form factor mobile users expect. In this dissertation, we address this issue by sensing 3D input using flat 2D sensors. The systems we present observe the input from 3D objects as 2D imprints upon physical contact. By sampling these imprints at very high resolutions, we obtain the objects' textures. In some cases, a texture uniquely identifies a biometric feature, such as the user's fingerprint. In other cases, an imprint stems from the user's clothing, such as when walking on multitouch floors. By analyzing from which part of the 3D object the 2D imprint results, we reconstruct the object's pose in 3D space. While our main contribution is a general approach to sensing 3D input on 2D sensors upon physical contact, we also demonstrate three applications of our approach. (1) We present high-accuracy touch devices that allow users to reliably touch targets that are a third of the size of those on current touch devices. We show that different users and 3D finger poses systematically affect touch sensing, which current devices perceive as random input noise. We introduce a model for touch that compensates for this systematic effect by deriving the 3D finger pose and the user's identity from each touch imprint. We then investigate this systematic effect in detail and explore how users conceptually touch targets. Our findings indicate that users aim by aligning visual features of their fingers with the target. We present a visual model for touch input that eliminates virtually all systematic effects on touch accuracy. (2) From each touch, we identify users biometrically by analyzing their fingerprints. Our prototype Fiberio integrates fingerprint scanning and a display into the same flat surface, solving a long-standing problem in human-computer interaction: secure authentication on touchscreens. Sensing 3D input and authenticating users upon touch allows Fiberio to implement a variety of applications that traditionally require the bulky setups of current 3D NUI systems. (3) To demonstrate the versatility of 3D reconstruction on larger touch surfaces, we present a high-resolution pressure-sensitive floor that resolves the texture of objects upon touch. Using the same principles as before, our system GravitySpace analyzes all imprints and identifies users based on their shoe soles, detects furniture, and enables accurate touch input using feet. By classifying all imprints, GravitySpace detects the users' body parts that are in contact with the floor and then reconstructs their 3D body poses using inverse kinematics. GravitySpace thus enables a range of applications for future 3D NUI systems based on a flat sensor, such as smart rooms in future homes. We conclude this dissertation by projecting into the future of mobile devices. Focusing on the mobility aspect of our work, we explore how NUI devices may one day augment users directly in the form of implanted devices.},
  language = {en}
}