@article{ShekharReimannMayeretal.2021,
  author    = {Shekhar, Sumit and Reimann, Max and Mayer, Maximilian and Semmo, Amir and Pasewaldt, Sebastian and D{\"o}llner, J{\"u}rgen and Trapp, Matthias},
  title     = {Interactive photo editing on smartphones via intrinsic decomposition},
  journal   = {Computer Graphics Forum},
  volume    = {40},
  publisher = {Blackwell},
  address   = {Oxford},
  issn      = {0167-7055},
  doi       = {10.1111/cgf.142650},
  pages     = {497--510},
  year      = {2021},
  abstract  = {Intrinsic decomposition refers to the problem of estimating scene characteristics, such as albedo and shading, when one or multiple views of a scene are provided. This inverse problem setting, where multiple unknowns must be solved for given a single known pixel value, is highly under-constrained. When correlated image and depth data are available, intrinsic scene decomposition can be facilitated using depth-based priors; such data are nowadays easy to acquire with high-end smartphones and their built-in depth sensors. In this work, we present a system for intrinsic decomposition of RGB-D images on smartphones and the algorithmic as well as design choices therein. Unlike state-of-the-art methods that assume only diffuse reflectance, we consider both diffuse and specular pixels. For this purpose, we present a novel specularity extraction algorithm based on multi-scale intensity decomposition and chroma inpainting. The diffuse component is then further decomposed into albedo and shading components, using an inertial proximal algorithm for non-convex optimization (iPiano) to ensure albedo sparsity. Our GPU-based visual processing is implemented on iOS via the Metal API and achieves interactive performance on an iPhone 11 Pro. A qualitative evaluation shows that we obtain high-quality outputs; moreover, our proposed approach for specularity removal outperforms state-of-the-art approaches on real-world images, while our albedo and shading layer decomposition is faster than prior work at comparable output quality. Manifold applications such as recoloring, retexturing, relighting, appearance editing, and stylization are shown, each using the intrinsic layers obtained with our method and/or the corresponding depth data.},
  language  = {en}
}

@inproceedings{ReimannKlingbeilPasewaldtetal.2018,
  author    = {Reimann, Max and Klingbeil, Mandy and Pasewaldt, Sebastian and Semmo, Amir and Trapp, Matthias and D{\"o}llner, J{\"u}rgen},
  title     = {MaeSTrO: A Mobile App for Style Transfer Orchestration using Neural Networks},
  booktitle = {International Conference on Cyberworlds (CW)},
  editor    = {Sourin, Alexei and Sourina, Olga},
  publisher = {IEEE},
  address   = {New York},
  isbn      = {978-1-5386-7315-7},
  doi       = {10.1109/CW.2018.00016},
  pages     = {9--16},
  year      = {2018},
  abstract  = {Mobile expressive rendering has gained increasing popularity among users seeking casual creativity through image stylization, and it supports the development of mobile artists as a new user group. In particular, neural style transfer has advanced as a core technology to emulate the characteristics of manifold artistic styles. However, when it comes to creative expression, the technology still faces inherent limitations in providing low-level controls for localized image stylization.
This work enhances state-of-the-art neural style transfer techniques with a generalized user interface and interactive tools that facilitate a creative and localized editing process. We first propose a problem characterization representing the trade-offs between visual quality, run-time performance, and user control. We then present MaeSTrO, a mobile app for the orchestration of neural style transfer techniques using iterative, multi-style generative, and adaptive neural networks that can be locally controlled by on-screen painting metaphors. Initial user tests indicate different levels of satisfaction with the implemented techniques and the interaction design.},
  language  = {en}
}

@article{ReimannKlingbeilPasewaldtetal.2019,
  author    = {Reimann, Max and Klingbeil, Mandy and Pasewaldt, Sebastian and Semmo, Amir and Trapp, Matthias and D{\"o}llner, J{\"u}rgen},
  title     = {Locally controllable neural style transfer on mobile devices},
  journal   = {The Visual Computer},
  volume    = {35},
  number    = {11},
  publisher = {Springer},
  address   = {New York},
  issn      = {0178-2789},
  doi       = {10.1007/s00371-019-01654-1},
  pages     = {1531--1547},
  year      = {2019},
  abstract  = {Mobile expressive rendering has gained increasing popularity among users seeking casual creativity through image stylization, and it supports the development of mobile artists as a new user group. In particular, neural style transfer has advanced as a core technology to emulate the characteristics of manifold artistic styles. However, when it comes to creative expression, the technology still faces inherent limitations in providing low-level controls for localized image stylization. In this work, we first propose a problem characterization of interactive style transfer representing a trade-off between visual quality, run-time performance, and user control. We then present MaeSTrO, a mobile app for the orchestration of neural style transfer techniques using iterative, multi-style generative, and adaptive neural networks that can be locally controlled by on-screen painting metaphors. To this end, we enhance state-of-the-art neural style transfer techniques with mask-based loss terms that can be interactively parameterized via a generalized user interface to facilitate a creative and localized editing process. We report on a usability study and an online survey that demonstrate the ability of our app to transfer styles with improved semantic plausibility.},
  language  = {en}
}

@article{PasewaldtSemmoTrappetal.2014,
  author    = {Pasewaldt, Sebastian and Semmo, Amir and Trapp, Matthias and D{\"o}llner, J{\"u}rgen},
  title     = {Multi-perspective 3D panoramas},
  journal   = {International Journal of Geographical Information Science},
  volume    = {28},
  number    = {10},
  publisher = {Routledge, Taylor \& Francis Group},
  address   = {Abingdon},
  issn      = {1365-8816},
  doi       = {10.1080/13658816.2014.922686},
  pages     = {2030--2051},
  year      = {2014},
  abstract  = {This article presents multi-perspective 3D panoramas that focus on visualizing 3D geovirtual environments (3D GeoVEs) for navigation and exploration tasks. Their key element, a multi-perspective view (MPV), seamlessly combines what is seen from multiple viewpoints into a single image. This approach facilitates the presentation of information for virtual 3D city and landscape models, particularly by reducing occlusions, increasing screen-space utilization, and providing additional context within a single image.
We complement MPVs with cartographic visualization techniques that stylize features according to their semantics and highlight important or prioritized information. In combination, both techniques constitute the core of interactive, multi-perspective 3D panoramas: they offer a wide range of effective means for the visual communication of 3D spatial information, a high degree of customization with respect to cartographic design, and manifold applications in different domains. We discuss design decisions of 3D panoramas for the exploration of and navigation in 3D GeoVEs, and we report on a preliminary user study indicating that 3D panoramas are a promising approach for navigation systems using 3D GeoVEs.},
  language  = {en}
}