@article{HilseLucke2013,
author = {Hilse, Michael and Lucke, Ulrike},
title = {eLiS - E-Learning in Studienbereichen},
series = {E-Learning Symposium 2012 : Aktuelle Anwendungen, innovative Prozesse und neueste Ergebnisse aus der E-Learning-Praxis ; Potsdam, 17. November 2012},
journal = {E-Learning Symposium 2012 : Aktuelle Anwendungen, innovative Prozesse und neueste Ergebnisse aus der E-Learning-Praxis ; Potsdam, 17. November 2012},
publisher = {Universit{\"a}tsverlag Potsdam},
address = {Potsdam},
doi = {10.25932/publishup-44227},
url = {http://nbn-resolving.de/urn:nbn:de:kobv:517-opus4-442275},
pages = {73 -- 75},
year = {2013},
abstract = {German universities are expanding their e-learning offerings as a service for students and teachers. These offerings vary from faculty to faculty. This article shows how a consistent and personalizable teaching and learning environment (Personal Learning Environment, PLE) can be created through a technical extension of the infrastructure, an adaptation of the organizational structure, and targeted content development, and how this can increase the acceptance of e-learning among teachers and students. Key figures for the quality assurance of e-learning offerings can be derived from the preceding, systematic requirements analysis.},
language = {de}
}

@article{SteinertCassouHirschfeld2013,
author = {Steinert, Bastian and Cassou, Damien and Hirschfeld, Robert},
title = {CoExist: overcoming aversion to change - preserving immediate access to source code and run-time information of previous development states},
series = {ACM SIGPLAN notices},
volume = {48},
journal = {ACM SIGPLAN notices},
number = {2},
publisher = {Association for Computing Machinery},
address = {New York},
issn = {0362-1340},
doi = {10.1145/2480360.2384591},
pages = {107 -- 117},
year = {2013},
abstract = {Programmers make many changes to the program to eventually find a good solution for a given task. In this course of change, every intermediate development state can be of value, for example when a promising idea suddenly turns out to be inappropriate or when the interplay of objects turns out to be more complex than expected before making the changes. Programmers would benefit from tool support that provides immediate access to the source code and run-time information of previous development states of interest. We present IDE extensions, implemented for Squeak/Smalltalk, to preserve, retrieve, and work with this information. With such tool support, programmers can work without worries because they can rely on tools that help them with whatever their explorations will reveal. They no longer have to follow certain best practices only to avoid undesired consequences of changing code.},
language = {en}
}

@article{KiertscherZinkeSchnor2013,
author = {Kiertscher, Simon and Zinke, J{\"o}rg and Schnor, Bettina},
title = {CHERUB: power consumption aware cluster resource management},
series = {Cluster computing : the journal of networks, software tools and applications},
volume = {16},
journal = {Cluster computing : the journal of networks, software tools and applications},
number = {1},
publisher = {Springer},
address = {New York},
issn = {1386-7857},
doi = {10.1007/s10586-011-0176-5},
pages = {55 -- 63},
year = {2013},
abstract = {This paper presents an evaluation of ACPI energy saving modes, and deduces from it the design and implementation of an energy saving daemon for clusters called cherub. The design of the cherub daemon is modular and extensible.
Since the only requirement is a central approach for resource management, cherub is suited for Server Load Balancing (SLB) clusters managed by dispatchers like Linux Virtual Server (LVS), as well as for High Performance Computing (HPC) clusters. Our experimental results show that cherub's scheduling algorithm works well, i.e., it saves energy when possible and avoids state-flapping.},
language = {en}
}

@article{vanHoolandVerborghDeWildeetal.2013,
author = {van Hooland, Seth and Verborgh, Ruben and De Wilde, Max and Hercher, Johannes and Mannens, Erik and Van de Walle, Rik},
title = {Evaluating the success of vocabulary reconciliation for cultural heritage collections},
series = {Journal of the American Society for Information Science and Technology},
volume = {64},
journal = {Journal of the American Society for Information Science and Technology},
number = {3},
publisher = {Wiley-Blackwell},
address = {Hoboken},
issn = {1532-2882},
doi = {10.1002/asi.22763},
pages = {464 -- 479},
year = {2013},
abstract = {The concept of Linked Data has made its entrance in the cultural heritage sector due to its potential use for the integration of heterogeneous collections and deriving additional value out of existing metadata. However, practitioners and researchers alike need a better understanding of what outcome they can reasonably expect of the reconciliation process between their local metadata and established controlled vocabularies which are already a part of the Linked Data cloud. This paper offers an in-depth analysis of how a locally developed vocabulary can be successfully reconciled with the Library of Congress Subject Headings (LCSH) and the Arts and Architecture Thesaurus (AAT) through the help of a general-purpose tool for interactive data transformation (OpenRefine). Issues negatively affecting the reconciliation process are identified and solutions are proposed in order to derive maximum value from existing metadata and controlled vocabularies in an automated manner.},
language = {en}
}

@article{FabianKunzMuelleretal.2013,
author = {Fabian, Benjamin and Kunz, Steffen and M{\"u}ller, Sebastian and G{\"u}nther, Oliver},
title = {Secure federation of semantic information services},
series = {Decision support systems : DSS ; the international journal},
volume = {55},
journal = {Decision support systems : DSS ; the international journal},
number = {1},
publisher = {Elsevier},
address = {Amsterdam},
issn = {0167-9236},
doi = {10.1016/j.dss.2012.05.049},
pages = {385 -- 398},
year = {2013},
abstract = {A fundamental challenge for product-lifecycle management in collaborative value networks is to utilize the vast amount of product information available from heterogeneous sources in order to improve business analytics, decision support, and processes. This becomes even more challenging if those sources are distributed across multiple organizations. Federations of semantic information services, combining service-orientation and semantic technologies, provide a promising solution for this problem. However, without proper measures to establish information security, companies will be reluctant to join an information federation, which could lead to serious adoption barriers. Following the design science paradigm, this paper presents general objectives and a process for designing a secure federation of semantic information services. Furthermore, new as well as established security measures are discussed.
Here, our contributions include an access-control enforcement system for semantic information services and a process for modeling access-control policies across organizations. In addition, a comprehensive security architecture is presented. An implementation of the architecture in the context of an application scenario and several performance experiments demonstrate the practical viability of our approach.},
language = {en}
}

@article{KyprianidisCollomosseWangetal.2013,
author = {Kyprianidis, Jan Eric and Collomosse, John and Wang, Tinghuai and Isenberg, Tobias},
title = {State of the "Art": a taxonomy of artistic stylization techniques for images and video},
series = {IEEE transactions on visualization and computer graphics},
volume = {19},
journal = {IEEE transactions on visualization and computer graphics},
number = {5},
publisher = {Inst. of Electr. and Electronics Engineers},
address = {Los Alamitos},
issn = {1077-2626},
doi = {10.1109/TVCG.2012.160},
pages = {866 -- 885},
year = {2013},
abstract = {This paper surveys the field of nonphotorealistic rendering (NPR), focusing on techniques for transforming 2D input (images and video) into artistically stylized renderings. We first present a taxonomy of the 2D NPR algorithms developed over the past two decades, structured according to the design characteristics and behavior of each technique. We then describe a chronology of development from the semiautomatic paint systems of the early nineties, through to the automated painterly rendering systems of the late nineties driven by image gradient analysis. Two complementary trends in the NPR literature are then addressed, with reference to our taxonomy. First, the fusion of higher level computer vision and NPR, illustrating the trends toward scene analysis to drive artistic abstraction and diversity of style. Second, the evolution of local processing approaches toward edge-aware filtering for real-time stylization of images and video. The survey then concludes with a discussion of open challenges for 2D NPR identified in recent NPR symposia, including topics such as user and aesthetic evaluation.},
language = {en}
}

@article{GebserSchaub2013,
author = {Gebser, Martin and Schaub, Torsten H.},
title = {Tableau calculi for logic programs under answer set semantics},
series = {ACM transactions on computational logic},
volume = {14},
journal = {ACM transactions on computational logic},
number = {2},
publisher = {Association for Computing Machinery},
address = {New York},
issn = {1529-3785},
doi = {10.1145/2480759.2480767},
pages = {40},
year = {2013},
abstract = {We introduce formal proof systems based on tableau methods for analyzing computations in Answer Set Programming (ASP). Our approach furnishes fine-grained instruments for characterizing operations as well as strategies of ASP solvers. The granularity is detailed enough to capture a variety of propagation and choice methods of algorithms used for ASP solving, also incorporating SAT-based and conflict-driven learning approaches to some extent. This provides us with a uniform setting for identifying and comparing fundamental properties of ASP solving approaches. In particular, we investigate their proof complexities and show that the run-times of best-case computations can vary exponentially between different existing ASP solvers. Apart from providing a framework for comparing ASP solving approaches, our characterizations also contribute to their understanding by pinning down the constitutive atomic operations.
Furthermore, our framework is flexible enough to integrate new inference patterns, and so to study their relation to existing ones. To this end, we generalize our approach and provide an extensible basis aiming at a modular incorporation of additional language constructs. This is exemplified by augmenting our basic tableau methods with cardinality constraints and disjunctions.},
language = {en}
}

@article{DelgrandeSchaubTompitsetal.2013,
author = {Delgrande, James and Schaub, Torsten H. and Tompits, Hans and Woltran, Stefan},
title = {A model-theoretic approach to belief change in answer set programming},
series = {ACM transactions on computational logic},
volume = {14},
journal = {ACM transactions on computational logic},
number = {2},
publisher = {Association for Computing Machinery},
address = {New York},
issn = {1529-3785},
doi = {10.1145/2480759.2480766},
pages = {46},
year = {2013},
abstract = {We address the problem of belief change in (nonmonotonic) logic programming under answer set semantics. Our formal techniques are analogous to those of distance-based belief revision in propositional logic. In particular, we build upon the model theory of logic programs furnished by SE interpretations, where an SE interpretation is a model of a logic program in the same way that a classical interpretation is a model of a propositional formula. Hence we extend techniques from the area of belief revision based on distance between models to belief change in logic programs. We first consider belief revision: for logic programs P and Q, the goal is to determine a program R that corresponds to the revision of P by Q, denoted P * Q. We investigate several operators, including (logic program) expansion and two revision operators based on the distance between the SE models of logic programs. It proves to be the case that expansion is an interesting operator in its own right, unlike in classical belief revision where it is relatively uninteresting. Expansion and revision are shown to satisfy a suite of interesting properties; in particular, our revision operators satisfy all or nearly all of the AGM postulates for revision. We next consider approaches for merging a set of logic programs, $P_1, \ldots, P_n$. Again, our formal techniques are based on notions of relative distance between the SE models of the logic programs. Two approaches are examined. The first informally selects for each program $P_i$ those models of $P_i$ that vary the least from models of the other programs. The second approach informally selects those models of a program $P_0$ that are closest to the models of programs $P_1, \ldots, P_n$. In this case, $P_0$ can be thought of as a set of database integrity constraints. We examine these operators with regard to how they satisfy relevant postulate sets. Last, we present encodings for computing the revision as well as the merging of logic programs within the same logic programming framework. This gives rise to a direct implementation of our approach in terms of off-the-shelf answer set solvers. These encodings also reflect the fact that our change operators do not increase the complexity of the base formalism.},
language = {en}
}

@article{KrasnovaVeltriSpengleretal.2013,
author = {Krasnova, Hanna and Veltri, Natasha F.
and Spengler, Klaus and G{\"u}nther, Oliver},
title = {"Deal of the Day" platforms: what drives consumer loyalty?},
series = {Business \& information systems engineering : the international journal of Wirtschaftsinformatik},
volume = {5},
journal = {Business \& information systems engineering : the international journal of Wirtschaftsinformatik},
number = {3},
publisher = {Springer},
address = {Heidelberg},
issn = {1867-0202},
doi = {10.1007/s12599-013-0268-2},
pages = {165 -- 177},
year = {2013},
abstract = {"Deal of the Day" (DoD) platforms have quickly become popular by offering savings on local services, products and vacations. For merchants, these platforms represent a new marketing channel to advertise their products and services and attract new customers. DoD platform providers, however, struggle to maintain a stable market share and profitability, because entry and switching costs are low. To sustain a competitive market position, DoD providers are looking for ways to build a loyal customer base. However, research examining the determinants of user loyalty in this novel context is scarce. To fill this gap, this study employs Grounded Theory methodology to develop a conceptual model of customer loyalty to a DoD provider. In the next step, qualitative insights are enriched and validated using quantitative data from a survey of 202 DoD users. The authors find that customer loyalty is in large part driven by monetary incentives, but can be eroded if impressions from merchant encounters are below expectations. In addition, enhancing the share of deals relevant for consumers, i.e. the signal-to-noise ratio, and mitigating perceived risks of a transaction emerge as challenges. Beyond theoretical value, the results offer practical insights into how customer loyalty to a DoD provider can be promoted.},
language = {en}
}

@article{DelikostidisEngelRetsiosetal.2013,
author = {Delikostidis, Ioannis and Engel, Juri and Retsios, Bas and van Elzakker, Corne P. J. M. and Kraak, Menno-Jan and D{\"o}llner, J{\"u}rgen Roland Friedrich},
title = {Increasing the usability of pedestrian navigation interfaces by means of landmark visibility analysis},
series = {The journal of navigation},
volume = {66},
journal = {The journal of navigation},
number = {4},
publisher = {Cambridge Univ. Press},
address = {New York},
issn = {0373-4633},
doi = {10.1017/S0373463313000209},
pages = {523 -- 537},
year = {2013},
abstract = {Communicating location-specific information to pedestrians is a challenging task which can be aided by user-friendly digital technologies. In this paper, landmark visibility analysis, as a means for developing more usable pedestrian navigation systems, is discussed. Using an algorithmic framework for image-based 3D analysis, this method integrates a 3D city model with identified landmarks and produces raster visibility layers for each one. This output enables an Android phone prototype application to indicate the visibility of landmarks from the user's actual position. Tested in the field, the method achieves sufficient accuracy for the context of use and improves navigation efficiency and effectiveness.},
language = {en}
}