@phdthesis{Yang2013, author = {Yang, Haojin}, title = {Automatic video indexing and retrieval using video OCR technology}, address = {Potsdam}, pages = {182 S.}, year = {2013}, language = {en} }

@article{vanHoolandVerborghDeWildeetal.2013, author = {van Hooland, Seth and Verborgh, Ruben and De Wilde, Max and Hercher, Johannes and Mannens, Erik and Van de Walle, Rik}, title = {Evaluating the success of vocabulary reconciliation for cultural heritage collections}, series = {Journal of the American Society for Information Science and Technology}, volume = {64}, journal = {Journal of the American Society for Information Science and Technology}, number = {3}, publisher = {Wiley-Blackwell}, address = {Hoboken}, issn = {1532-2882}, doi = {10.1002/asi.22763}, pages = {464 -- 479}, year = {2013}, abstract = {The concept of Linked Data has made its entrance in the cultural heritage sector due to its potential use for the integration of heterogeneous collections and deriving additional value out of existing metadata. However, practitioners and researchers alike need a better understanding of what outcome they can reasonably expect of the reconciliation process between their local metadata and established controlled vocabularies which are already a part of the Linked Data cloud. This paper offers an in-depth analysis of how a locally developed vocabulary can be successfully reconciled with the Library of Congress Subject Headings (LCSH) and the Arts and Architecture Thesaurus (AAT) through the help of a general-purpose tool for interactive data transformation (OpenRefine). Issues negatively affecting the reconciliation process are identified and solutions are proposed in order to derive maximum value from existing metadata and controlled vocabularies in an automated manner.}, language = {en} }

@article{SteinertCassouHirschfeld2013, author = {Steinert, Bastian and Cassou, Damien and Hirschfeld, Robert}, title = {CoExist: overcoming aversion to change - preserving immediate access to source code and run-time information of previous development states}, series = {ACM SIGPLAN notices}, volume = {48}, journal = {ACM SIGPLAN notices}, number = {2}, publisher = {Association for Computing Machinery}, address = {New York}, issn = {0362-1340}, doi = {10.1145/2480360.2384591}, pages = {107 -- 117}, year = {2013}, abstract = {Programmers make many changes to a program to eventually find a good solution for a given task. In this course of change, every intermediate development state can be of value when, for example, a promising idea suddenly turns out to be inappropriate or the interplay of objects turns out to be more complex than expected before the changes were made. Programmers would benefit from tool support that provides immediate access to the source code and run-time information of previous development states of interest. We present IDE extensions, implemented for Squeak/Smalltalk, to preserve, retrieve, and work with this information. With such tool support, programmers can work without worries because they can rely on tools that help them with whatever their explorations will reveal.
They no longer have to follow certain best practices only to avoid undesired consequences of changing code.}, language = {en} }

@phdthesis{Schaffner2013, author = {Schaffner, Jan}, title = {Multi-tenancy for cloud-based in-memory column databases : workload management and data placement}, address = {Potsdam}, pages = {135 S.}, year = {2013}, language = {en} }

@article{SawadeBickelvonOertzenetal.2013, author = {Sawade, Christoph and Bickel, Steffen and von Oertzen, Timo and Scheffer, Tobias and Landwehr, Niels}, title = {Active evaluation of ranking functions based on graded relevance}, series = {Machine learning}, volume = {92}, journal = {Machine learning}, number = {1}, publisher = {Springer}, address = {Dordrecht}, issn = {0885-6125}, doi = {10.1007/s10994-013-5372-5}, pages = {41 -- 64}, year = {2013}, abstract = {Evaluating the quality of ranking functions is a core task in web search and other information retrieval domains. Because query distributions and item relevance change over time, ranking models often cannot be evaluated accurately on held-out training data. Instead, considerable effort is spent on manually labeling the relevance of query results for test queries in order to track ranking performance. We address the problem of estimating ranking performance as accurately as possible on a fixed labeling budget. Estimates are based on a set of most informative test queries selected by an active sampling distribution. Query labeling costs depend on the number of result items as well as item-specific attributes such as document length. We derive cost-optimal sampling distributions for the commonly used performance measures Discounted Cumulative Gain and Expected Reciprocal Rank. Experiments on web search engine data illustrate significant reductions in labeling costs.}, language = {en} }

@article{RichterKyprianidisDoellner2013, author = {Richter, Rico and Kyprianidis, Jan Eric and D{\"o}llner, J{\"u}rgen Roland Friedrich}, title = {Out-of-core GPU-based change detection in massive 3D point clouds}, series = {Transactions in GIS}, volume = {17}, journal = {Transactions in GIS}, number = {5}, publisher = {Wiley-Blackwell}, address = {Hoboken}, issn = {1361-1682}, doi = {10.1111/j.1467-9671.2012.01362.x}, pages = {724 -- 741}, year = {2013}, abstract = {If sites, cities, and landscapes are captured at different points in time using technology such as LiDAR, large collections of 3D point clouds result. Their efficient storage, processing, analysis, and presentation constitute a challenging task because of limited computation, memory, and time resources. In this work, we present an approach to detect changes in massive 3D point clouds based on an out-of-core spatial data structure that is designed to store data acquired at different points in time and to efficiently attribute 3D points with distance information. Based on this data structure, we present and evaluate different processing schemes optimized for performing the calculation on the CPU and GPU. In addition, we present a point-based rendering technique adapted for attributed 3D point clouds, to enable effective out-of-core real-time visualization of the computation results. Our approach enables conclusions to be drawn about temporal changes in large highly accurate 3D geodata sets of a captured area at reasonable preprocessing and rendering times.
We evaluate our approach with two data sets from different points in time for the urban area of a city, describe its characteristics, and report on applications.}, language = {en} }

@phdthesis{Phusanga2013, author = {Phusanga, Dara}, title = {Derived algebraic systems}, address = {Potsdam}, pages = {81 S.}, year = {2013}, language = {en} }

@phdthesis{Neumann2013, author = {Neumann, Stefan}, title = {Modular timing analysis of component-based real-time embedded systems}, address = {Potsdam}, pages = {218 S.}, year = {2013}, language = {en} }

@article{MontavonBraunKruegeretal.2013, author = {Montavon, Gregoire and Braun, Mikio L. and Kr{\"u}ger, Tammo and M{\"u}ller, Klaus-Robert}, title = {Analyzing local structure in kernel-based learning}, series = {IEEE signal processing magazine}, volume = {30}, journal = {IEEE signal processing magazine}, number = {4}, publisher = {Inst. of Electr. and Electronics Engineers}, address = {Piscataway}, issn = {1053-5888}, doi = {10.1109/MSP.2013.2249294}, pages = {62 -- 74}, year = {2013}, language = {en} }

@unpublished{MargariaHinchey2013, author = {Margaria, Tiziana and Hinchey, Mike}, title = {Simplicity in IT - the power of less}, series = {Computer : innovative technology for computer professionals}, volume = {46}, journal = {Computer : innovative technology for computer professionals}, number = {11}, publisher = {Inst. of Electr. and Electronics Engineers}, address = {Los Alamitos}, issn = {0018-9162}, doi = {10.1109/MC.2013.397}, pages = {23 -- 25}, year = {2013}, abstract = {Simplicity is a mindset, a way of looking at solutions, an extremely wide-ranging philosophical stance on the world, and thus a deeply rooted cultural paradigm. The culture of "less" can be profoundly disruptive, cutting out existing "standard" elements from products and business models, thereby revolutionizing entire markets.}, language = {en} }

@article{KyprianidisCollomosseWangetal.2013, author = {Kyprianidis, Jan Eric and Collomosse, John and Wang, Tinghuai and Isenberg, Tobias}, title = {State of the "Art": a taxonomy of artistic stylization techniques for images and video}, series = {IEEE transactions on visualization and computer graphics}, volume = {19}, journal = {IEEE transactions on visualization and computer graphics}, number = {5}, publisher = {Inst. of Electr. and Electronics Engineers}, address = {Los Alamitos}, issn = {1077-2626}, doi = {10.1109/TVCG.2012.160}, pages = {866 -- 885}, year = {2013}, abstract = {This paper surveys the field of nonphotorealistic rendering (NPR), focusing on techniques for transforming 2D input (images and video) into artistically stylized renderings. We first present a taxonomy of the 2D NPR algorithms developed over the past two decades, structured according to the design characteristics and behavior of each technique. We then describe a chronology of development from the semiautomatic paint systems of the early nineties, through to the automated painterly rendering systems of the late nineties driven by image gradient analysis. Two complementary trends in the NPR literature are then addressed, with reference to our taxonomy. First, the fusion of higher level computer vision and NPR, illustrating the trends toward scene analysis to drive artistic abstraction and diversity of style. Second, the evolution of local processing approaches toward edge-aware filtering for real-time stylization of images and video.
The survey then concludes with a discussion of open challenges for 2D NPR identified in recent NPR symposia, including topics such as user and aesthetic evaluation.}, language = {en} }

@article{KrasnovaVeltriSpengleretal.2013, author = {Krasnova, Hanna and Veltri, Natasha F. and Spengler, Klaus and G{\"u}nther, Oliver}, title = {"Deal of the Day" platforms: what drives consumer loyalty?}, series = {Business \& information systems engineering : the international journal of Wirtschaftsinformatik}, volume = {5}, journal = {Business \& information systems engineering : the international journal of Wirtschaftsinformatik}, number = {3}, publisher = {Springer}, address = {Heidelberg}, issn = {1867-0202}, doi = {10.1007/s12599-013-0268-2}, pages = {165 -- 177}, year = {2013}, abstract = {"Deal of the Day" (DoD) platforms have quickly become popular by offering savings on local services, products and vacations. For merchants, these platforms represent a new marketing channel to advertise their products and services and attract new customers. DoD platform providers, however, struggle to maintain a stable market share and profitability because entry and switching costs are low. To sustain a competitive market position, DoD providers are looking for ways to build a loyal customer base. However, research examining the determinants of user loyalty in this novel context is scarce. To fill this gap, this study employs Grounded Theory methodology to develop a conceptual model of customer loyalty to a DoD provider. In the next step, qualitative insights are enriched and validated using quantitative data from a survey of 202 DoD users. The authors find that customer loyalty is in large part driven by monetary incentives, but can be eroded if impressions from merchant encounters are below expectations. In addition, enhancing the share of deals relevant for consumers, i.e., the signal-to-noise ratio, and mitigating perceived risks of a transaction emerge as challenges. Beyond theoretical value, the results offer practical insights into how customer loyalty to a DoD provider can be promoted.}, language = {en} }

@book{KnobelsdorfKreitz2013, author = {Knobelsdorf, Maria and Kreitz, Christoph}, title = {Ein konstruktivistischer Lehransatz f{\"u}r die Einf{\"u}hrungsveranstaltung der Theoretischen Informatik}, isbn = {978-3-86956-220-9}, year = {2013}, language = {de} }

@article{KiertscherZinkeSchnor2013, author = {Kiertscher, Simon and Zinke, J{\"o}rg and Schnor, Bettina}, title = {CHERUB: power consumption aware cluster resource management}, series = {Cluster computing : the journal of networks, software tools and applications}, volume = {16}, journal = {Cluster computing : the journal of networks, software tools and applications}, number = {1}, publisher = {Springer}, address = {New York}, issn = {1386-7857}, doi = {10.1007/s10586-011-0176-5}, pages = {55 -- 63}, year = {2013}, abstract = {This paper presents an evaluation of ACPI energy saving modes, and deduces the design and implementation of an energy saving daemon for clusters called cherub. The design of the cherub daemon is modular and extensible. Since the only requirement is a central approach for resource management, cherub is suited for Server Load Balancing (SLB) clusters managed by dispatchers like Linux Virtual Server (LVS), as well as for High Performance Computing (HPC) clusters. Our experimental results show that cherub's scheduling algorithm works well, i.e.,
it saves energy when possible and avoids state-flapping.}, language = {en} }

@article{KaminskiSchaubSiegeletal.2013, author = {Kaminski, Roland and Schaub, Torsten H. and Siegel, Anne and Videla, Santiago}, title = {Minimal intervention strategies in logical signaling networks with ASP}, series = {Theory and practice of logic programming}, volume = {13}, journal = {Theory and practice of logic programming}, publisher = {Cambridge Univ. Press}, address = {New York}, issn = {1471-0684}, doi = {10.1017/S1471068413000422}, pages = {675 -- 690}, year = {2013}, abstract = {Proposing relevant perturbations to biological signaling networks is central to many problems in biology and medicine because it allows for enabling or disabling certain biological outcomes. In contrast to quantitative methods that permit fine-grained (kinetic) analysis, qualitative approaches allow for addressing large-scale networks. This is accomplished by more abstract representations such as logical networks. We elaborate upon such a qualitative approach aiming at the computation of minimal interventions in logical signaling networks relying on Kleene's three-valued logic and fixpoint semantics. We address this problem within answer set programming and show that it greatly outperforms previous work using dedicated algorithms.}, language = {en} }

@phdthesis{Hentschel2013, author = {Hentschel, Uwe}, title = {Steuerung der Daten{\"u}bertragung in {\"o}ffentlichen zellularen Funknetzen im Kontext telemedizinischer Anwendung}, address = {Potsdam}, pages = {224 S.}, year = {2013}, language = {de} }

@article{GuziolowskiVidelaEduatietal.2013, author = {Guziolowski, Carito and Videla, Santiago and Eduati, Federica and Thiele, Sven and Cokelaer, Thomas and Siegel, Anne and Saez-Rodriguez, Julio}, title = {Exhaustively characterizing feasible logic models of a signaling network using Answer Set Programming}, series = {Bioinformatics}, volume = {29}, journal = {Bioinformatics}, number = {18}, publisher = {Oxford Univ. Press}, address = {Oxford}, issn = {1367-4803}, doi = {10.1093/bioinformatics/btt393}, pages = {2320 -- 2326}, year = {2013}, abstract = {Motivation: Logic modeling is a useful tool to study signal transduction across multiple pathways. Logic models can be generated by training a network containing the prior knowledge to phospho-proteomics data. The training can be performed using stochastic optimization procedures, but these are unable to guarantee a global optimum or to report the complete family of feasible models. This, however, is essential to provide precise insight into the mechanisms underlying signal transduction and to generate reliable predictions. Results: We propose the use of Answer Set Programming to explore exhaustively the space of feasible logic models. Toward this end, we have developed caspo, an open-source Python package that provides a powerful platform to learn and characterize logic models by leveraging the rich modeling language and solving technologies of Answer Set Programming. We illustrate the usefulness of caspo by revisiting a model of pro-growth and inflammatory pathways in liver cells. We show that, if experimental error is taken into account, there are thousands (11 700) of models compatible with the data. Despite the large number, we can extract structural features from the models, such as links that are always (or never) present or modules that appear in a mutually exclusive fashion. To further characterize this family of models, we investigate the input-output behavior of the models.
We find 91 behaviors across the 11 700 models and we suggest new experiments to discriminate among them. Our results underscore the importance of characterizing in a global and exhaustive manner the family of feasible models, with important implications for experimental design.}, language = {en} }

@phdthesis{Gros2013, author = {Gros, Oliver}, title = {Computergest{\"u}tzte Wissensextraktion aus Befundtexten der Pathologie}, address = {Potsdam}, pages = {170 S.}, year = {2013}, language = {de} }

@article{GebserSchaub2013, author = {Gebser, Martin and Schaub, Torsten H.}, title = {Tableau calculi for logic programs under answer set semantics}, series = {ACM transactions on computational logic}, volume = {14}, journal = {ACM transactions on computational logic}, number = {2}, publisher = {Association for Computing Machinery}, address = {New York}, issn = {1529-3785}, doi = {10.1145/2480759.2480767}, pages = {40}, year = {2013}, abstract = {We introduce formal proof systems based on tableau methods for analyzing computations in Answer Set Programming (ASP). Our approach furnishes fine-grained instruments for characterizing operations as well as strategies of ASP solvers. The granularity is detailed enough to capture a variety of propagation and choice methods of algorithms used for ASP solving, also incorporating SAT-based and conflict-driven learning approaches to some extent. This provides us with a uniform setting for identifying and comparing fundamental properties of ASP solving approaches. In particular, we investigate their proof complexities and show that the run-times of best-case computations can vary exponentially between different existing ASP solvers. Apart from providing a framework for comparing ASP solving approaches, our characterizations also contribute to their understanding by pinning down the constitutive atomic operations. Furthermore, our framework is flexible enough to integrate new inference patterns, and so to study their relation to existing ones. To this end, we generalize our approach and provide an extensible basis aiming at a modular incorporation of additional language constructs. This is exemplified by augmenting our basic tableau methods with cardinality constraints and disjunctions.}, language = {en} }

@article{FloydBosselmann2013, author = {Floyd, Barry D. and Bosselmann, Steve}, title = {ITSy - simplicity research in information and communication technology}, series = {Computer : innovative technology for computer professionals}, volume = {46}, journal = {Computer : innovative technology for computer professionals}, number = {11}, publisher = {Inst. of Electr. and Electronics Engineers}, address = {Los Alamitos}, issn = {0018-9162}, pages = {26 -- 32}, year = {2013}, abstract = {Basic to information and communication technology design, simplicity as a driving concept receives little formal attention from the ICT community. A recent literature review and survey of scholars, researchers, and practitioners conducted through the Information Technology Simply Works (ITSy) European Support Action reveals key findings about current perceptions of and future directions for simplicity in ICT.}, language = {en} }