@misc{KubatovaHamannKubatetal.2019, author = {Kubatova, Brankica and Hamann, Wolf-Rainer and Kubat, Jiri and Oskinova, Lida}, title = {3D Monte Carlo Radiative Transfer in Inhomogeneous Massive Star Winds}, series = {Radiative signatures from the cosmos}, volume = {519}, journal = {Radiative signatures from the cosmos}, publisher = {Astronomical Society of the Pacific}, address = {San Francisco}, isbn = {978-1-58381-925-8}, issn = {1050-3390}, pages = {209 -- 212}, year = {2019}, abstract = {It has been known for decades that the winds of massive stars are inhomogeneous (i.e., clumped). To properly model observed spectra of massive star winds, it is necessary to incorporate the 3-D nature of clumping into radiative transfer calculations. In this paper we present our full 3-D Monte Carlo radiative transfer code for inhomogeneous expanding stellar winds. We use a set of parameters to describe the dense as well as the rarefied wind components. At the same time, we account for non-monotonic velocity fields. We show how 3-D density and velocity inhomogeneities in the wind strongly affect resonance line formation. We also show how wind clumping can solve the discrepancy between P v and H alpha mass-loss rate diagnostics.}, language = {en} }
@misc{HalfpapSchlosser2019, author = {Halfpap, Stefan and Schlosser, Rainer}, title = {A Comparison of Allocation Algorithms for Partially Replicated Databases}, series = {2019 IEEE 35th International Conference on Data Engineering (ICDE)}, journal = {2019 IEEE 35th International Conference on Data Engineering (ICDE)}, publisher = {IEEE}, address = {New York}, isbn = {978-1-5386-7474-1}, issn = {1084-4627}, doi = {10.1109/ICDE.2019.00226}, pages = {2008 -- 2011}, year = {2019}, abstract = {Increasing demand for analytical processing capabilities can be managed by replication approaches. However, evenly balancing the replicas' workload shares while at the same time minimizing the data replication factor is a highly challenging allocation problem. As optimal solutions are only applicable to small problem instances, effective heuristics are indispensable. In this paper, we test and compare state-of-the-art allocation algorithms for partial replication. By visualizing and exploring their (heuristic) solutions for different benchmark workloads, we are able to derive structural insights and to detect an algorithm's strengths as well as its potential for improvement. Further, our application enables end-to-end evaluations of different allocations to verify their theoretical performance.}, language = {en} }
@misc{MyachykovFischer2019, author = {Myachykov, Andriy and Fischer, Martin H.}, title = {A hierarchical view of abstractness}, series = {Physics of life reviews}, volume = {29}, journal = {Physics of life reviews}, publisher = {Elsevier}, address = {Amsterdam}, issn = {1571-0645}, doi = {10.1016/j.plrev.2019.04.005}, pages = {161 -- 163}, year = {2019}, language = {en} }
@misc{GonzalezLopezPufahl2019, author = {Gonzalez-Lopez, Fernanda and Pufahl, Luise}, title = {A Landscape for Case Models}, series = {Enterprise, Business-Process and Information Systems Modeling}, volume = {352}, journal = {Enterprise, Business-Process and Information Systems Modeling}, publisher = {Springer}, address = {Berlin}, isbn = {978-3-030-20618-5}, issn = {1865-1348}, doi = {10.1007/978-3-030-20618-5_6}, pages = {87 -- 102}, year = {2019}, abstract = {Case Management is a paradigm to support knowledge-intensive processes.
The different approaches developed for modeling these types of processes tend to result in scattered models due to the low abstraction level at which the inherently complex processes are represented. Thus, readability and understandability are more challenging than for traditional process models. By reviewing existing proposals in the field of process overviews and case models, this paper extends a case modeling language, the fragment-based Case Management (fCM) language, with the goal of modeling knowledge-intensive processes at a higher abstraction level to generate a so-called fCM landscape. This proposal is empirically evaluated via an online experiment. Results indicate that interpreting an fCM landscape might be more effective and efficient than interpreting an informationally equivalent case model.}, language = {en} }
@misc{AwasthiKaminskiRappetal.2019, author = {Awasthi, Swapnil and Kaminski, Jakob and Rapp, Michael Armin and Schlagenhauf, Florian and Walter, Henrik and Ruggeri, Barbara and Ripke, Stephan and Schumann, Gunter and Heinz, Andreas}, title = {A neural signature of malleability}, series = {European neuropsychopharmacology : the journal of the European College of Neuropsychopharmacology}, volume = {29}, journal = {European neuropsychopharmacology : the journal of the European College of Neuropsychopharmacology}, publisher = {Elsevier}, address = {Amsterdam}, issn = {0924-977X}, doi = {10.1016/j.euroneuro.2017.08.139}, pages = {S858 -- S859}, year = {2019}, abstract = {General intelligence has a substantial genetic background in children, adolescents, and adults, but environmental factors also strongly correlate with cognitive performance, as evidenced by a strong (up to one SD) increase in average intelligence test results in the second half of the previous century. This change occurred in a period apparently too short to accommodate radical genetic changes. This strongly suggests that environmental factors interact with the genotype, possibly by modifying epigenetic factors that regulate gene expression and thus contribute to individual malleability. Such modification may also be reflected in recent observations of an association between dopamine-dependent encoding of reward prediction errors and cognitive capacity, which was modulated by adverse life events.}, language = {en} }
@misc{Richly2019, author = {Richly, Keven}, title = {A survey on trajectory data management for hybrid transactional and analytical workloads}, series = {IEEE International Conference on Big Data (Big Data)}, journal = {IEEE International Conference on Big Data (Big Data)}, publisher = {IEEE}, address = {New York}, isbn = {978-1-5386-5035-6}, issn = {2639-1589}, doi = {10.1109/BigData.2018.8622394}, pages = {562 -- 569}, year = {2019}, abstract = {Rapid advances in location-acquisition technologies have led to large amounts of trajectory data. This data is the foundation for a broad spectrum of services driven and improved by trajectory data mining. However, for hybrid transactional and analytical workloads, the storing and processing of rapidly accumulated trajectory data is a non-trivial task. In this paper, we present a detailed survey of state-of-the-art trajectory data management systems. To determine the relevant aspects and requirements for such systems, we developed a trajectory data mining framework, which summarizes the different steps in the trajectory data mining process.
Based on the derived requirements, we analyze different concepts to store, compress, index, and process spatio-temporal data. There are various trajectory management systems, optimized for scalability, data footprint reduction, elasticity, or query performance. To get a comprehensive overview, we describe and compare different existing systems. Additionally, the observed similarities in the general structure of different systems are consolidated in a general blueprint of trajectory management systems.}, language = {en} }
@misc{HesseMatthiesSinzigetal.2019, author = {Hesse, Guenter and Matthies, Christoph and Sinzig, Werner and Uflacker, Matthias}, title = {Adding Value by Combining Business and Sensor Data}, series = {Database Systems for Advanced Applications}, volume = {11448}, journal = {Database Systems for Advanced Applications}, publisher = {Springer}, address = {Cham}, isbn = {978-3-030-18590-9}, issn = {0302-9743}, doi = {10.1007/978-3-030-18590-9_80}, pages = {528 -- 532}, year = {2019}, abstract = {Industry 4.0 and the Internet of Things are recent developments that have led to the creation of new kinds of manufacturing data. Linking this new kind of sensor data to traditional business information is crucial for enterprises to take advantage of the data's full potential. In this paper, we present a demo that allows users to experience this data integration, both vertically between technical and business contexts and horizontally along the value chain. The tool simulates a manufacturing company, continuously producing both business and sensor data, and supports issuing ad-hoc queries that answer specific questions related to the business. In order to adapt to different environments, users can configure sensor characteristics to their needs.}, language = {en} }
@misc{Matthies2019, author = {Matthies, Christoph}, title = {Agile process improvement in retrospectives}, series = {41st International Conference on Software Engineering: Companion Proceedings (ICSE-Companion)}, journal = {41st International Conference on Software Engineering: Companion Proceedings (ICSE-Companion)}, publisher = {IEEE}, address = {New York}, isbn = {978-1-7281-1764-5}, issn = {2574-1934}, doi = {10.1109/ICSE-Companion.2019.00063}, pages = {150 -- 152}, year = {2019}, abstract = {Working in iterations and repeatedly improving team workflows based on collected feedback is fundamental to agile software development processes. Scrum, the most popular agile method, provides dedicated retrospective meetings to reflect on the last development iteration and to decide on process improvement actions. However, agile methods do not prescribe in detail how these improvement actions should be identified, managed, or tracked. Approaches for detecting and removing problems in software development processes are therefore often based on intuition and on the prior experiences and perceptions of team members. Previous research in this area has focused on approaches for eliciting a team's improvement opportunities as well as on measurements of the work performed in an iteration, e.g., Scrum burn-down charts. Little research deals with the quality and nature of identified problems or with how progress towards removing issues is measured. In this research, we investigate how agile development teams in the professional software industry organize their feedback and process improvement approaches. In particular, we focus on the structure and content of improvement and reflection meetings, i.e., retrospectives, and their outcomes.
Researching how the vital mechanism of process improvement is implemented in practice in modern software development leads to a more complete picture of agile process improvement.}, language = {en} }
@misc{Ette2019, author = {Ette, Ottmar}, title = {Alexander von Humboldt y Am{\'e}rica Latina}, series = {Iberoamericana}, volume = {19}, journal = {Iberoamericana}, number = {70}, publisher = {Vervuert}, address = {Frankfurt am Main}, issn = {1577-3388}, doi = {10.18441/ibam.19.2019.70.7-8}, pages = {7 -- 8}, year = {2019}, abstract = {Drawing on the whole of Humboldt's work, from its beginnings to the Cosmos, this dossier seeks to highlight the cosmopolitan orientation of the Prussian scholar and, above all, the American foundation of his approaches. For Humboldt, the American continent represents the diversity of the thinkable and the multirelationality of the imaginable: the key to understanding his worldview.}, language = {es} }
@misc{HeinickerLikavcanLin2019, author = {Heinicker, Paul and Likavcan, Lukas and Lin, Qiao}, title = {alt'ai: designing machine-to-machine interfaces for automated landscapes}, publisher = {Association for Computing Machinery}, address = {New York}, isbn = {978-1-4503-6311-2}, doi = {10.1145/3306211.3320146}, pages = {6}, year = {2019}, abstract = {alt'ai is an agent-based simulation inspired by the aesthetics, culture, and environmental conditions of the Altai mountain region on the borders between Russia, Kazakhstan, China, and Mongolia. It is set in a scenario of a remote automated landscape populated by sentient machines, where biological species, machines, and environments interact autonomously to produce unforeseeable visual outputs. It poses the question of how to design future machine-to-machine authentication protocols based on images that encode agent behavior. The simulation also provides a rich visual perspective on this challenge. The project argues for a heavily aestheticized approach to design practice and highlights the importance of productively inefficient and information-redundant systems.}, language = {en} }