@article{AlSaffar2013, author = {Al-Saffar, Loay Talib Ahmed}, title = {Where girls take the role of boys in CS}, series = {Commentarii informaticae didacticae : (CID)}, journal = {Commentarii informaticae didacticae : (CID)}, number = {5}, publisher = {Universit{\"a}tsverlag Potsdam}, address = {Potsdam}, issn = {1868-0844}, url = {http://nbn-resolving.de/urn:nbn:de:kobv:517-opus-65034}, pages = {149 -- 154}, year = {2013}, abstract = {A survey has been carried out in the Computer Science (CS) department at the University of Baghdad to investigate the attitudes of CS students in a female dominant environment, showing the differences between male and female students in different academic years. We also compare the attitudes of the freshman students of two different cultures (University of Baghdad, Iraq, and the University of Potsdam).}, language = {en} } @article{Arnold2007, author = {Arnold, Holger}, title = {A linearized DPLL calculus with learning}, url = {http://nbn-resolving.de/urn:nbn:de:kobv:517-opus-15421}, year = {2007}, abstract = {This paper describes the proof calculus LD for clausal propositional logic, which is a linearized form of the well-known DPLL calculus extended by clause learning. It is motivated by the demand to model how current SAT solvers built on clause learning are working, while abstracting from decision heuristics and implementation details. The calculus is proved sound and terminating. Further, it is shown that both the original DPLL calculus and the conflict-directed backtracking calculus with clause learning, as it is implemented in many current SAT solvers, are complete and proof-confluent instances of the LD calculus.}, language = {en} } @article{BarnesKennewell2015, author = {Barnes, Jan and Kennewell, Steve}, title = {Teacher Perceptions of Key Competencies in ICT}, series = {KEYCIT 2014 - Key Competencies in Informatics and ICT}, journal = {KEYCIT 2014 - Key Competencies in Informatics and ICT}, number = {7}, publisher = {Universit{\"a}tsverlag Potsdam}, address = {Potsdam}, issn = {1868-0844}, url = {http://nbn-resolving.de/urn:nbn:de:kobv:517-opus4-82604}, pages = {61 -- 75}, year = {2015}, abstract = {Regardless of what is intended by government curriculum specifications and advised by educational experts, the competencies taught and learned in and out of classrooms can vary considerably. In this paper, we discuss in particular how we can investigate the perceptions that individual teachers have of competencies in ICT, and how these and other factors may influence students' learning. We report case study research which identifies contradictions within the teaching of ICT competencies as an activity system, highlighting issues concerning the object of the curriculum, the roles of the participants and the school cultures. In a particular case, contradictions in the learning objectives between higher order skills and the use of application tools have been resolved by a change in the teacher's perceptions which have not led to changes in other aspects of the activity system. 
We look forward to further investigation of the effects of these contradictions in other case studies and on forthcoming curriculum change.}, language = {en} } @article{Blaese2014, author = {Blaese, Leif}, title = {Data mining for unidentified protein sequences}, series = {Process design for natural scientists: an agile model-driven approach}, journal = {Process design for natural scientists: an agile model-driven approach}, number = {500}, publisher = {Springer}, address = {Berlin}, isbn = {978-3-662-45005-5}, issn = {1865-0929}, pages = {73 -- 87}, year = {2014}, abstract = {Through the use of next generation sequencing (NGS) technology, a lot of newly sequenced organisms are now available. Annotating those genes is one of the most challenging tasks in sequence biology. Here, we present an automated workflow to find homologue proteins, annotate sequences according to function and create a three-dimensional model.}, language = {en} } @article{BordihnHolzer2021, author = {Bordihn, Henning and Holzer, Markus}, title = {On the number of active states in finite automata}, series = {Acta informatica}, volume = {58}, journal = {Acta informatica}, number = {4}, publisher = {Springer}, address = {Berlin ; Heidelberg [u.a.]}, issn = {0001-5903}, doi = {10.1007/s00236-021-00397-8}, pages = {301 -- 318}, year = {2021}, abstract = {We introduce a new measure of descriptional complexity on finite automata, called the number of active states. Roughly speaking, the number of active states of an automaton A on input w counts the number of different states visited during the most economic computation of the automaton A for the word w. This concept generalizes to finite automata and regular languages in a straightforward way. We show that the number of active states of both finite automata and regular languages is computable, even with respect to nondeterministic finite automata. We further compare the number of active states to related measures for regular languages. In particular, we show incomparability to the radius of regular languages and that the difference between the number of active states and the total number of states needed in finite automata for a regular language can be of exponential order.}, language = {en} } @article{BordihnVaszil2020, author = {Bordihn, Henning and Vaszil, Gy{\"o}rgy}, title = {Deterministic Lindenmayer systems with dynamic control of parallelism}, series = {International journal of foundations of computer science}, volume = {31}, journal = {International journal of foundations of computer science}, number = {1}, publisher = {World Scientific}, address = {Singapore}, issn = {0129-0541}, doi = {10.1142/S0129054120400031}, pages = {37 -- 51}, year = {2020}, abstract = {M-rate 0L systems are interactionless Lindenmayer systems together with a function assigning to every string a set of multisets of productions that may be applied simultaneously to the string.
Some questions that have been left open in the forerunner papers are examined, and the computational power of deterministic M-rate 0L systems is investigated, where also tabled and extended variants are taken into consideration.}, language = {en} } @article{BordihnVaszil2021, author = {Bordihn, Henning and Vaszil, Gy{\"o}rgy}, title = {Reversible parallel communicating finite automata systems}, series = {Acta informatica}, volume = {58}, journal = {Acta informatica}, number = {4}, publisher = {Springer}, address = {Berlin ; Heidelberg ; New York, NY}, issn = {0001-5903}, doi = {10.1007/s00236-021-00396-9}, pages = {263 -- 279}, year = {2021}, abstract = {We study the concept of reversibility in connection with parallel communicating systems of finite automata (PCFA in short). We define the notion of reversibility in the case of PCFA (also covering the non-deterministic case) and discuss the relationship of the reversibility of the systems and the reversibility of its components. We show that a system can be reversible with non-reversible components, and the other way around, the reversibility of the components does not necessarily imply the reversibility of the system as a whole. We also investigate the computational power of deterministic centralized reversible PCFA. We show that these very simple types of PCFA (returning or non-returning) can recognize regular languages which cannot be accepted by reversible (deterministic) finite automata, and that they can even accept languages that are not context-free. We also separate the deterministic and non-deterministic variants in the case of systems with non-returning communication. We show that there are languages accepted by non-deterministic centralized PCFA, which cannot be recognized by any deterministic variant of the same type.}, language = {en} } @article{BottinoChioccariello2015, author = {Bottino, Rosa and Chioccariello, Augusto}, title = {Computational Thinking}, series = {KEYCIT 2014 - Key Competencies in Informatics and ICT}, journal = {KEYCIT 2014 - Key Competencies in Informatics and ICT}, number = {7}, publisher = {Universit{\"a}tsverlag Potsdam}, address = {Potsdam}, issn = {1868-0844}, url = {http://nbn-resolving.de/urn:nbn:de:kobv:517-opus4-82820}, pages = {301 -- 309}, year = {2015}, abstract = {Digital technology has radically changed the way people work in industry, finance, services, media and commerce. Informatics has contributed to the scientific and technological development of our society in general and to the digital revolution in particular. Computational thinking is the term indicating the key ideas of this discipline that might be included in the key competencies underlying the curriculum of compulsory education. The educational potential of informatics has a history dating back to the sixties. In this article, we briefly revisit this history looking for lessons learned. In particular, we focus on experiences of teaching and learning programming. However, computational thinking is more than coding. It is a way of thinking and practicing interactive dynamic modeling with computers. We advocate that learners can practice computational thinking in playful contexts where they can develop personal projects, for example building videogames and/or robots, share and discuss their construction with others. 
In our view, this approach allows an integration of computational thinking in the K-12 curriculum across disciplines.}, language = {en} } @article{BredeBotta2021, author = {Brede, Nuria and Botta, Nicola}, title = {On the correctness of monadic backward induction}, series = {Journal of functional programming}, volume = {31}, journal = {Journal of functional programming}, publisher = {Cambridge University Press}, address = {Cambridge}, issn = {1469-7653}, doi = {10.1017/S0956796821000228}, pages = {39}, year = {2021}, abstract = {In control theory, to solve a finite-horizon sequential decision problem (SDP) commonly means to find a list of decision rules that result in an optimal expected total reward (or cost) when taking a given number of decision steps. SDPs are routinely solved using Bellman's backward induction. Textbook authors (e.g. Bertsekas or Puterman) typically give more or less formal proofs to show that the backward induction algorithm is correct as solution method for deterministic and stochastic SDPs. Botta, Jansson and Ionescu propose a generic framework for finite horizon, monadic SDPs together with a monadic version of backward induction for solving such SDPs. In monadic SDPs, the monad captures a generic notion of uncertainty, while a generic measure function aggregates rewards. In the present paper, we define a notion of correctness for monadic SDPs and identify three conditions that allow us to prove a correctness result for monadic backward induction that is comparable to textbook correctness proofs for ordinary backward induction. The conditions that we impose are fairly general and can be cast in category-theoretical terms using the notion of Eilenberg-Moore algebra. They hold in familiar settings like those of deterministic or stochastic SDPs, but we also give examples in which they fail. Our results show that backward induction can safely be employed for a broader class of SDPs than usually treated in textbooks. However, they also rule out certain instances that were considered admissible in the context of Botta et al. 's generic framework. Our development is formalised in Idris as an extension of the Botta et al. framework and the sources are available as supplementary material.}, language = {en} } @article{BrewkaEllmauthalerKernIsberneretal.2018, author = {Brewka, Gerhard and Ellmauthaler, Stefan and Kern-Isberner, Gabriele and Obermeier, Philipp and Ostrowski, Max and Romero, Javier and Schaub, Torsten H. and Schieweck, Steffen}, title = {Advanced solving technology for dynamic and reactive applications}, series = {K{\"u}nstliche Intelligenz}, volume = {32}, journal = {K{\"u}nstliche Intelligenz}, number = {2-3}, publisher = {Springer}, address = {Heidelberg}, issn = {0933-1875}, doi = {10.1007/s13218-018-0538-8}, pages = {199 -- 200}, year = {2018}, language = {en} } @article{BroekerKastensMagenheim2015, author = {Br{\"o}ker, Kathrin and Kastens, Uwe and Magenheim, Johannes}, title = {Competences of Undergraduate Computer Science Students}, series = {KEYCIT 2014 - Key Competencies in Informatics and ICT}, journal = {KEYCIT 2014 - Key Competencies in Informatics and ICT}, number = {7}, publisher = {Universit{\"a}tsverlag Potsdam}, address = {Potsdam}, issn = {1868-0844}, url = {http://nbn-resolving.de/urn:nbn:de:kobv:517-opus4-82613}, pages = {77 -- 96}, year = {2015}, abstract = {The paper presents two approaches to the development of a Computer Science Competence Model for the needs of curriculum development and evaluation in Higher Education. 
A normative-theoretical approach is based on the AKT and ACM/IEEE curriculum and will be used within the recommendations of the German Informatics Society (GI) for the design of CS curricula. An empirically oriented approach refines the categories of the first one with regard to specific subject areas by conducting content analysis on CS curricula of important universities from several countries. The refined model will be used for the needs of students' e-assessment and subsequent affirmative action of the CS departments.}, language = {en} } @article{Buechner2015, author = {B{\"u}chner, Steffen}, title = {Empirical and Normative Research on Fundamental Ideas of Embedded System Development}, series = {KEYCIT 2014 - Key Competencies in Informatics and ICT}, journal = {KEYCIT 2014 - Key Competencies in Informatics and ICT}, number = {7}, publisher = {Universit{\"a}tsverlag Potsdam}, address = {Potsdam}, issn = {1868-0844}, url = {http://nbn-resolving.de/urn:nbn:de:kobv:517-opus4-82949}, pages = {393 -- 396}, year = {2015}, language = {en} } @article{ChenLangeAndjelkovicetal.2022, author = {Chen, Junchao and Lange, Thomas and Andjelkovic, Marko and Simevski, Aleksandar and Lu, Li and Krstić, Miloš}, title = {Solar particle event and single event upset prediction from SRAM-based monitor and supervised machine learning}, series = {IEEE transactions on emerging topics in computing / IEEE Computer Society, Institute of Electrical and Electronics Engineers}, volume = {10}, journal = {IEEE transactions on emerging topics in computing / IEEE Computer Society, Institute of Electrical and Electronics Engineers}, number = {2}, publisher = {Institute of Electrical and Electronics Engineers}, address = {[New York, NY]}, issn = {2168-6750}, doi = {10.1109/TETC.2022.3147376}, pages = {564 -- 580}, year = {2022}, abstract = {The intensity of cosmic radiation may differ over five orders of magnitude within a few hours or days during the Solar Particle Events (SPEs), thus increasing for several orders of magnitude the probability of Single Event Upsets (SEUs) in space-borne electronic systems. Therefore, it is vital to enable the early detection of the SEU rate changes in order to ensure timely activation of dynamic radiation hardening measures. In this paper, an embedded approach for the prediction of SPEs and SRAM SEU rate is presented. The proposed solution combines the real-time SRAM-based SEU monitor, the offline-trained machine learning model and online learning algorithm for the prediction. With respect to the state-of-the-art, our solution brings the following benefits: (1) Use of existing on-chip data storage SRAM as a particle detector, thus minimizing the hardware and power overhead, (2) Prediction of SRAM SEU rate one hour in advance, with the fine-grained hourly tracking of SEU variations during SPEs as well as under normal conditions, (3) Online optimization of the prediction model for enhancing the prediction accuracy during run-time, (4) Negligible cost of hardware accelerator design for the implementation of selected machine learning model and online learning algorithm.
The proposed design is intended for a highly dependable and self-adaptive multiprocessing system employed in space applications, allowing to trigger the radiation mitigation mechanisms before the onset of high radiation levels.}, language = {en} } @article{ChristensenKnezek2015, author = {Christensen, Rhonda and Knezek, Gerald}, title = {The Technology Proficiency Self-Assessment Questionnaire (TPSA)}, series = {KEYCIT 2014 - Key Competencies in Informatics and ICT}, journal = {KEYCIT 2014 - Key Competencies in Informatics and ICT}, number = {7}, publisher = {Universit{\"a}tsverlag Potsdam}, address = {Potsdam}, issn = {1868-0844}, url = {http://nbn-resolving.de/urn:nbn:de:kobv:517-opus4-82838}, pages = {311 -- 318}, year = {2015}, abstract = {The Technology Proficiency Self-Assessment (TPSA) questionnaire has been used for 15 years in the USA and other nations as a self-efficacy measure for proficiencies fundamental to effective technology integration in the classroom learning environment. Internal consistency reliabilities for each of the five-item scales have typically ranged from .73 to .88 for preservice or inservice technology-using teachers. Due to changing technologies used in education, researchers sought to renovate partially obsolete items and extend self-efficacy assessment to new areas, such as social media and mobile learning. Analysis of 2014 data gathered on a new, 34 item version of the TPSA indicates that the four established areas of email, World Wide Web (WWW), integrated applications, and teaching with technology continue to form consistent scales with reliabilities ranging from .81 to .93, while the 14 new items gathered to represent emerging technologies and media separate into two scales, each with internal consistency reliabilities greater than .9. The renovated TPSA is deemed to be worthy of continued use in the teaching with technology context.}, language = {en} } @article{Curzon2015, author = {Curzon, Paul}, title = {Unplugged Computational Thinking for Fun}, series = {KEYCIT 2014 - Key Competencies in Informatics and ICT}, journal = {KEYCIT 2014 - Key Competencies in Informatics and ICT}, number = {7}, publisher = {Universit{\"a}tsverlag Potsdam}, address = {Potsdam}, issn = {1868-0844}, url = {http://nbn-resolving.de/urn:nbn:de:kobv:517-opus4-82575}, pages = {15 -- 27}, year = {2015}, abstract = {Computational thinking is a fundamental skill set that is learned by studying Informatics and ICT. We argue that its core ideas can be introduced in an inspiring and integrated way to both teachers and students using fun and contextually rich cs4fn 'Computer Science for Fun' stories combined with 'unplugged' activities including games and magic tricks. We also argue that understanding people is an important part of computational thinking. 
Computational thinking can be fun for everyone when taught in kinaesthetic ways away from technology.}, language = {en} } @article{DagieneStupuriene2015, author = {Dagiene, Valentina and Stupuriene, Gabriele}, title = {Informatics Education based on Solving Attractive Tasks through a Contest}, series = {KEYCIT 2014 - Key Competencies in Informatics and ICT}, journal = {KEYCIT 2014 - Key Competencies in Informatics and ICT}, number = {7}, publisher = {Universit{\"a}tsverlag Potsdam}, address = {Potsdam}, issn = {1868-0844}, url = {http://nbn-resolving.de/urn:nbn:de:kobv:517-opus4-82626}, pages = {97 -- 115}, year = {2015}, abstract = {The paper discusses the issue of supporting informatics (computer science) education through competitions for lower and upper secondary school students (8-19 years old). Competitions play an important role for learners as a source of inspiration, innovation, and attraction. Running contests in informatics for school students for many years, we have noticed that the students consider the contest experience very engaging and exciting as well as a learning experience. A contest is an excellent instrument to involve students in problem solving activities. An overview of infrastructure and development of an informatics contest from international level to the national one (the Bebras contest on informatics and computer fluency, originated in Lithuania) is presented. The performance of Bebras contests in 23 countries during the last 10 years showed an unexpected and unusually high acceptance by school students and teachers. Many thousands of students participated and got a valuable input in addition to their regular informatics lectures at school. In the paper, the main attention is paid to the developed tasks and analysis of students' task solving results in Lithuania.}, language = {en} } @article{DelgadoKloos2015, author = {Delgado Kloos, Carlos}, title = {What about the Competencies of Educators in the New Era of Digital Education?}, series = {KEYCIT 2014 - Key Competencies in Informatics and ICT}, journal = {KEYCIT 2014 - Key Competencies in Informatics and ICT}, number = {7}, publisher = {Universit{\"a}tsverlag Potsdam}, address = {Potsdam}, issn = {1868-0844}, url = {http://nbn-resolving.de/urn:nbn:de:kobv:517-opus4-83015}, pages = {435 -- 438}, year = {2015}, abstract = {A lot has been published about the competencies needed by students in the 21st century (Ravenscroft et al., 2012). However, equally important are the competencies needed by educators in the new era of digital education. We review the key competencies for educators in light of the new methods of teaching and learning proposed by Massive Open Online Courses (MOOCs) and their on-campus counterparts, Small Private Online Courses (SPOCs).}, language = {en} } @article{DiethelmSyrbe2015, author = {Diethelm, Ira and Syrbe, J{\"o}rn}, title = {Let's talk about CS!}, series = {KEYCIT 2014 - Key Competencies in Informatics and ICT}, journal = {KEYCIT 2014 - Key Competencies in Informatics and ICT}, number = {7}, publisher = {Universit{\"a}tsverlag Potsdam}, address = {Potsdam}, issn = {1868-0844}, url = {http://nbn-resolving.de/urn:nbn:de:kobv:517-opus4-82983}, pages = {411 -- 414}, year = {2015}, abstract = {To communicate about a science is the most important key competence in education for any science. Without communication we cannot teach, so teachers should reflect about the language they use in class properly. 
But the language students and teachers use to communicate about their CS courses is very heterogeneous, inconsistent and deeply influenced by tool names. There is a big lack of research and discussion in CS education regarding the terminology and the role of concepts and tools in our science. We don't have a consistent set of terminology that we agree on to be helpful for learning our science. This makes it nearly impossible to do research on CS competencies as long as we have not agreed on the names we use to describe these. This workshop intends to provide room to fill with discussion and first ideas for future research in this field.}, language = {en} } @article{DimopoulosGebserLuehneetal.2019, author = {Dimopoulos, Yannis and Gebser, Martin and L{\"u}hne, Patrick and Romero Davila, Javier and Schaub, Torsten H.}, title = {plasp 3}, series = {Theory and practice of logic programming}, volume = {19}, journal = {Theory and practice of logic programming}, number = {3}, publisher = {Cambridge Univ. Press}, address = {New York}, issn = {1471-0684}, doi = {10.1017/S1471068418000583}, pages = {477 -- 504}, year = {2019}, abstract = {We describe the new version of the Planning Domain Definition Language (PDDL)-to-Answer Set Programming (ASP) translator plasp. First, it widens the range of accepted PDDL features. Second, it contains novel planning encodings, some inspired by Satisfiability Testing (SAT) planning and others exploiting ASP features such as well-foundedness. All of them are designed for handling multivalued fluents in order to capture both PDDL as well as SAS planning formats. Third, enabled by multishot ASP solving, it offers advanced planning algorithms also borrowed from SAT planning. As a result, plasp provides us with an ASP-based framework for studying a variety of planning techniques in a uniform setting. Finally, we demonstrate in an empirical analysis that these techniques have a significant impact on the performance of ASP planning.}, language = {en} } @article{EllisAbreuEllis2015, author = {Ellis, Jason Brent and Abreu-Ellis, Carla Reis}, title = {Student Perspectives of Social Networking use in Higher Education}, series = {KEYCIT 2014 - Key Competencies in Informatics and ICT}, journal = {KEYCIT 2014 - Key Competencies in Informatics and ICT}, number = {7}, publisher = {Universit{\"a}tsverlag Potsdam}, address = {Potsdam}, issn = {1868-0844}, url = {http://nbn-resolving.de/urn:nbn:de:kobv:517-opus4-82632}, pages = {117 -- 131}, year = {2015}, abstract = {Social networks are currently at the forefront of tools that lend to Personal Learning Environments (PLEs). This study aimed to observe how students perceived PLEs, what they believed were the integral components of social presence when using Facebook as part of a PLE, and to describe student's preferences for types of interactions when using Facebook as part of their PLE. This study used mixed methods to analyze the perceptions of graduate and undergraduate students on the use of social networks, more specifically Facebook as a learning tool. Fifty surveys were returned representing a 65 \% response rate. Survey questions included both closed and open-ended questions. 
Findings suggested that even though students rated themselves relatively well in having requisite technology skills, and 94 \% of students used Facebook primarily for social use, they were hesitant to migrate these skills to academic use because of concerns of privacy, believing that other platforms could fulfil the same purpose, and by not seeing the validity to use Facebook in establishing social presence. What lies at odds with these beliefs is that when asked to identify strategies in Facebook that enabled social presence to occur in academic work, the majority of students identified strategies in five categories that lead to social presence establishment on Facebook during their coursework.}, language = {en} } @article{Frank2012, author = {Frank, Mario}, title = {Axiom relevance decision engine : technical report}, url = {http://nbn-resolving.de/urn:nbn:de:kobv:517-opus-72128}, year = {2012}, abstract = {This document presents an axiom selection technique for classic first order theorem proving based on the relevance of axioms for the proof of a conjecture. It is based on unifiability of predicates and does not need statistical information like symbol frequency. The scope of the technique is the reduction of the set of axioms and the increase of the amount of provable conjectures in a given time. Since the technique generates a subset of the axiom set, it can be used as a preprocessor for automated theorem proving. This technical report describes the conception, implementation and evaluation of ARDE. The selection method, which is based on a breadth-first graph search by unifiability of predicates, is a weakened form of the connection calculus and uses specialised variants of unifiability to speed up the selection. The implementation of the concept is evaluated with comparison to the results of the world championship of theorem provers of the year 2012 (CASC J6). It is shown that both the theorem prover leanCoP which uses the connection calculus and E which uses equality reasoning, can benefit from the selection approach. Also, the evaluation shows that the concept is applicable for theorem proving problems with thousands of formulae and that the selection is independent from the calculus used by the theorem prover.}, language = {en} } @article{GautamZhangLandwehretal.2021, author = {Gautam, Khem Raj and Zhang, Guoqiang and Landwehr, Niels and Adolphs, Julian}, title = {Machine learning for improvement of thermal conditions inside a hybrid ventilated animal building}, series = {Computers and electronics in agriculture : COMPAG online ; an international journal}, volume = {187}, journal = {Computers and electronics in agriculture : COMPAG online ; an international journal}, publisher = {Elsevier Science}, address = {Amsterdam [u.a.]}, issn = {0168-1699}, doi = {10.1016/j.compag.2021.106259}, pages = {10}, year = {2021}, abstract = {In buildings with hybrid ventilation, natural ventilation opening positions (windows), mechanical ventilation rates, heating, and cooling are manipulated to maintain desired thermal conditions. The indoor temperature is regulated solely by ventilation (natural and mechanical) when the external conditions are favorable to save external heating and cooling energy. The ventilation parameters are determined by a rule-based control scheme, which is not optimal. This study proposes a methodology to enable real-time optimum control of ventilation parameters.
We developed offline prediction models to estimate future thermal conditions from the data collected from building in operation. The developed offline model is then used to find the optimal controllable ventilation parameters in real-time to minimize the setpoint deviation in the building. With the proposed methodology, the experimental building's setpoint deviation improved for 87\% of time, on average, by 0.53 degrees C compared to the current deviations.}, language = {en} } @article{GebserKaminskiKaufmannetal.2018, author = {Gebser, Martin and Kaminski, Roland and Kaufmann, Benjamin and L{\"u}hne, Patrick and Obermeier, Philipp and Ostrowski, Max and Romero Davila, Javier and Schaub, Torsten H. and Schellhorn, Sebastian and Wanko, Philipp}, title = {The Potsdam Answer Set Solving Collection 5.0}, series = {K{\"u}nstliche Intelligenz}, volume = {32}, journal = {K{\"u}nstliche Intelligenz}, number = {2-3}, publisher = {Springer}, address = {Heidelberg}, issn = {0933-1875}, doi = {10.1007/s13218-018-0528-x}, pages = {181 -- 182}, year = {2018}, abstract = {The Potsdam answer set solving collection, or Potassco for short, bundles various tools implementing and/or applying answer set programming. The article at hand succeeds an earlier description of the Potassco project published in Gebser et al. (AI Commun 24(2):107-124, 2011). Hence, we concentrate in what follows on the major features of the most recent, fifth generation of the ASP system clingo and highlight some recent resulting application systems.}, language = {en} } @article{GianniniRichterServettoetal.2018, author = {Giannini, Paola and Richter, Tim and Servetto, Marco and Zucca, Elena}, title = {Tracing sharing in an imperative pure calculus}, series = {Science of computer programming}, volume = {172}, journal = {Science of computer programming}, publisher = {Elsevier}, address = {Amsterdam}, issn = {0167-6423}, doi = {10.1016/j.scico.2018.11.007}, pages = {180 -- 202}, year = {2018}, abstract = {We introduce a type and effect system, for an imperative object calculus, which infers sharing possibly introduced by the evaluation of an expression, represented as an equivalence relation among its free variables. This direct representation of sharing effects at the syntactic level allows us to express in a natural way, and to generalize, widely-used notions in literature, notably uniqueness and borrowing. Moreover, the calculus is pure in the sense that reduction is defined on language terms only, since they directly encode store. The advantage of this non-standard execution model with respect to a behaviorally equivalent standard model using a global auxiliary structure is that reachability relations among references are partly encoded by scoping. (C) 2018 Elsevier B.V. All rights reserved.}, language = {en} } @article{GrillenbergerRomeike2015, author = {Grillenberger, Andreas and Romeike, Ralf}, title = {Teaching Data Management}, series = {KEYCIT 2014 - Key Competencies in Informatics and ICT}, journal = {KEYCIT 2014 - Key Competencies in Informatics and ICT}, number = {7}, publisher = {Universit{\"a}tsverlag Potsdam}, address = {Potsdam}, issn = {1868-0844}, url = {http://nbn-resolving.de/urn:nbn:de:kobv:517-opus4-82648}, pages = {133 -- 150}, year = {2015}, abstract = {Data management is a central topic in computer science as well as in computer science education. Within the last years, this topic is changing tremendously, as its impact on daily life becomes increasingly visible. 
Nowadays, everyone not only needs to manage data of various kinds, but also continuously generates large amounts of data. In addition, Big Data and data analysis are intensively discussed in public dialogue because of their influences on society. For the understanding of such discussions and for being able to participate in them, fundamental knowledge on data management is necessary. Especially, being aware of the threats accompanying the ability to analyze large amounts of data in nearly real-time becomes increasingly important. This raises the question, which key competencies are necessary for daily dealings with data and data management. In this paper, we will first point out the importance of data management and of Big Data in daily life. On this basis, we will analyze which are the key competencies everyone needs concerning data management to be able to handle data in a proper way in daily life. Afterwards, we will discuss the impact of these changes in data management on computer science education and in particular database education.}, language = {en} } @article{HaubeltNeubauerSchaubetal.2018, author = {Haubelt, Christian and Neubauer, Kai and Schaub, Torsten H. and Wanko, Philipp}, title = {Design space exploration with answer set programming}, series = {K{\"u}nstliche Intelligenz}, volume = {32}, journal = {K{\"u}nstliche Intelligenz}, number = {2-3}, publisher = {Springer}, address = {Heidelberg}, issn = {0933-1875}, doi = {10.1007/s13218-018-0530-3}, pages = {205 -- 206}, year = {2018}, abstract = {The aim of our project design space exploration with answer set programming is to develop a general framework based on Answer Set Programming (ASP) that finds valid solutions to the system design problem and simultaneously performs Design Space Exploration (DSE) to find the most favorable alternatives. We leverage recent developments in ASP solving that allow for tight integration of background theories to create a holistic framework for effective DSE.}, language = {en} } @article{Haugsbakken2015, author = {Haugsbakken, Halvdan}, title = {The Student Learning Ecology}, series = {KEYCIT 2014 - Key Competencies in Informatics and ICT}, journal = {KEYCIT 2014 - Key Competencies in Informatics and ICT}, number = {7}, publisher = {Universit{\"a}tsverlag Potsdam}, address = {Potsdam}, issn = {1868-0844}, url = {http://nbn-resolving.de/urn:nbn:de:kobv:517-opus4-82659}, pages = {151 -- 169}, year = {2015}, abstract = {Educational research on social media has showed that students use it for socialisation, personal communication, and informal learning. Recent studies have argued that students to some degree use social media to carry out formal schoolwork. This article gives an explorative account on how a small sample of Norwegian high school students use social media to self-organise formal schoolwork. 
This user pattern can be called a "student learning ecology", which is a user perspective on how participating students gain access to learning resources.}, language = {en} } @article{Hibbe2014, author = {Hibbe, Marcel}, title = {Spotlocator - Guess Where the Photo Was Taken!}, series = {Process Design for Natural Scientists: an agile model-driven approach}, journal = {Process Design for Natural Scientists: an agile model-driven approach}, number = {500}, editor = {Lamprecht, Anna-Lena and Margaria, Tiziana}, publisher = {Springer Verlag}, address = {Berlin}, isbn = {978-3-662-45005-5}, issn = {1865-0929}, pages = {149 -- 160}, year = {2014}, abstract = {Spotlocator is a game wherein people have to guess the spots of where photos were taken. The photos of a defined area for each game are from panoramio.com. They are published at http://spotlocator.drupalgardens.com with an ID. Everyone can guess the photo spots by sending a special tweet via Twitter that contains the hashtag \#spotlocator, the guessed coordinates and the ID of the photo. An evaluation is published for all tweets. The players are informed about the distance to the real photo spots and the positions are shown on a map.}, language = {en} } @article{Holler2014, author = {Holler, Robin}, title = {GraffDok - a graffiti documentation application}, series = {Process design for natural scientists: an agile model-driven approach}, journal = {Process design for natural scientists: an agile model-driven approach}, number = {500}, publisher = {Springer}, address = {Berlin}, isbn = {978-3-662-45005-5}, issn = {1865-0929}, pages = {239 -- 251}, year = {2014}, abstract = {GraffDok is an application helping to maintain an overview over sprayed images somewhere in a city. At the time of writing it aims at vandalism rather than at beautiful photographic graffiti in an underpass. Looking at hundreds of tags and scribbles on monuments, house walls, etc. it would be interesting to not only record them in writing but even make them accessible electronically, including images. GraffDok's workflow is simple and only requires an EXIF-GPS-tagged photograph of a graffito. It automatically determines its location by using reverse geocoding with the given GPS-coordinates and the Gisgraphy WebService. While asking the user for some more meta data, GraffDok analyses the image in parallel with this and tries to detect fore- and background - before extracting the drawing lines and make them stand alone. The command line based tool ImageMagick is used here as well as for accessing EXIF data. Any meta data is written to csv-files, which will stay easily accessible and can be integrated in TeX-files as well. The latter ones are converted to PDF at the end of the workflow, containing a table about all graffiti and a summary for each - including the generated characteristic graffiti pattern image.}, language = {en} } @article{HollmannFrohmeEndrullatetal.2020, author = {Hollmann, Susanne and Frohme, Marcus and Endrullat, Christoph and Kremer, Andreas and D'Elia, Domenica and Regierer, Babette and Nechyporenko, Alina}, title = {Ten simple rules on how to write a standard operating procedure}, series = {PLOS Computational Biology}, volume = {16}, journal = {PLOS Computational Biology}, number = {9}, publisher = {PLOS}, address = {San Francisco}, pages = {10}, year = {2020}, abstract = {Research publications and data nowadays should be publicly available on the internet and, theoretically, usable for everyone to develop further research, products, or services.
The long-term accessibility of research data is, therefore, fundamental in the economy of the research production process. However, the availability of data is not sufficient by itself, but also their quality must be verifiable. Measures to ensure reuse and reproducibility need to include the entire research life cycle, from the experimental design to the generation of data, quality control, statistical analysis, interpretation, and validation of the results. Hence, high-quality records, particularly for providing a string of documents for the verifiable origin of data, are essential elements that can act as a certificate for potential users (customers). These records also improve the traceability and transparency of data and processes, therefore, improving the reliability of results. Standards for data acquisition, analysis, and documentation have been fostered in the last decade driven by grassroot initiatives of researchers and organizations such as the Research Data Alliance (RDA). Nevertheless, what is still largely missing in the life science academic research are agreed procedures for complex routine research workflows. Here, well-crafted documentation like standard operating procedures (SOPs) offer clear direction and instructions specifically designed to avoid deviations as an absolute necessity for reproducibility. Therefore, this paper provides a standardized workflow that explains step by step how to write an SOP to be used as a starting point for appropriate research documentation.}, language = {en} } @article{HuangRichterKleickmannetal.2021, author = {Huang, Yizhen and Richter, Eric and Kleickmann, Thilo and Wiepke, Axel and Richter, Dirk}, title = {Classroom complexity affects student teachers' behavior in a VR classroom}, series = {Computers \& education : an international journal}, volume = {163}, journal = {Computers \& education : an international journal}, publisher = {Elsevier}, address = {Oxford}, issn = {0360-1315}, doi = {10.1016/j.compedu.2020.104100}, pages = {15}, year = {2021}, abstract = {Student teachers often struggle to keep track of everything that is happening in the classroom, and particularly to notice and respond when students cause disruptions. The complexity of the classroom environment is a potential contributing factor that has not been empirically tested. In this experimental study, we utilized a virtual reality (VR) classroom to examine whether classroom complexity affects the likelihood of student teachers noticing disruptions and how they react after noticing. Classroom complexity was operationalized as the number of disruptions and the existence of overlapping disruptions (multidimensionality) as well as the existence of parallel teaching tasks (simultaneity). Results showed that student teachers (n = 50) were less likely to notice the scripted disruptions, and also less likely to respond to the disruptions in a comprehensive and effortful manner when facing greater complexity. These results may have implications for both teacher training and the design of VR for training or research purpose. 
This study contributes to the field from two aspects: 1) it revealed how features of the classroom environment can affect student teachers' noticing of and reaction to disruptions; and 2) it extends the functionality of the VR environment-from a teacher training tool to a testbed of fundamental classroom processes that are difficult to manipulate in real-life.}, language = {en} } @article{Jones2015, author = {Jones, Anthony}, title = {ICT Competencies for School Students}, series = {KEYCIT 2014 - Key Competencies in Informatics and ICT}, journal = {KEYCIT 2014 - Key Competencies in Informatics and ICT}, number = {7}, publisher = {Universit{\"a}tsverlag Potsdam}, address = {Potsdam}, issn = {1868-0844}, url = {http://nbn-resolving.de/urn:nbn:de:kobv:517-opus4-82663}, pages = {171 -- 179}, year = {2015}, abstract = {This paper discusses results from a small-scale research study, together with some recently published research into student perceptions of ICT for learning in schools, to consider relevant skills that do not appear to currently being taught. The paper concludes by raising three issues relating to learning with and through ICT that need to be addressed in school curricula and classroom teaching.}, language = {en} } @article{Kalas2015, author = {Kalas, Ivan}, title = {Programming at Pre-primary and Primary Levels}, series = {KEYCIT 2014 - Key Competencies in Informatics and ICT}, journal = {KEYCIT 2014 - Key Competencies in Informatics and ICT}, number = {7}, publisher = {Universit{\"a}tsverlag Potsdam}, address = {Potsdam}, issn = {1868-0844}, url = {http://nbn-resolving.de/urn:nbn:de:kobv:517-opus4-82587}, pages = {29 -- 31}, year = {2015}, language = {en} } @article{KreowskyStabernack2021, author = {Kreowsky, Philipp and Stabernack, Christian Benno}, title = {A full-featured FPGA-based pipelined architecture for SIFT extraction}, series = {IEEE access : practical research, open solutions / Institute of Electrical and Electronics Engineers}, volume = {9}, journal = {IEEE access : practical research, open solutions / Institute of Electrical and Electronics Engineers}, publisher = {Inst. of Electr. and Electronics Engineers}, address = {New York, NY}, issn = {2169-3536}, doi = {10.1109/ACCESS.2021.3104387}, pages = {128564 -- 128573}, year = {2021}, abstract = {Image feature detection is a key task in computer vision. Scale Invariant Feature Transform (SIFT) is a prevalent and well known algorithm for robust feature detection. However, it is computationally demanding and software implementations are not applicable for real-time performance. In this paper, a versatile and pipelined hardware implementation is proposed, that is capable of computing keypoints and rotation invariant descriptors on-chip. All computations are performed in single precision floating-point format which makes it possible to implement the original algorithm with little alteration. Various rotation resolutions and filter kernel sizes are supported for images of any resolution up to ultra-high definition. For full high definition images, 84 fps can be processed. 
Ultra high definition images can be processed at 21 fps.}, language = {en} } @article{Kuntzsch2014, author = {Kuntzsch, Christian}, title = {Visualization of data transfer paths}, series = {Process design for natural scientists: an agile model-driven approach}, journal = {Process design for natural scientists: an agile model-driven approach}, number = {500}, publisher = {Springer}, address = {Berlin}, isbn = {978-3-662-45005-5}, issn = {1865-0929}, pages = {140 -- 148}, year = {2014}, abstract = {A workflow for visualizing server connections using the Google Maps API was built in the jABC. It makes use of three basic services: An XML-based IP address geolocation web service, a command line tool and the Static Maps API. The result of the workflow is an URL leading to an image file of a map, showing server connections between a client and a target host.}, language = {en} } @article{LagriffoulAndres2016, author = {Lagriffoul, Fabien and Andres, Benjamin}, title = {Combining task and motion planning}, series = {The international journal of robotics research}, volume = {35}, journal = {The international journal of robotics research}, number = {8}, publisher = {Sage Science Press}, address = {Thousand Oaks}, issn = {1741-3176}, doi = {10.1177/0278364915619022}, pages = {890 -- 927}, year = {2016}, abstract = {Solving problems combining task and motion planning requires searching across a symbolic search space and a geometric search space. Because of the semantic gap between symbolic and geometric representations, symbolic sequences of actions are not guaranteed to be geometrically feasible. This compels us to search in the combined search space, in which frequent backtracks between symbolic and geometric levels make the search inefficient. We address this problem by guiding symbolic search with rich information extracted from the geometric level through culprit detection mechanisms.}, language = {en} } @article{LaiDavisEickelmannetal.2015, author = {Lai, Kwok-Wing and Davis, Niki and Eickelmann, Birgit and Erstad, Ola and Fisser, Petra and Gibson, David and Khaddage, Ferial and Knezek, Gerald and Webb, Mary}, title = {Tackling Educational Challenges in a Digitally Networked World}, series = {KEYCIT 2014 - Key Competencies in Informatics and ICT}, journal = {KEYCIT 2014 - Key Competencies in Informatics and ICT}, number = {7}, publisher = {Universit{\"a}tsverlag Potsdam}, address = {Potsdam}, issn = {1868-0844}, url = {http://nbn-resolving.de/urn:nbn:de:kobv:517-opus4-82997}, pages = {415 -- 423}, year = {2015}, language = {en} } @article{LamprechtMargaria2014, author = {Lamprecht, Anna-Lena and Margaria, Tiziana}, title = {Scientific Workflows and XMDD}, series = {Process Design for Natural Scientists: an agile model-driven approach}, journal = {Process Design for Natural Scientists: an agile model-driven approach}, number = {500}, editor = {Lamprecht, Anna-Lena and Margaria, Tiziana}, publisher = {Springer Verlag}, address = {Berlin}, isbn = {978-3-662-45005-5}, issn = {1865-0929}, pages = {1 -- 13}, year = {2014}, abstract = {A major part of the scientific experiments that are carried out today requires thorough computational support. While database and algorithm providers face the problem of bundling resources to create and sustain powerful computation nodes, the users have to deal with combining sets of (remote) services into specific data analysis and transformation processes.
Today's attention to "big data" amplifies the issues of size, heterogeneity, and process-level diversity/integration. In the last decade, especially workflow-based approaches to deal with these processes have enjoyed great popularity. This book concerns a particularly agile and model-driven approach to manage scientific workflows that is based on the XMDD paradigm. In this chapter we explain the scope and purpose of the book, briefly describe the concepts and technologies of the XMDD paradigm, explain the principal differences to related approaches, and outline the structure of the book.}, language = {en} } @article{LamprechtMargaria2015, author = {Lamprecht, Anna-Lena and Margaria, Tiziana}, title = {Scientific workflows and XMDD}, series = {Process design for natural scientists}, journal = {Process design for natural scientists}, editor = {Lamprecht, Anna-Lena and Margaria, Tiziana}, publisher = {Springer}, address = {Berlin}, isbn = {978-3-662-45006-2}, pages = {1 -- 13}, year = {2015}, language = {en} } @article{LamprechtMargariaSteffen2014, author = {Lamprecht, Anna-Lena and Margaria, Tiziana and Steffen, Bernhard}, title = {Modeling and Execution of Scientific Workflows with the jABC Framework}, series = {Process Design for Natural Scientists: an agile model-driven approach}, journal = {Process Design for Natural Scientists: an agile model-driven approach}, number = {500}, editor = {Lamprecht, Anna-Lena and Margaria, Tiziana}, publisher = {Springer Verlag}, address = {Berlin}, isbn = {978-3-662-45005-5}, issn = {1865-0929}, pages = {14 -- 29}, year = {2014}, abstract = {We summarize here the main characteristics and features of the jABC framework, used in the case studies as a graphical tool for modeling scientific processes and workflows. As a comprehensive environment for service-oriented modeling and design according to the XMDD (eXtreme Model-Driven Design) paradigm, the jABC offers much more than the pure modeling capability. Associated technologies and plugins provide in fact means for a rich variety of supporting functionality, such as remote service integration, taxonomical service classification, model execution, model verification, model synthesis, and model compilation. We describe here in short both the essential jABC features and the service integration philosophy followed in the environment. In our work over the last years we have seen that this kind of service definition and provisioning platform has the potential to become a core technology in interdisciplinary service orchestration and technology transfer: Domain experts, like scientists not specially trained in computer science, directly define complex service orchestrations as process models and use efficient and complex domain-specific tools in a simple and intuitive way.}, language = {en} } @article{LamprechtWickert2014, author = {Lamprecht, Anna-Lena and Wickert, Alexander}, title = {The Course's SIB Libraries}, series = {Process Design for Natural Scientists: an agile model-driven approach}, journal = {Process Design for Natural Scientists: an agile model-driven approach}, number = {500}, editor = {Lamprecht, Anna-Lena and Margaria, Tiziana}, publisher = {Springer Verlag}, address = {Berlin}, isbn = {978-3-662-45005-5}, issn = {1865-0929}, pages = {30 -- 44}, year = {2014}, abstract = {This chapter gives a detailed description of the service framework underlying all the example projects that form the foundation of this book.
It describes the different SIB libraries that we made available for the course "Process modeling in the natural sciences" to provide the functionality that was required for the envisaged applications. The students used these SIB libraries to realize their projects.}, language = {en} } @article{LamprechtWickertMargaria2014, author = {Lamprecht, Anna-Lena and Wickert, Alexander and Margaria, Tiziana}, title = {Lessons Learned}, series = {Process Design for Natural Scientists: an agile model-driven approach}, journal = {Process Design for Natural Scientists: an agile model-driven approach}, number = {500}, editor = {Lamprecht, Anna-Lena and Margaria, Tiziana}, publisher = {Springer Verlag}, address = {Berlin}, isbn = {978-3-662-45005-5}, issn = {1865-0929}, pages = {45 -- 64}, year = {2014}, abstract = {This chapter summarizes the experience and the lessons we learned concerning the application of the jABC as a framework for design and execution of scientific workflows. It reports experiences from the domain modeling (especially service integration) and workflow design phases and evaluates the resulting models statistically with respect to the SIB library and hierarchy levels.}, language = {en} } @article{LewinMcNicol2015, author = {Lewin, Cathy and McNicol, Sarah}, title = {Supporting the Development of 21st Century Skills through ICT}, series = {KEYCIT 2014 - Key Competencies in Informatics and ICT}, journal = {KEYCIT 2014 - Key Competencies in Informatics and ICT}, number = {7}, publisher = {Universit{\"a}tsverlag Potsdam}, address = {Potsdam}, issn = {1868-0844}, url = {http://nbn-resolving.de/urn:nbn:de:kobv:517-opus4-82672}, pages = {181 -- 198}, year = {2015}, abstract = {The growing impact of globalisation and the development of a 'knowledge society' have led many to argue that 21st century skills are essential for life in twenty-first century society and that ICT is central to their development. This paper describes how 21st century skills, in particular digital literacy, critical thinking, creativity, communication and collaboration skills, have been conceptualised and embedded in the resources developed for teachers in iTEC, a four-year, European project. The effectiveness of this approach is considered in light of the data collected through the evaluation of the pilots, which considers both the potential benefits of using technology to support the development of 21st century skills, but also the challenges of doing so. Finally, the paper discusses the learning support systems required in order to transform pedagogies and embed 21st century skills. It is argued that support is required in standards and assessment; curriculum and instruction; professional development; and learning environments.}, language = {en} } @article{Lis2014, author = {Lis, Monika}, title = {Constructing a Phylogenetic Tree}, series = {Process Design for Natural Scientists: an agile model-driven approach}, journal = {Process Design for Natural Scientists: an agile model-driven approach}, number = {500}, editor = {Lamprecht, Anna-Lena and Margaria, Tiziana}, publisher = {Springer Verlag}, address = {Berlin}, isbn = {978-3-662-45005-5}, issn = {1865-0929}, pages = {101 -- 109}, year = {2014}, abstract = {In this project I constructed a workflow that takes a DNA sequence as input and provides a phylogenetic tree, consisting of the input sequence and other sequences which were found during a database search. In this phylogenetic tree the sequences are arranged depending on similarities.
In bioinformatics, constructing phylogenetic trees is often used to explore the evolutionary relationships of genes or organisms and to understand the mechanisms of evolution itself.}, language = {en} } @article{LutherTiberiusBrem2020, author = {Luther, Laura and Tiberius, Victor and Brem, Alexander}, title = {User experience (UX) in business, management, and psychology}, series = {Multimodal technologies and interaction : open access journal}, volume = {4}, journal = {Multimodal technologies and interaction : open access journal}, number = {2}, publisher = {MDPI}, address = {Basel}, issn = {2414-4088}, doi = {10.3390/mti4020018}, pages = {19}, year = {2020}, abstract = {User Experience (UX) describes the holistic experience of a user before, during, and after interaction with a platform, product, or service. UX adds value and attraction to their sole functionality and is therefore highly relevant for firms. The increased interest in UX has produced a vast amount of scholarly research since 1983. The research field is, therefore, complex and scattered. Conducting a bibliometric analysis, we aim at structuring the field quantitatively and rather abstractly. We employed citation analyses, co-citation analyses, and content analyses to evaluate productivity and impact of extant research. We suggest that future research should focus more on business and management related topics.}, language = {en} } @article{MagenheimNellesNeugebaueretal.2015, author = {Magenheim, Johannes and Nelles, Wolfgang and Neugebauer, Jonas and Ohrndorf, Laura and Schaper, Niclas and Schubert, Sigrid}, title = {Expert Rating of Competence Levels in Upper Secondary Computer Science Education}, series = {KEYCIT 2014 - Key Competencies in Informatics and ICT}, journal = {KEYCIT 2014 - Key Competencies in Informatics and ICT}, number = {7}, publisher = {Universit{\"a}tsverlag Potsdam}, address = {Potsdam}, issn = {1868-0844}, url = {http://nbn-resolving.de/urn:nbn:de:kobv:517-opus4-82683}, pages = {199 -- 216}, year = {2015}, abstract = {In the project MoKoM, which is funded by the German Research Foundation (DFG) from 2008 to 2012, a test instrument measuring students' competences in computer science was developed. This paper presents the results of an expert rating of the levels of students' competences done for the items of the instrument. At first we will describe the difficulty-relevant features that were used for the evaluation. These were deduced from computer science, psychological and didactical findings and resources. Potentials and desiderata of this research method are discussed further on. Finally we will present our conclusions on the results and give an outlook on further steps.}, language = {en} } @article{MagenheimSchubertSchapert2015, author = {Magenheim, Johannes and Schubert, Sigrid and Schaper, Niclas}, title = {Modelling and Measurement of Competencies in Computer Science Education}, series = {KEYCIT 2014 - Key Competencies in Informatics and ICT}, journal = {KEYCIT 2014 - Key Competencies in Informatics and ICT}, number = {7}, publisher = {Universit{\"a}tsverlag Potsdam}, address = {Potsdam}, issn = {1868-0844}, url = {http://nbn-resolving.de/urn:nbn:de:kobv:517-opus4-82592}, pages = {33 -- 57}, year = {2015}, abstract = {As a result of the Bologna reform of educational systems in Europe the outcome orientation of learning processes, competence-oriented descriptions of the curricula and competence-oriented assessment procedures became standard also in Computer Science Education (CSE).
The following keynote addresses important issues of shaping a CSE competence model especially in the area of informatics system comprehension and object-oriented modelling. Objectives and research methodology of the project MoKoM (Modelling and Measurement of Competences in CSE) are explained. Firstly, the CSE competence model was derived based on theoretical concepts and then secondly the model was empirically examined and refined using expert interviews. Furthermore, the paper depicts the development and examination of a competence measurement instrument, which was derived from the competence model. Therefore, the instrument was applied to a large sample of students at the gymnasium's upper class level. Subsequently, efforts to develop a competence level model, based on the retrieved empirical results and on expert ratings are presented. Finally, further demands on research on competence modelling in CSE will be outlined.}, language = {en} } @article{MainaAngondiWaga2015, author = {Maina, Anthony Gioko and Angondi, Enos Kiforo and Waga, Rosemary}, title = {How does the Implementation of a Literacy Learning Tool Kit influence Literacy Skill Acquisition?}, series = {KEYCIT 2014 - Key Competencies in Informatics and ICT}, journal = {KEYCIT 2014 - Key Competencies in Informatics and ICT}, number = {7}, publisher = {Universit{\"a}tsverlag Potsdam}, address = {Potsdam}, issn = {1868-0844}, url = {http://nbn-resolving.de/urn:nbn:de:kobv:517-opus4-82856}, pages = {319 -- 326}, year = {2015}, abstract = {This study aimed at following how teachers transfer skills into results while using ABRA literacy software. This was done in the second part of the pilot study whose aim was to provide equity to control group teachers and students by exposing them to the ABRACADABRA treatment after the end of phase 1. This opportunity was used to follow the phase 1 teachers to see how the skills learned were being transformed into results. A standard three-day initial training and planning session on how to use ABRA to teach literacy was held at the beginning of each phase for ABRA teachers (phase 1 experimental and phase 2 delayed ABRA). Teachers were provided with teaching materials including a tentative ABRA curriculum developed to align with the Kenyan English Language requirements for year 1 and 3 students. Results showed that although there was no significant difference between the groups in vocabulary-related subscales which include word reading and meaning as well as sentence comprehension, students in ABRACADABRA classes improved their scores at a significantly higher rate than students in control classes in comprehension related scores. An average student in the ABRACADABRA group improved by 12 and 16 percentile points respectively compared to their counterparts in the control group.}, language = {en} } @article{Micheuz2015, author = {Micheuz, Peter}, title = {Discussing Educational Standards for Digital Competence and/or Informatics Education at Lower Secondary Level}, series = {KEYCIT 2014 - Key Competencies in Informatics and ICT}, journal = {KEYCIT 2014 - Key Competencies in Informatics and ICT}, number = {7}, publisher = {Universit{\"a}tsverlag Potsdam}, address = {Potsdam}, issn = {1868-0844}, url = {http://nbn-resolving.de/urn:nbn:de:kobv:517-opus4-83008}, pages = {425 -- 431}, year = {2015}, abstract = {Participants of this workshop will be confronted exemplarily with a considerable inconsistency of global Informatics education at lower secondary level. 
More importantly, they are invited to contribute actively to this issue in the form of short case studies from their countries. Until now, very few countries have been successful in implementing Informatics or Computing at primary and lower secondary level. The spectrum from digital literacy to informatics, particularly as a discipline in its own right, has not really achieved a breakthrough and seems to be underrepresented for these age groups. The goal of this workshop is not only to discuss the anamnesis and diagnosis of this fragmented field, but also to discuss and suggest viable forms of therapy in the form of setting educational standards. Making visible good practices in some countries and comparing successful approaches are rewarding tasks for this workshop. Discussing and defining common educational standards on a transcontinental level for the age group of 14- to 15-year-old students in a readable, assessable and acceptable form should keep the participants of this workshop active beyond the limited time at the workshop.}, language = {en} } @article{Noack2014, author = {Noack, Franziska}, title = {CREADED: Colored-Relief application for digital elevation data}, series = {Process design for natural scientists: an agile model-driven approach}, journal = {Process design for natural scientists: an agile model-driven approach}, number = {500}, publisher = {Springer}, address = {Berlin}, isbn = {978-3-662-45005-5}, issn = {1865-0929}, pages = {186 -- 199}, year = {2014}, abstract = {In the geoinformatics field, remote sensing data is often used for analyzing the characteristics of the current investigation area. This includes DEMs, which are simple raster grids containing grey scales representing the respective elevation values. The project CREADED that is presented in this paper aims at making these monochrome raster images more significant and more intuitively interpretable. For this purpose, an executable interactive model for creating a colored and relief-shaded Digital Elevation Model (DEM) has been designed using the jABC framework. The process is based on standard jABC-SIBs and SIBs that provide specific GIS functions, which are available as Web services, command line tools and scripts.}, language = {en} } @article{NylenDoerge2013, author = {Nyl{\´e}n, Aletta and D{\"o}rge, Christina}, title = {Using competencies to structure scientific writing education}, series = {Commentarii informaticae didacticae : (CID)}, journal = {Commentarii informaticae didacticae : (CID)}, number = {5}, publisher = {Universit{\"a}tsverlag Potsdam}, address = {Potsdam}, issn = {1868-0844}, url = {http://nbn-resolving.de/urn:nbn:de:kobv:517-opus-64851}, pages = {33 -- 44}, year = {2013}, abstract = {Scientific writing is an important skill for computer science and computer engineering professionals. In this paper we present a writing concept across the curriculum program directed towards scientific writing. The program is built around a hierarchy of learning outcomes.
The hierarchy is constructed through analyzing the learning outcomes in relation to competencies that are needed to fulfill them.}, language = {en} } @article{Ohrndorf2015, author = {Ohrndorf, Laura}, title = {Assignments in Computer Science Education}, series = {KEYCIT 2014 - Key Competencies in Informatics and ICT}, journal = {KEYCIT 2014 - Key Competencies in Informatics and ICT}, number = {7}, publisher = {Universit{\"a}tsverlag Potsdam}, address = {Potsdam}, issn = {1868-0844}, url = {http://nbn-resolving.de/urn:nbn:de:kobv:517-opus4-82868}, pages = {327 -- 333}, year = {2015}, abstract = {In this paper we describe the recent state of our research project concerning computer science teachers' knowledge on students' cognition. We did a comprehensive analysis of textbooks, curricula and other resources, which give teachers guidance to formulate assignments. In comparison to other subjects there are only a few concepts and strategies taught to prospective computer science teachers in university. We summarize them and give an overview of our empirical approach to measure this knowledge.}, language = {en} } @article{Opel2015, author = {Opel, Simone}, title = {On the Way to a "General Model of Contextualised Computer Science Education"}, series = {KEYCIT 2014 - Key Competencies in Informatics and ICT}, journal = {KEYCIT 2014 - Key Competencies in Informatics and ICT}, number = {7}, publisher = {Universit{\"a}tsverlag Potsdam}, address = {Potsdam}, issn = {1868-0844}, url = {http://nbn-resolving.de/urn:nbn:de:kobv:517-opus4-82953}, pages = {397 -- 400}, year = {2015}, language = {en} } @article{OpelKramerTrommenetal.2015, author = {Opel, Simone and Kramer, Matthias and Trommen, Michael and Pottb{\"a}cker, Florian and Ilaghef, Youssef}, title = {BugHunt}, series = {KEYCIT 2014 - Key Competencies in Informatics and ICT}, journal = {KEYCIT 2014 - Key Competencies in Informatics and ICT}, number = {7}, publisher = {Universit{\"a}tsverlag Potsdam}, address = {Potsdam}, issn = {1868-0844}, url = {http://nbn-resolving.de/urn:nbn:de:kobv:517-opus4-82693}, pages = {217 -- 233}, year = {2015}, abstract = {Competencies related to operating systems and computer security are usually taught systematically. In this paper we present a different approach, in which students have to remove virus-like behaviour on their respective computers, which has been induced by software developed for this purpose. They have to develop appropriate problem-solving strategies and thereby explore essential elements of the operating system. The approach was implemented exemplarily in two computer science courses at a regional general upper secondary school and showed great motivation and interest in the participating students.}, language = {en} } @article{OrBach2015, author = {Or-Bach, Rachel}, title = {Programming for Non-Programmers}, series = {KEYCIT 2014 - Key Competencies in Informatics and ICT}, journal = {KEYCIT 2014 - Key Competencies in Informatics and ICT}, number = {7}, publisher = {Universit{\"a}tsverlag Potsdam}, address = {Potsdam}, issn = {1868-0844}, url = {http://nbn-resolving.de/urn:nbn:de:kobv:517-opus4-82875}, pages = {335 -- 342}, year = {2015}, abstract = {The study reported in this paper involved the employment of specific in-class exercises using a Personal Response System (PRS). These exercises were designed with two goals: to enhance students' capabilities of tracing a given code and of explaining a given code in natural language with some abstraction.
The paper presents evidence from the actual use of the PRS along with students' subjective impressions regarding both the use of the PRS and the special exercises. The conclusions from the findings are followed with a short discussion on benefits of PRS-based mental processing exercises for learning programming and beyond.}, language = {en} } @article{PassigTzurielKedmi2015, author = {Passig, David and Tzuriel, David and Kedmi, Ganit Eshel}, title = {Improving children's Cognitive Modifiability through Mediated Learning and Dynamic Assessment within 3D Immersive Virtual Reality Environment}, series = {KEYCIT 2014 - Key Competencies in Informatics and ICT}, journal = {KEYCIT 2014 - Key Competencies in Informatics and ICT}, number = {7}, publisher = {Universit{\"a}tsverlag Potsdam}, address = {Potsdam}, issn = {1868-0844}, url = {http://nbn-resolving.de/urn:nbn:de:kobv:517-opus4-82705}, pages = {235 -- 252}, year = {2015}, abstract = {The objectives of this study were to examine (a) the effect of dynamic assessment (DA) in a 3D Immersive Virtual Reality (IVR) environment as compared with computerized 2D and noncomputerized (NC) situations on cognitive modifiability, and (b) the transfer effects of these conditions on more difficult problem solving administered two weeks later in a non-computerized environment. A sample of 117 children aged 6:6-9:0 years were randomly assigned into three experimental groups of DA conditions: 3D, 2D, and NC, and one control group (C). All groups received the pre- and post-teaching Analogies subtest of the Cognitive Modifiability Battery (CMB-AN). The experimental groups received a teaching phase in conditions similar to the pre-and post-teaching phases. The findings showed that cognitive modifiability, in a 3D IVR, was distinctively higher than in the two other experimental groups (2D computer group and NC group). It was also found that the 3D group showed significantly higher performance in transfer problems than the 2D and NC groups.}, language = {en} } @article{Petre2013, author = {Petre, Marian}, title = {Computing is not a spectator sport}, series = {Commentarii informaticae didacticae : (CID)}, journal = {Commentarii informaticae didacticae : (CID)}, number = {5}, publisher = {Universit{\"a}tsverlag Potsdam}, address = {Potsdam}, issn = {1868-0844}, url = {http://nbn-resolving.de/urn:nbn:de:kobv:517-opus-65045}, pages = {155 -- 159}, year = {2013}, abstract = {This talk will describe My Digital Life (TU100), a distance learning module that introduces computer science through immediate engagement with ubiquitous computing (ubicomp). This talk will describe some of the principles and concepts we have adopted for this modern computing introduction: the idea of the 'informed digital citizen'; engagement through narrative; playful pedagogy; making the power of ubicomp available to novices; setting technical skills in real contexts. 
It will also trace how the pedagogy is informed by experiences and research in Computer Science education.}, language = {en} } @article{PousttchiGleiss2019, author = {Pousttchi, Key and Gleiß, Alexander}, title = {Surrounded by middlemen - how multi-sided platforms change the insurance industry}, series = {Electron Markets}, volume = {29}, journal = {Electron Markets}, number = {4}, publisher = {Springer}, address = {Heidelberg}, issn = {1019-6781}, doi = {10.1007/s12525-019-00363-w}, pages = {609 -- 629}, year = {2019}, abstract = {Multi-sided platforms (MSP) strongly affect markets and play a crucial part within the digital and networked economy. Although empirical evidence indicates their occurrence in many industries, research has not investigated the game-changing impact of MSP on traditional markets to a sufficient extent. More specifically, we have little knowledge of how MSP affect value creation and customer interaction in entire markets, exploiting the potential of digital technologies to offer new value propositions. Our paper addresses this research gap and provides an initial systematic approach to analyze the impact of MSP on the insurance industry. For this purpose, we analyze the state of the art in research and practice in order to develop a reference model of the value network for the insurance industry. On this basis, we conduct a case-study analysis to discover and analyze roles which are occupied or even newly created by MSP. As a final step, we categorize MSP with regard to their relation to traditional insurance companies, resulting in a classification scheme with four MSP standard types: Competition, Coordination, Cooperation, Collaboration.}, language = {en} } @article{PrestonYounie2015, author = {Preston, Christina and Younie, Sarah}, title = {Mentoring in a Digital World}, series = {KEYCIT 2014 - Key Competencies in Informatics and ICT}, journal = {KEYCIT 2014 - Key Competencies in Informatics and ICT}, number = {7}, publisher = {Universit{\"a}tsverlag Potsdam}, address = {Potsdam}, issn = {1868-0844}, url = {http://nbn-resolving.de/urn:nbn:de:kobv:517-opus4-82895}, pages = {343 -- 350}, year = {2015}, abstract = {This paper focuses on the results of the evaluation of the first pilot of an e-mentoring unit designed by the Hands-On ICT consortium, funded by the EU LLL programme. The overall aim of this two-year activity is to investigate the value for professional learning of Massive Online Open Courses (MOOCs) and Community Online Open Courses (COOCs) in the context of a 'community of practice'. Three units in the first pilot covered aspects of using digital technologies to develop creative thinking skills. The findings in this paper relate to the fourth unit about e-mentoring, a skill that was important to delivering the course content in the other three units. Findings about the e-mentoring unit included: the students' request for detailed profiles so that participants can get to know each other; and, the need to reconcile the different interpretations of e-mentoring held by the participants when the course begins. The evaluators concluded that the major issues were that: not all professional learners would self-organise and network; and few would wish to mentor their colleagues voluntarily. 
Therefore, the e-mentoring issues will need careful consideration in pilots two and three to identify how e-mentoring will be organised.}, language = {en} } @article{PrzybyllaRomeike2015, author = {Przybylla, Mareen and Romeike, Ralf}, title = {Key Competences with Physical Computing}, series = {KEYCIT 2014 - Key Competencies in Informatics and ICT}, journal = {KEYCIT 2014 - Key Competencies in Informatics and ICT}, number = {7}, publisher = {Universit{\"a}tsverlag Potsdam}, address = {Potsdam}, issn = {1868-0844}, url = {http://nbn-resolving.de/urn:nbn:de:kobv:517-opus4-82904}, pages = {351 -- 361}, year = {2015}, abstract = {Physical computing covers the design and realization of interactive objects and installations and allows students to develop concrete, tangible products of the real world that arise from the learners' imagination. This way, constructionist learning is raised to a level that enables students to gain haptic experience and thereby concretizes the virtual. In this paper the defining characteristics of physical computing are described. Key competences to be gained with physical computing will be identified.}, language = {en} } @article{Reso2014, author = {Reso, Judith}, title = {Protein Classification Workflow}, series = {Process Design for Natural Scientists: an agile model-driven approach}, journal = {Process Design for Natural Scientists: an agile model-driven approach}, number = {500}, editor = {Lamprecht, Anna-Lena and Margaria, Tiziana}, publisher = {Springer Verlag}, address = {Berlin}, isbn = {978-3-662-45005-5}, issn = {1865-0929}, pages = {65 -- 72}, year = {2014}, abstract = {The protein classification workflow described in this report enables users to get information about a novel protein sequence automatically. The information is derived by different bioinformatic analysis tools which calculate or predict features of a protein sequence. Also, databases are used to compare the novel sequence with known proteins.}, language = {en} } @article{Respondek2014, author = {Respondek, Tobias}, title = {A workflow for computing potential areas for wind turbines}, series = {Process design for natural scientists: an agile model-driven approach}, journal = {Process design for natural scientists: an agile model-driven approach}, number = {500}, publisher = {Springer}, address = {Berlin}, isbn = {978-3-662-45005-5}, pages = {200 -- 215}, year = {2014}, abstract = {This paper describes the implementation of a workflow model for service-oriented computing of potential areas for wind turbines in jABC. By implementing a re-executable model the manual effort of a multi-criteria site analysis can be reduced. The aim is to determine the shift of typical geoprocessing tools of geographic information systems (GIS) from the desktop to the web. The analysis is based on a vector data set and mainly uses web services of the "Center for Spatial Information Science and Systems" (CSISS).
This paper discusses effort, benefits and problems associated with the use of the web services.}, language = {en} } @article{ReynoldsSwainstonBendrups2015, author = {Reynolds, Nicholas and Swainston, Andrew and Bendrups, Faye}, title = {Music Technology and Computational Thinking}, series = {KEYCIT 2014 - Key Competencies in Informatics and ICT}, journal = {KEYCIT 2014 - Key Competencies in Informatics and ICT}, number = {7}, publisher = {Universit{\"a}tsverlag Potsdam}, address = {Potsdam}, issn = {1868-0844}, url = {http://nbn-resolving.de/urn:nbn:de:kobv:517-opus4-82913}, pages = {363 -- 370}, year = {2015}, abstract = {A project involving the composition of a number of pieces of music by public participants revealed levels of engagement with and mastery of complex music technologies by a number of secondary student volunteers. This paper reports briefly on some initial findings of that project and seeks to illuminate an understanding of computational thinking across the curriculum.}, language = {en} } @article{Saito2015, author = {Saito, Toshinori}, title = {The Key Competencies in Informatics and ICT viewed from Nussbaum's Ten Central Capabilities}, series = {KEYCIT 2014 - Key Competencies in Informatics and ICT}, journal = {KEYCIT 2014 - Key Competencies in Informatics and ICT}, number = {7}, publisher = {Universit{\"a}tsverlag Potsdam}, address = {Potsdam}, issn = {1868-0844}, url = {http://nbn-resolving.de/urn:nbn:de:kobv:517-opus4-82718}, pages = {253 -- 266}, year = {2015}, abstract = {This article shows a discussion about the key competencies in informatics and ICT viewed from a philosophical foundation presented by Martha Nussbaum, which is known as 'ten central capabilities'. Firstly, the outline of 'The Capability Approach', which has been presented by Amartya Sen and Nussbaum as a theoretical framework of assessing the state of social welfare, will be explained. Secondly, the body of Nussbaum's ten central capabilities and the reason for being applied as the basis of discussion will be shown. Thirdly, the relationship between the concept of 'capability' and 'competency' is to be discussed. After that, the author's assumption of the key competencies in informatics and ICT led from the examination of Nussbaum's ten capabilities will be presented.}, language = {en} } @article{SchaubWoltran2018, author = {Schaub, Torsten H. and Woltran, Stefan}, title = {Answer set programming unleashed!}, series = {K{\"u}nstliche Intelligenz}, volume = {32}, journal = {K{\"u}nstliche Intelligenz}, number = {2-3}, publisher = {Springer}, address = {Heidelberg}, issn = {0933-1875}, doi = {10.1007/s13218-018-0550-z}, pages = {105 -- 108}, year = {2018}, abstract = {Answer Set Programming faces an increasing popularity for problem solving in various domains. While its modeling language allows us to express many complex problems in an easy way, its solving technology enables their effective resolution. In what follows, we detail some of the key factors of its success. Answer Set Programming [ASP; Brewka et al. Commun ACM 54(12):92-103, (2011)] is seeing a rapid proliferation in academia and industry due to its easy and flexible way to model and solve knowledge-intense combinatorial (optimization) problems. To this end, ASP offers a high-level modeling language paired with high-performance solving technology. As a result, ASP systems provide out-of-the-box, general-purpose search engines that allow for enumerating (optimal) solutions. 
They are represented as answer sets, each being a set of atoms representing a solution. The declarative approach of ASP allows a user to concentrate on a problem's specification rather than the computational means to solve it. This makes ASP a prime candidate for rapid prototyping and an attractive tool for teaching key AI techniques since complex problems can be expressed in a succinct and elaboration tolerant way. This is eased by the tuning of ASP's modeling language to knowledge representation and reasoning (KRR). The resulting impact is nicely reflected by a growing range of successful applications of ASP [Erdem et al. AI Mag 37(3):53-68, 2016; Falkner et al. Industrial applications of answer set programming. K{\"u}nstliche Intelligenz (2018)]}, language = {en} } @article{Scheele2014, author = {Scheele, Lasse}, title = {Location analysis for placing artificial reefs}, series = {Process design for natural scientists: an agile model-driven approach}, journal = {Process design for natural scientists: an agile model-driven approach}, number = {500}, publisher = {Springer}, address = {Berlin}, isbn = {978-3-662-45005-5}, issn = {1865-0929}, pages = {216 -- 228}, year = {2014}, abstract = {Location analyses are among the most common tasks while working with spatial data and geographic information systems. Automating the most frequently used procedures is therefore an important aspect of improving their usability. In this context, this project aims to design and implement a workflow, providing some basic tools for a location analysis. For the implementation with jABC, the workflow was applied to the problem of finding a suitable location for placing an artificial reef. For this analysis three parameters (bathymetry, slope and grain size of the ground material) were taken into account, processed, and visualized with the Generic Mapping Tools (GMT), which were integrated into the workflow as jETI-SIBs. The implemented workflow thereby showed that the approach to combine jABC with GMT resulted in a user-centric yet user-friendly tool with high-quality cartographic outputs.}, language = {en} } @article{Schiller2015, author = {Schiller, Thomas}, title = {Teaching Information Security (as Part of Key Competencies)}, series = {KEYCIT 2014 - Key Competencies in Informatics and ICT}, journal = {KEYCIT 2014 - Key Competencies in Informatics and ICT}, number = {7}, publisher = {Universit{\"a}tsverlag Potsdam}, address = {Potsdam}, issn = {1868-0844}, url = {http://nbn-resolving.de/urn:nbn:de:kobv:517-opus4-82960}, pages = {401 -- 404}, year = {2015}, abstract = {The poster and abstract describe the importance of teaching information security in school. After a short description of information security and important aspects, I will show how information security fits into different guidelines or models for computer science education and that it is therefore one of the key competencies. 
Afterwards I will present you a rough insight of teaching information security in Austria.}, language = {en} } @article{Schulze2014, author = {Schulze, Gunnar}, title = {Workflow for rapid metagenome analysis}, series = {Process design for natural scientists: an agile model-driven approach}, journal = {Process design for natural scientists: an agile model-driven approach}, number = {500}, publisher = {Springer}, address = {Berlin}, isbn = {978-3-662-45005-5}, issn = {1865-0929}, pages = {88 -- 100}, year = {2014}, abstract = {Analyses of metagenomes in life sciences present new opportunities as well as challenges to the scientific community and call for advanced computational methods and workflows. The large amount of data collected from samples via next-generation sequencing (NGS) technologies render manual approaches to sequence comparison and annotation unsuitable. Rather, fast and efficient computational pipelines are needed to provide comprehensive statistics and summaries and enable the researcher to choose appropriate tools for more specific analyses. The workflow presented here builds upon previous pipelines designed for automated clustering and annotation of raw sequence reads obtained from next-generation sequencing technologies such as 454 and Illumina. Employing specialized algorithms, the sequence reads are processed at three different levels. First, raw reads are clustered at high similarity cutoff to yield clusters which can be exported as multifasta files for further analyses. Independently, open reading frames (ORFs) are predicted from raw reads and clustered at two strictness levels to yield sets of non-redundant sequences and ORF families. Furthermore, single ORFs are annotated by performing searches against the Pfam database}, language = {en} } @article{Schuett2014, author = {Sch{\"u}tt, Christine}, title = {Identification of differentially expressed genes}, series = {Process design for natural scientists: an agile model-driven approach}, journal = {Process design for natural scientists: an agile model-driven approach}, number = {500}, publisher = {Springer}, address = {Berlin}, isbn = {978-3-662-45005-5}, issn = {1865-0929}, pages = {127 -- 139}, year = {2014}, abstract = {With the jABC it is possible to realize workflows for numerous questions in different fields. The goal of this project was to create a workflow for the identification of differentially expressed genes. This is of special interest in biology, for it gives the opportunity to get a better insight in cellular changes due to exogenous stress, diseases and so on. With the knowledge that can be derived from the differentially expressed genes in diseased tissues, it becomes possible to find new targets for treatment.}, language = {en} } @article{Sens2014, author = {Sens, Henriette}, title = {Web-Based map generalization tools put to the test: a jABC workflow}, series = {Process Design for Natural Scientists: an agile model-driven approach}, journal = {Process Design for Natural Scientists: an agile model-driven approach}, number = {500}, publisher = {Springer}, address = {Berlin}, isbn = {978-3-662-45005-5}, issn = {1865-0929}, pages = {175 -- 185}, year = {2014}, abstract = {Geometric generalization is a fundamental concept in the digital mapping process. An increasing amount of spatial data is provided on the web as well as a range of tools to process it. 
This jABC workflow is used for the automatic testing of web-based generalization services like mapshaper.org by executing its functionality, overlaying both datasets before and after the transformation and displaying them visually in a .tif file. Mostly Web Services and command line tools are used to build an environment where ESRI shapefiles can be uploaded, processed through a chosen generalization service and finally visualized in Irfanview.}, language = {en} } @article{SteinertStabernack2022, author = {Steinert, Fritjof and Stabernack, Benno}, title = {Architecture of a low latency H.264/AVC video codec for robust ML based image classification how region of interests can minimize the impact of coding artifacts}, series = {Journal of Signal Processing Systems for Signal, Image, and Video Technology}, volume = {94}, journal = {Journal of Signal Processing Systems for Signal, Image, and Video Technology}, number = {7}, publisher = {Springer}, address = {New York}, issn = {1939-8018}, doi = {10.1007/s11265-021-01727-2}, pages = {693 -- 708}, year = {2022}, abstract = {The use of neural networks is considered as the state of the art in the field of image classification. A large number of different networks are available for this purpose, which, appropriately trained, permit a high level of classification accuracy. Typically, these networks are applied to uncompressed image data, since a corresponding training was also carried out using image data of similar high quality. However, if image data contains image errors, the classification accuracy deteriorates drastically. This applies in particular to coding artifacts which occur due to image and video compression. Typical application scenarios for video compression are narrowband transmission channels for which video coding is required but a subsequent classification is to be carried out on the receiver side. In this paper we present a special H.264/Advanced Video Codec (AVC) based video codec that allows certain regions of a picture to be coded with near constant picture quality in order to allow a reliable classification using neural networks, whereas the remaining image will be coded using constant bit rate. We have combined this feature with the ability to run with lowest latency properties, which is usually also required in remote control applications scenarios. The codec has been implemented as a fully hardwired High Definition video capable hardware architecture which is suitable for Field Programmable Gate Arrays.}, language = {en} } @article{SysłoKwiatkowska2015, author = {Sysło, Maciej M. and Kwiatkowska, Anna Beata}, title = {Think logarithmically!}, series = {KEYCIT 2014 - Key Competencies in Informatics and ICT}, journal = {KEYCIT 2014 - Key Competencies in Informatics and ICT}, number = {7}, publisher = {Universit{\"a}tsverlag Potsdam}, address = {Potsdam}, issn = {1868-0844}, url = {http://nbn-resolving.de/urn:nbn:de:kobv:517-opus4-82923}, pages = {371 -- 380}, year = {2015}, abstract = {We discuss here a number of algorithmic topics which we use in our teaching and in learning of mathematics and informatics to illustrate and document the power of logarithm in designing very efficient algorithms and computations - logarithmic thinking is one of the most important key competencies for solving real world practical problems. We demonstrate also how to introduce logarithm independently of mathematical formalism using a conceptual model for reducing a problem size by at least half. 
It is quite surprising that the idea, which leads to logarithm, is present in Euclid's algorithm described almost 2000 years before John Napier invented logarithm.}, language = {en} } @article{TavakoliAlirezazadehHedayatipouretal.2021, author = {Tavakoli, Hamad and Alirezazadeh, Pendar and Hedayatipour, Ava and Nasib, A. H. Banijamali and Landwehr, Niels}, title = {Leaf image-based classification of some common bean cultivars using discriminative convolutional neural networks}, series = {Computers and electronics in agriculture : COMPAG online ; an international journal}, volume = {181}, journal = {Computers and electronics in agriculture : COMPAG online ; an international journal}, publisher = {Elsevier}, address = {Amsterdam [u.a.]}, issn = {0168-1699}, doi = {10.1016/j.compag.2020.105935}, pages = {11}, year = {2021}, abstract = {In recent years, many efforts have been made to apply image processing techniques for plant leaf identification. However, categorizing leaf images at the cultivar/variety level, because of the very low inter-class variability, is still a challenging task. In this research, we propose an automatic discriminative method based on convolutional neural networks (CNNs) for classifying 12 different cultivars of common beans that belong to three various species. We show that employing advanced loss functions, such as Additive Angular Margin Loss and Large Margin Cosine Loss, instead of the standard softmax loss function for the classification can yield better discrimination between classes and thereby mitigate the problem of low inter-class variability. The method was evaluated by classifying species (level I), cultivars from the same species (level II), and cultivars from different species (level III), based on images from the leaf foreside and backside. The results indicate that the performance of the classification algorithm on the leaf backside image dataset is superior. The maximum mean classification accuracies of 95.86, 91.37 and 86.87\% were obtained at the levels I, II and III, respectively. The proposed method outperforms the previous relevant works and provides a reliable approach for plant cultivars identification.}, language = {en} } @article{Teske2014, author = {Teske, Daniel}, title = {Geocoder accuracy ranking}, series = {Process design for natural scientists: an agile model-driven approach}, journal = {Process design for natural scientists: an agile model-driven approach}, number = {500}, publisher = {Springer}, address = {Berlin}, isbn = {978-3-662-45005-5}, issn = {1865-0929}, pages = {161 -- 174}, year = {2014}, abstract = {Finding an address on a map is sometimes tricky: the chosen map application may be unfamiliar with the enclosed region. There are several geocoders on the market, they have different databases and algorithms to compute the query. Consequently, the geocoding results differ in their quality. Fortunately the geocoders provide a rich set of metadata. 
The workflow described in this paper compares this metadata with the aim to find out which geocoder is offering the best-fitting coordinate for a given address.}, language = {en} } @article{Vierheller2014, author = {Vierheller, Janine}, title = {Exploratory Data Analysis}, series = {Process Design for Natural Scientists: an agile model-driven approach}, journal = {Process Design for Natural Scientists: an agile model-driven approach}, number = {500}, editor = {Lamprecht, Anna-Lena and Margaria, Tiziana}, publisher = {Springer Verlag}, address = {Berlin}, isbn = {978-3-662-45005-5}, issn = {1865-0929}, pages = {110 -- 126}, year = {2014}, abstract = {In bioinformatics the term exploratory data analysis refers to different methods to get an overview of large biological data sets. Hence, it helps to create a framework for further analysis and hypothesis testing. The workflow facilitates this first important step of the data analysis created by high-throughput technologies. The results are different plots showing the structure of the measurements. The goal of the workflow is the automatization of the exploratory data analysis, but also the flexibility should be guaranteed. The basic tool is the free software R.}, language = {en} } @article{Webb2015, author = {Webb, Mary}, title = {Considerations for the Design of Computing Curricula}, series = {KEYCIT 2014 - Key Competencies in Informatics and ICT}, journal = {KEYCIT 2014 - Key Competencies in Informatics and ICT}, number = {7}, publisher = {Universit{\"a}tsverlag Potsdam}, address = {Potsdam}, issn = {1868-0844}, url = {http://nbn-resolving.de/urn:nbn:de:kobv:517-opus4-82723}, pages = {267 -- 283}, year = {2015}, abstract = {This paper originated from discussions about the need for important changes in the curriculum for Computing including two focus group meetings at IFIP conferences over the last two years. The paper examines how recent developments in curriculum, together with insights from curriculum thinking in other subject areas, especially mathematics and science, can inform curriculum design for Computing. The analysis presented in the paper provides insights into the complexity of curriculum design as well as identifying important constraints and considerations for the ongoing development of a vision and framework for a Computing curriculum.}, language = {en} } @article{WegnerZenderLucke2015, author = {Wegner, Christian and Zender, Raphael and Lucke, Ulrike}, title = {ProtoSense}, series = {KEYCIT 2014 - Key Competencies in Informatics and ICT}, journal = {KEYCIT 2014 - Key Competencies in Informatics and ICT}, number = {7}, publisher = {Universit{\"a}tsverlag Potsdam}, address = {Potsdam}, issn = {1868-0844}, url = {http://nbn-resolving.de/urn:nbn:de:kobv:517-opus4-82970}, pages = {405 -- 407}, year = {2015}, language = {en} } @article{Weigend2015, author = {Weigend, Michael}, title = {How Things Work}, series = {KEYCIT 2014 - Key Competencies in Informatics and ICT}, journal = {KEYCIT 2014 - Key Competencies in Informatics and ICT}, number = {7}, publisher = {Universit{\"a}tsverlag Potsdam}, address = {Potsdam}, issn = {1868-0844}, url = {http://nbn-resolving.de/urn:nbn:de:kobv:517-opus4-82814}, pages = {285 -- 298}, year = {2015}, abstract = {Recognizing and defining functionality is a key competence adopted in all kinds of programming projects. This study investigates how far students without specific informatics training are able to identify and verbalize functions and parameters. 
It presents observations from classroom activities on functional modeling in high school chemistry lessons with altogether 154 students. Finally it discusses the potential of functional modelling to improve the comprehension of scientific content.}, language = {en} } @article{ZierisGerstbergerMueller2015, author = {Zieris, Holger and Gerstberger, Herbert and M{\"u}ller, Wolfgang}, title = {Using Arduino-Based Experiments to Integrate Computer Science Education and Natural Science}, series = {KEYCIT 2014 - Key Competencies in Informatics and ICT}, journal = {KEYCIT 2014 - Key Competencies in Informatics and ICT}, number = {7}, publisher = {Universit{\"a}tsverlag Potsdam}, address = {Potsdam}, issn = {1868-0844}, url = {http://nbn-resolving.de/urn:nbn:de:kobv:517-opus4-82938}, pages = {381 -- 389}, year = {2015}, abstract = {Current curricular trends require teachers in Baden-Wuerttemberg (Germany) to integrate Computer Science (CS) into traditional subjects, such as Physical Science. However, concrete guidelines are missing. To fill this gap, we outline an approach where a microcontroller is used to perform and evaluate measurements in the Physical Science classroom. Using the open-source Arduino platform, we expect students to acquire and develop both CS and Physical Science competencies by using a self-programmed microcontroller. In addition to this combined development of competencies in Physical Science and CS, the subject matter will be embedded in suitable contexts and learning environments, such as weather and climate.}, language = {en} }