@article{EhlenzBergnerSchroeder2016, author = {Ehlenz, Matthias and Bergner, Nadine and Schroeder, Ulrik}, title = {Synergieeffekte zwischen Fach- und Lehramtsstudierenden in Softwarepraktika}, series = {Commentarii informaticae didacticae (CID)}, journal = {Commentarii informaticae didacticae (CID)}, number = {10}, publisher = {Universit{\"a}tsverlag Potsdam}, address = {Potsdam}, isbn = {978-3-86956-376-3}, issn = {1868-0844}, url = {http://nbn-resolving.de/urn:nbn:de:kobv:517-opus4-94875}, pages = {99 -- 102}, year = {2016}, abstract = {Dieser Beitrag diskutiert die Konzeption eines Software-Projektpraktikums im Bereich E-Learning, welches Lehramts- und Fachstudierenden der Informatik erm{\"o}glicht, voneinander zu profitieren und praxisrelevante Ergebnisse generiert. Vorbereitungen, Organisation und Durchf{\"u}hrung werden vorgestellt und diskutiert. Den Abschluss bildet ein Ausblick auf die Fortf{\"u}hrung des Konzepts und den Ausbau des Forschungsgebietes.}, language = {de} } @article{LaiDavisEickelmannetal.2015, author = {Lai, Kwok-Wing and Davis, Niki and Eickelmann, Birgit and Erstad, Ola and Fisser, Petra and Gibson, David and Khaddage, Ferial and Knezek, Gerald and Webb, Mary}, title = {Tackling Educational Challenges in a Digitally Networked World}, series = {KEYCIT 2014 - Key Competencies in Informatics and ICT}, journal = {KEYCIT 2014 - Key Competencies in Informatics and ICT}, number = {7}, publisher = {Universit{\"a}tsverlag Potsdam}, address = {Potsdam}, issn = {1868-0844}, url = {http://nbn-resolving.de/urn:nbn:de:kobv:517-opus4-82997}, pages = {415 -- 423}, year = {2015}, language = {en} } @article{BarnesKennewell2015, author = {Barnes, Jan and Kennewell, Steve}, title = {Teacher Perceptions of Key Competencies in ICT}, series = {KEYCIT 2014 - Key Competencies in Informatics and ICT}, journal = {KEYCIT 2014 - Key Competencies in Informatics and ICT}, number = {7}, publisher = {Universit{\"a}tsverlag Potsdam}, address = {Potsdam}, issn = {1868-0844}, url = {http://nbn-resolving.de/urn:nbn:de:kobv:517-opus4-82604}, pages = {61 -- 75}, year = {2015}, abstract = {Regardless of what is intended by government curriculum specifications and advised by educational experts, the competencies taught and learned in and out of classrooms can vary considerably. In this paper, we discuss in particular how we can investigate the perceptions that individual teachers have of competencies in ICT, and how these and other factors may influence students' learning. We report case study research which identifies contradictions within the teaching of ICT competencies as an activity system, highlighting issues concerning the object of the curriculum, the roles of the participants and the school cultures. In a particular case, contradictions in the learning objectives between higher order skills and the use of application tools have been resolved by a change in the teacher's perceptions which have not led to changes in other aspects of the activity system. 
We look forward to further investigation of the effects of these contradictions in other case studies and on forthcoming curriculum change.}, language = {en} } @article{GrillenbergerRomeike2015, author = {Grillenberger, Andreas and Romeike, Ralf}, title = {Teaching Data Management}, series = {KEYCIT 2014 - Key Competencies in Informatics and ICT}, journal = {KEYCIT 2014 - Key Competencies in Informatics and ICT}, number = {7}, publisher = {Universit{\"a}tsverlag Potsdam}, address = {Potsdam}, issn = {1868-0844}, url = {http://nbn-resolving.de/urn:nbn:de:kobv:517-opus4-82648}, pages = {133 -- 150}, year = {2015}, abstract = {Data management is a central topic in computer science as well as in computer science education. Over the last few years, this topic has been changing tremendously, as its impact on daily life has become increasingly visible. Nowadays, everyone not only needs to manage data of various kinds, but also continuously generates large amounts of data. In addition, Big Data and data analysis are intensively discussed in public dialogue because of their influence on society. To understand such discussions and to be able to participate in them, fundamental knowledge of data management is necessary. In particular, being aware of the threats that accompany the ability to analyze large amounts of data in near real-time becomes increasingly important. This raises the question of which key competencies are necessary for dealing with data and data management in daily life. In this paper, we will first point out the importance of data management and of Big Data in daily life. On this basis, we will analyze which key competencies concerning data management everyone needs in order to handle data properly in daily life. Afterwards, we will discuss the impact of these changes in data management on computer science education and in particular on database education.}, language = {en} } @article{Schiller2015, author = {Schiller, Thomas}, title = {Teaching Information Security (as Part of Key Competencies)}, series = {KEYCIT 2014 - Key Competencies in Informatics and ICT}, journal = {KEYCIT 2014 - Key Competencies in Informatics and ICT}, number = {7}, publisher = {Universit{\"a}tsverlag Potsdam}, address = {Potsdam}, issn = {1868-0844}, url = {http://nbn-resolving.de/urn:nbn:de:kobv:517-opus4-82960}, pages = {401 -- 404}, year = {2015}, abstract = {The poster and abstract describe the importance of teaching information security in school. After a short description of information security and its important aspects, I will show how information security fits into different guidelines and models for computer science education and that it is therefore one of the key competencies. Afterwards, I will give a rough insight into the teaching of information security in Austria.}, language = {en} } @misc{Frank2013, type = {Master Thesis}, author = {Frank, Mario}, title = {TEMPLAR : efficient determination of relevant axioms in big formula sets for theorem proving}, url = {http://nbn-resolving.de/urn:nbn:de:kobv:517-opus-72112}, school = {Universit{\"a}t Potsdam}, year = {2013}, abstract = {This document presents a formula selection system for classical first-order theorem proving based on the relevance of formulae for the proof of a conjecture. It is based on the unifiability of predicates and is also able to use a linguistic approach for the selection. The scope of the technique is the reduction of the set of formulae and the increase in the number of provable conjectures in a given time.
Since the technique generates a subset of the formula set, it can be used as a preprocessor for automated theorem proving. The document contains the conception, implementation and evaluation of both selection concepts. While one concept generates a search graph over the negation normal forms or Skolem normal forms of the given formulae, the linguistic concept analyses the formulae, determines the frequencies of lexemes and uses a tf-idf weighting algorithm to determine the relevance of the formulae. Though the concept is built for first-order logic, it is not limited to it; it can also be used for higher-order and modal logic with minimal adaptations. The system was also evaluated at the world championship of automated theorem provers (CADE ATP Systems Competition, CASC-24) in combination with the leanCoP theorem prover; the evaluation of the CASC results and the benchmarks with the problems of the 2012 competition (CASC-J6) show that the concept has a positive impact on the performance of automated theorem provers. Also, the benchmarks with two different theorem provers that use different calculi have shown that the selection is independent of the calculus. Moreover, the concept of TEMPLAR has proven to be competitive to some extent with the concept of SinE and even helped one of the theorem provers to solve problems that were not solved, or were solved more slowly, with SinE selection in the CASC. Finally, the evaluation implies that the combination of the unification-based and linguistic selection yields further improved results, even though no optimisation was done for the problems.}, language = {en} } @phdthesis{Huang2006, author = {Huang, Wanjun}, title = {Temporary binding for dynamic middleware construction and web services composition}, url = {http://nbn-resolving.de/urn:nbn:de:kobv:517-opus-7672}, school = {Universit{\"a}t Potsdam}, year = {2006}, abstract = {With the increasing number of applications in Internet and mobile environments, distributed software systems are required to be more powerful and flexible, especially in terms of dynamism and security. This dissertation describes my work concerning three aspects: dynamic reconfiguration of component software, security control of middleware applications, and dynamic composition of web services. Firstly, I proposed a technology named Routing Based Workflow (RBW) to model the execution and management of collaborative components and to realize temporary binding for component instances. Temporary binding means that component instances are temporarily loaded into a created execution environment to execute their functions and are then released back to their repository after execution. Temporary binding allows the creation of an idle execution environment for all collaborative components, on which change operations can be carried out immediately. Changes to the execution environment result in a new collaboration of all involved components and also greatly simplify the classical issues arising from dynamic changes, such as consistency preservation. To demonstrate the feasibility of RBW, I created a dynamic secure middleware system - the Smart Data Server Version 3.0 (SDS3). In SDS3, an open-source implementation of CORBA is adopted and modified as the communication infrastructure, and three secure components, managed by RBW, are created to enhance the security of access to the deployed applications. SDS3 offers multi-level security control over its applications, from strategy control to application-specific detail control.
Thanks to the management by RBW, the strategy control of SDS3 applications can be dynamically changed by reorganizing the collaboration of the three secure components. In addition, I created the Dynamic Services Composer (DSC) based on the Apache open-source projects Apache Axis and WSIF. In DSC, RBW is employed to model the interaction and collaboration of web services and to enable dynamic changes to the flow structure of web services. Finally, overall performance tests were carried out to evaluate the efficiency of the developed RBW and SDS3. The results demonstrated that temporary binding of component instances has only a slight impact on the execution efficiency of components, and that the blackout time arising from dynamic changes can be greatly reduced in all applications.}, subject = {Middleware}, language = {en} } @misc{HollmannFrohmeEndrullatetal.2020, author = {Hollmann, Susanne and Frohme, Marcus and Endrullat, Christoph and Kremer, Andreas and D'Elia, Domenica and Regierer, Babette and Nechyporenko, Alina}, title = {Ten simple rules on how to write a standard operating procedure}, series = {Postprints der Universit{\"a}t Potsdam : Mathematisch-Naturwissenschaftliche Reihe}, journal = {Postprints der Universit{\"a}t Potsdam : Mathematisch-Naturwissenschaftliche Reihe}, number = {9}, issn = {1866-8372}, doi = {10.25932/publishup-52587}, url = {http://nbn-resolving.de/urn:nbn:de:kobv:517-opus4-525877}, pages = {12}, year = {2020}, abstract = {Research publications and data nowadays should be publicly available on the internet and, theoretically, usable for everyone to develop further research, products, or services. The long-term accessibility of research data is, therefore, fundamental in the economy of the research production process. However, the availability of data is not sufficient by itself; their quality must also be verifiable. Measures to ensure reuse and reproducibility need to include the entire research life cycle, from the experimental design to the generation of data, quality control, statistical analysis, interpretation, and validation of the results. Hence, high-quality records, particularly for providing a string of documents for the verifiable origin of data, are essential elements that can act as a certificate for potential users (customers). These records also improve the traceability and transparency of data and processes, thereby improving the reliability of results. Standards for data acquisition, analysis, and documentation have been fostered in the last decade, driven by grassroots initiatives of researchers and organizations such as the Research Data Alliance (RDA). Nevertheless, what is still largely missing in academic life science research are agreed procedures for complex routine research workflows. Here, well-crafted documentation like standard operating procedures (SOPs) offers clear directions and instructions specifically designed to avoid deviations, an absolute necessity for reproducibility.
Therefore, this paper provides a standardized workflow that explains step by step how to write an SOP to be used as a starting point for appropriate research documentation.}, language = {en} } @phdthesis{Lorenz2011, author = {Lorenz, Haik}, title = {Texturierung und Visualisierung virtueller 3D-Stadtmodelle}, url = {http://nbn-resolving.de/urn:nbn:de:kobv:517-opus-53879}, school = {Universit{\"a}t Potsdam}, year = {2011}, abstract = {Im Mittelpunkt dieser Arbeit stehen virtuelle 3D-Stadtmodelle, die Objekte, Ph{\"a}nomene und Prozesse in urbanen R{\"a}umen in digitaler Form repr{\"a}sentieren. Sie haben sich zu einem Kernthema von Geoinformationssystemen entwickelt und bilden einen zentralen Bestandteil geovirtueller 3D-Welten. Virtuelle 3D-Stadtmodelle finden nicht nur Verwendung als Mittel f{\"u}r Experten in Bereichen wie Stadtplanung, Funknetzplanung, oder L{\"a}rmanalyse, sondern auch f{\"u}r allgemeine Nutzer, die realit{\"a}tsnah dargestellte virtuelle St{\"a}dte in Bereichen wie B{\"u}rgerbeteiligung, Tourismus oder Unterhaltung nutzen und z. B. in Anwendungen wie GoogleEarth eine r{\"a}umliche Umgebung intuitiv erkunden und durch eigene 3D-Modelle oder zus{\"a}tzliche Informationen erweitern. Die Erzeugung und Darstellung virtueller 3D-Stadtmodelle besteht aus einer Vielzahl von Prozessschritten, von denen in der vorliegenden Arbeit zwei n{\"a}her betrachtet werden: Texturierung und Visualisierung. Im Bereich der Texturierung werden Konzepte und Verfahren zur automatischen Ableitung von Fototexturen aus georeferenzierten Schr{\"a}gluftbildern sowie zur Speicherung oberfl{\"a}chengebundener Daten in virtuellen 3D-Stadtmodellen entwickelt. Im Bereich der Visualisierung werden Konzepte und Verfahren f{\"u}r die multiperspektivische Darstellung sowie f{\"u}r die hochqualitative Darstellung nichtlinearer Projektionen virtueller 3D-Stadtmodelle in interaktiven Systemen vorgestellt. Die automatische Ableitung von Fototexturen aus georeferenzierten Schr{\"a}gluftbildern erm{\"o}glicht die Veredelung vorliegender virtueller 3D-Stadtmodelle. Schr{\"a}gluftbilder bieten sich zur Texturierung an, da sie einen Großteil der Oberfl{\"a}chen einer Stadt, insbesondere Geb{\"a}udefassaden, mit hoher Redundanz erfassen. Das Verfahren extrahiert aus dem verf{\"u}gbaren Bildmaterial alle Ansichten einer Oberfl{\"a}che und f{\"u}gt diese pixelpr{\"a}zise zu einer Textur zusammen. Durch Anwendung auf alle Oberfl{\"a}chen wird das virtuelle 3D-Stadtmodell fl{\"a}chendeckend texturiert. Der beschriebene Ansatz wurde am Beispiel des offiziellen Berliner 3D-Stadtmodells sowie der in GoogleEarth integrierten Innenstadt von M{\"u}nchen erprobt. Die Speicherung oberfl{\"a}chengebundener Daten, zu denen auch Texturen z{\"a}hlen, wurde im Kontext von CityGML, einem international standardisierten Datenmodell und Austauschformat f{\"u}r virtuelle 3D-Stadtmodelle, untersucht. Es wird ein Datenmodell auf Basis computergrafischer Konzepte entworfen und in den CityGML-Standard integriert. Dieses Datenmodell richtet sich dabei an praktischen Anwendungsf{\"a}llen aus und l{\"a}sst sich dom{\"a}nen{\"u}bergreifend verwenden. Die interaktive multiperspektivische Darstellung virtueller 3D-Stadtmodelle erg{\"a}nzt die gewohnte perspektivische Darstellung nahtlos um eine zweite Perspektive mit dem Ziel, den Informationsgehalt der Darstellung zu erh{\"o}hen. Diese Art der Darstellung ist durch die Panoramakarten von H. C. 
Berann inspiriert; Hauptproblem ist die {\"U}bertragung des multiperspektivischen Prinzips auf ein interaktives System. Die Arbeit stellt eine technische Umsetzung dieser Darstellung f{\"u}r 3D-Grafikhardware vor und demonstriert die Erweiterung von Vogel- und Fußg{\"a}ngerperspektive. Die hochqualitative Darstellung nichtlinearer Projektionen beschreibt deren Umsetzung auf 3D-Grafikhardware, wobei neben der Bildwiederholrate die Bildqualit{\"a}t das wesentliche Entwicklungskriterium ist. Insbesondere erlauben die beiden vorgestellten Verfahren, dynamische Geometrieverfeinerung und st{\"u}ckweise perspektivische Projektionen, die uneingeschr{\"a}nkte Nutzung aller hardwareseitig verf{\"u}gbaren, qualit{\"a}tssteigernden Funktionen wie z.~B. Bildraumgradienten oder anisotroper Texturfilterung. Beide Verfahren sind generisch und unterst{\"u}tzen verschiedene Projektionstypen. Sie erm{\"o}glichen die anpassungsfreie Verwendung g{\"a}ngiger computergrafischer Effekte wie Stilisierungsverfahren oder prozeduraler Texturen f{\"u}r nichtlineare Projektionen bei optimaler Bildqualit{\"a}t. Die vorliegende Arbeit beschreibt wesentliche Technologien f{\"u}r die Verarbeitung virtueller 3D-Stadtmodelle: Zum einen lassen sich mit den Ergebnissen der Arbeit Texturen f{\"u}r virtuelle 3D-Stadtmodelle automatisiert herstellen und als eigenst{\"a}ndige Attribute in das virtuelle 3D-Stadtmodell einf{\"u}gen. Somit tr{\"a}gt diese Arbeit dazu bei, die Herstellung und Fortf{\"u}hrung texturierter virtueller 3D-Stadtmodelle zu verbessern. Zum anderen zeigt die Arbeit Varianten und technische L{\"o}sungen f{\"u}r neuartige Projektionstypen f{\"u}r virtuelle 3D-Stadtmodelle in interaktiven Visualisierungen. Solche nichtlinearen Projektionen stellen Schl{\"u}sselbausteine dar, um neuartige Benutzungsschnittstellen f{\"u}r und Interaktionsformen mit virtuellen 3D-Stadtmodellen zu erm{\"o}glichen, insbesondere f{\"u}r mobile Ger{\"a}te und immersive Umgebungen.}, language = {de} } @article{Saito2015, author = {Saito, Toshinori}, title = {The Key Competencies in Informatics and ICT viewed from Nussbaum's Ten Central Capabilities}, series = {KEYCIT 2014 - Key Competencies in Informatics and ICT}, journal = {KEYCIT 2014 - Key Competencies in Informatics and ICT}, number = {7}, publisher = {Universit{\"a}tsverlag Potsdam}, address = {Potsdam}, issn = {1868-0844}, url = {http://nbn-resolving.de/urn:nbn:de:kobv:517-opus4-82718}, pages = {253 -- 266}, year = {2015}, abstract = {This article presents a discussion of the key competencies in informatics and ICT viewed from a philosophical foundation presented by Martha Nussbaum, known as the 'ten central capabilities'. Firstly, the outline of 'The Capability Approach', which has been presented by Amartya Sen and Nussbaum as a theoretical framework for assessing the state of social welfare, will be explained. Secondly, the body of Nussbaum's ten central capabilities and the reason for applying them as the basis of the discussion will be shown. Thirdly, the relationship between the concepts of 'capability' and 'competency' will be discussed. After that, the author's view of the key competencies in informatics and ICT, derived from the examination of Nussbaum's ten capabilities, will be presented.}, language = {en} }