@inproceedings{Watson2008, author = {Watson, Bruce W.}, title = {Advances in automata implementation techniques (Abstract)}, url = {http://nbn-resolving.de/urn:nbn:de:kobv:517-opus-27094}, year = {2008}, language = {en} } @inproceedings{Barthelemy2008, author = {Barth{\'e}lemy, Fran{\c{c}}ois}, title = {Finite-state compilation of feature structures for two-level morphology}, url = {http://nbn-resolving.de/urn:nbn:de:kobv:517-opus-27120}, year = {2008}, abstract = {This paper describes a two-level formalism where feature structures are used in contextual rules. Whereas usual two-level grammars describe rational sets over symbol pairs, this new formalism uses tree structured regular expressions. They allow an explicit and precise definition of the scope of feature structures. A given surface form may be described using several feature structures. Feature unification is expressed in contextual rules using variables, like in a unification grammar. Grammars are compiled in finite state multi-tape transducers.}, language = {en} } @inproceedings{BlancConstantWatrin2008, author = {Blanc, Olivier and Constant, Matthieu and Watrin, Patrick}, title = {Segmentation in super-chunks with a finite-state approach}, url = {http://nbn-resolving.de/urn:nbn:de:kobv:517-opus-27133}, year = {2008}, abstract = {Since Harris' parser in the late 50s, multiword units have been progressively integrated in parsers. Nevertheless, in the most part, they are still restricted to compound words, that are more stable and less numerous. Actually, language is full of semi-fixed expressions that also form basic semantic units: semi-fixed adverbial expressions (e.g. time), collocations. Like compounds, the identification of these structures limits the combinatorial complexity induced by lexical ambiguity. 
In this paper, we detail an experiment that largely integrates these notions in a finite-state procedure of segmentation into super-chunks, preliminary to a parser. We show that the chunker, developed for French, reaches 92.9\% precision and 98.7\% recall. Moreover, multiword units realize 36.6\% of the attachments within nominal and prepositional phrases.}, language = {en} } @inproceedings{BonfanteLeRoux2008, author = {Bonfante, Guillaume and Le Roux, Joseph}, title = {Intersection optimization is NP-complete}, url = {http://nbn-resolving.de/urn:nbn:de:kobv:517-opus-27146}, year = {2008}, abstract = {Finite state methods for natural language processing often require the construction and the intersection of several automata. In this paper, we investigate the question of determining the best order in which these intersections should be performed. We take as an example lexical disambiguation in polarity grammars. We show that there is no efficient way to minimize the state complexity of these intersections.}, language = {en} } @inproceedings{Daciuk2008, author = {Daciuk, Jan}, title = {Perfect hashing tree automata}, url = {http://nbn-resolving.de/urn:nbn:de:kobv:517-opus-27163}, year = {2008}, abstract = {We present an algorithm that computes a function that assigns consecutive integers to trees recognized by a deterministic, acyclic, finite-state, bottom-up tree automaton. Such function is called minimal perfect hashing. It can be used to identify trees recognized by the automaton. Its value may be seen as an index in some other data structures. We also present an algorithm for inverted hashing.}, language = {en} } @inproceedings{PadroPadro2008, author = {Padr{\'o}, Muntsa and Padr{\'o}, Llu{\'\i}s}, title = {ME-CSSR : an extension of CSSR using maximum entropy models}, url = {http://nbn-resolving.de/urn:nbn:de:kobv:517-opus-27210}, year = {2008}, abstract = {In this work an extension of CSSR algorithm using Maximum Entropy Models is introduced. 
Preliminary experiments to perform Named Entity Recognition with this new system are presented.}, language = {en} } @inproceedings{BoegelButtHautlietal.2008, author = {B{\"o}gel, Tina and Butt, Miriam and Hautli, Annette and Sulger, Sebastian}, title = {Developing a finite-state morphological analyzer for Urdu and Hindi}, url = {http://nbn-resolving.de/urn:nbn:de:kobv:517-opus-27155}, year = {2008}, abstract = {We introduce and discuss a number of issues that arise in the process of building a finite-state morphological analyzer for Urdu, in particular issues with potential ambiguity and non-concatenative morphology. Our approach allows for an underlyingly similar treatment of both Urdu and Hindi via a cascade of finite-state transducers that transliterates the very different scripts into a common ASCII transcription system. As this transliteration system is based on the XFST tools that the Urdu/Hindi common morphological analyzer is also implemented in, no compatibility problems arise.}, language = {en} } @inproceedings{YliJyrae2008, author = {Yli-Jyr{\"a}, Anssi}, title = {Applications of diamonded double negation}, url = {http://nbn-resolving.de/urn:nbn:de:kobv:517-opus-27109}, year = {2008}, abstract = {Nested complementation plays an important role in expressing counter- i.e. star-free and first-order definable languages and their hierarchies. In addition, methods that compile phonological rules into finite-state networks use double-nested complementation or "double negation". This paper reviews how the double-nested complementation extends to a relatively new operation, generalized restriction (GR), coined by the author (Yli-Jyr{\"a} and Koskenniemi 2004). This operation encapsulates a double-nested complementation and elimination of a concatenation marker, diamond, whose finite occurrences align concatenations in the arguments of the operation. 
The paper demonstrates that the GR operation has an interesting potential in expressing regular languages, various kinds of grammars, bimorphisms and relations. This motivates a further study of optimized implementation of the operator.}, language = {en} } @inproceedings{BarbaianiCanceddaDanceetal.2008, author = {Barbaiani, M{\u{a}}d{\u{a}}lina and Cancedda, Nicola and Dance, Chris and Fazekas, Szil{\'a}rd and Ga{\'a}l, Tam{\'a}s and Gaussier, {\'E}ric}, title = {Asymmetric term alignment with selective contiguity constraints by multi-tape automata}, url = {http://nbn-resolving.de/urn:nbn:de:kobv:517-opus-27115}, year = {2008}, abstract = {This article describes a HMM-based word-alignment method that can selectively enforce a contiguity constraint. This method has a direct application in the extraction of a bilingual terminological lexicon from a parallel corpus, but can also be used as a preliminary step for the extraction of phrase pairs in a Phrase-Based Statistical Machine Translation system. Contiguous source words composing terms are aligned to contiguous target language words. The HMM is transformed into a Weighted Finite State Transducer (WFST) and contiguity constraints are enforced by specific multi-tape WFSTs. 
The proposed method is especially suited when basic linguistic resources (morphological analyzer, part-of-speech taggers and term extractors) are available for the source language only.}, language = {en} } @inproceedings{Karttunen2008, author = {Karttunen, Lauri}, title = {New features in PARC finite state toolkits (Abstract)}, url = {http://nbn-resolving.de/urn:nbn:de:kobv:517-opus-27085}, year = {2008}, language = {en} } @inproceedings{YliJyrae2008a, author = {Yli-Jyr{\"a}, Anssi}, title = {Transducers from parallel replace rules and modes with generalized lenient composition}, url = {http://nbn-resolving.de/urn:nbn:de:kobv:517-opus-27246}, year = {2008}, abstract = {Generalized Two-Level Grammar (GTWOL) provides a new method for compilation of parallel replacement rules into transducers. The current paper identifies the role of generalized lenient composition (GLC) in this method. Thanks to the GLC operation, the compilation method becomes bipartite and easily extendible to capture various application modes. In the light of three notions of obligatoriness, a modification to the compilation method is proposed. We argue that the bipartite design makes implementation of parallel obligatoriness, directionality, length and rank based application modes extremely easy, which is the main result of the paper.}, language = {en} } @inproceedings{Saleschus2008, author = {Sal{\'e}schus, Dirk}, title = {On resolving long distance dependencies in Russian verbs}, url = {http://nbn-resolving.de/urn:nbn:de:kobv:517-opus-27235}, year = {2008}, abstract = {Morphological analyses based on word syntax approaches can encounter difficulties with long distance dependencies. The reason is that in some cases an affix has to have access to the inner structure of the form with which it combines. One solution is the percolation of features from the inner morphemes to the outer morphemes with some process of feature unification. 
However, the obstacle of percolation constraints or stipulated features has led some linguists to argue in favour of other frameworks such as, e.g., realizational morphology or parallel approaches like optimality theory. This paper proposes a linguistic analysis of two long distance dependencies in the morphology of Russian verbs, namely secondary imperfectivization and deverbal nominalization. We show how these processes can be reanalysed as local dependencies. Although finite-state frameworks are not bound by such linguistically motivated considerations, we present an implementation of our analysis as proposed in [1] that does not complicate the grammar or enlarge the network unproportionally.}, language = {en} } @inproceedings{IlarrazaGojenolaOronozetal.2008, author = {Ilarraza, Arantza D{\'\i}az de and Gojenola, Koldo and Oronoz, Maite and Otaegi, Maialen and Alegria, I{\~n}aki}, title = {Syntactic error detection and correction in date expressions using finite-state transducers}, url = {http://nbn-resolving.de/urn:nbn:de:kobv:517-opus-27180}, year = {2008}, abstract = {This paper presents a system for the detection and correction of syntactic errors. It combines a robust morphosyntactic analyser and two groups of finite-state transducers specified using the Xerox Finite State Tool (xfst). One of the groups is used for the description of syntactic error patterns while the second one is used for the correction of the detected errors. The system has been tested on a corpus of real texts, containing both correct and incorrect sentences, with good results.}, language = {en} } @inproceedings{Fernando2008, author = {Fernando, Tim}, title = {Temporal propositions as regular languages}, url = {http://nbn-resolving.de/urn:nbn:de:kobv:517-opus-27194}, year = {2008}, abstract = {Temporal propositions are mapped to sets of strings that witness (in a precise sense) the propositions over discrete linear Kripke frames. 
The strings are collected into regular languages to ensure the decidability of entailments given by inclusions between languages. (Various notions of bounded entailment are shown to be expressible as language inclusions.) The languages unwind computations implicit in the logical (and temporal) connectives via a system of finite-state constraints adapted from finite-state morphology. Applications to Hybrid Logic and non-monotonic inertial reasoning are briefly considered.}, language = {en} } @inproceedings{Didakowski2008, author = {Didakowski, J{\"o}rg}, title = {SynCoP : combining syntactic tagging with chunking using weighted finite state transducers}, url = {http://nbn-resolving.de/urn:nbn:de:kobv:517-opus-27172}, year = {2008}, abstract = {This paper describes the key aspects of the system SynCoP (Syntactic Constraint Parser) developed at the Berlin-Brandenburgische Akademie der Wissenschaften. The parser allows to combine syntactic tagging and chunking by means of constraint grammar using weighted finite state transducers (WFST). Chunks are interpreted as local dependency structures within syntactic tagging. The linguistic theories are formulated by criteria which are formalized by a semiring; these criteria allow structural preferences and gradual grammaticality. The parser is essentially a cascade of WFSTs. To find the most likely syntactic readings a best-path search is used.}, language = {en} } @inproceedings{ZarriessSeeker2008, author = {Zarrie{\ss}, Sina and Seeker, Wolfgang}, title = {Finite-state rule deduction for parsing non-constituent coordination}, url = {http://nbn-resolving.de/urn:nbn:de:kobv:517-opus-27254}, year = {2008}, abstract = {In this paper, we present a finite-state approach to constituency and therewith an analysis of coordination phenomena involving so-called non-constituents. We show that non-constituents can be seen as parts of fully-fledged constituents and therefore be coordinated in the same way. 
We have implemented an algorithm based on finite state automata that generates an LFG grammar assigning valid analyses to non-constituent coordination structures in the German language.}, language = {en} } @inproceedings{GonzalezCasacuberta2008, author = {Gonz{\'a}lez, Jorge and Casacuberta, Francisco}, title = {Phrase-based finite state models}, url = {http://nbn-resolving.de/urn:nbn:de:kobv:517-opus-27209}, year = {2008}, abstract = {In the last years, statistical machine translation has already demonstrated its usefulness within a wide variety of translation applications. In this line, phrase-based alignment models have become the reference to follow in order to build competitive systems. Finite state models are always an interesting framework because there are well-known efficient algorithms for their representation and manipulation. This document is a contribution to the evolution of finite state models towards a phrase-based approach. The inference of stochastic transducers that are based on bilingual phrases is carefully analysed from a finite state point of view. Indeed, the algorithmic phenomena that have to be taken into account in order to deal with such phrase-based finite state models when in decoding time are also in-depth detailed.}, language = {en} } @inproceedings{BandaGallagher2010, author = {Banda, Gourinath and Gallagher, John P.}, title = {Constraint-based abstraction of a model checker for infinite state systems}, url = {http://nbn-resolving.de/urn:nbn:de:kobv:517-opus-41516}, year = {2010}, abstract = {Abstract interpretation-based model checking provides an approach to verifying properties of infinite-state systems. In practice, most previous work on abstract model checking is either restricted to verifying universal properties, or develops special techniques for temporal logics such as modal transition systems or other dual transition systems. 
By contrast we apply completely standard techniques for constructing abstract interpretations to the abstraction of a CTL semantic function, without restricting the kind of properties that can be verified. Furthermore we show that this leads directly to implementation of abstract model checking algorithms for abstract domains based on constraints, making use of an SMT solver.}, language = {en} } @inproceedings{GoltzPieth2010, author = {Goltz, Hans-Joachim and Pieth, Norbert}, title = {A tool for generating partition schedules of multiprocessor systems}, url = {http://nbn-resolving.de/urn:nbn:de:kobv:517-opus-41556}, year = {2010}, abstract = {A deterministic cycle scheduling of partitions at the operating system level is supposed for a multiprocessor system. In this paper, we propose a tool for generating such schedules. We use constraint based programming and develop methods and concepts for a combined interactive and automatic partition scheduling system. This paper is also devoted to basic methods and techniques for modeling and solving this partition scheduling problem. Initial application of our partition scheduling tool has proved successful and demonstrated the suitability of the methods used.}, language = {en} } @inproceedings{Schrijvers2010, author = {Schrijvers, Tom}, title = {Overview of the monadic constraint programming framework}, url = {http://nbn-resolving.de/urn:nbn:de:kobv:517-opus-41411}, year = {2010}, abstract = {A constraint programming system combines two essential components: a constraint solver and a search engine. The constraint solver reasons about satisfiability of conjunctions of constraints, and the search engine controls the search for solutions by iteratively exploring a disjunctive search tree defined by the constraint program. The Monadic Constraint Programming framework gives a monadic definition of constraint programming where the solver is defined as a monad threaded through the monadic search tree. 
Search and search strategies can then be defined as firstclass objects that can themselves be built or extended by composable search transformers. Search transformers give a powerful and unifying approach to viewing search in constraint programming, and the resulting constraint programming system is first class and extremely flexible.}, language = {en} } @inproceedings{HerreHummel2010, author = {Herre, Heinrich and Hummel, Axel}, title = {Stationary generated models of generalized logic programs}, url = {http://nbn-resolving.de/urn:nbn:de:kobv:517-opus-41501}, year = {2010}, abstract = {The interest in extensions of the logic programming paradigm beyond the class of normal logic programs is motivated by the need of an adequate representation and processing of knowledge. One of the most difficult problems in this area is to find an adequate declarative semantics for logic programs. In the present paper a general preference criterion is proposed that selects the 'intended' partial models of generalized logic programs which is a conservative extension of the stationary semantics for normal logic programs of [Prz91]. The presented preference criterion defines a partial model of a generalized logic program as intended if it is generated by a stationary chain. It turns out that the stationary generated models coincide with the stationary models on the class of normal logic programs. The general wellfounded semantics of such a program is defined as the set-theoretical intersection of its stationary generated models. 
For normal logic programs the general wellfounded semantics equals the wellfounded semantics.}, language = {en} } @inproceedings{AbdennadherIsmailKhoury2010, author = {Abdennadher, Slim and Ismail, Haythem and Khoury, Frederick}, title = {Transforming imperative algorithms to constraint handling rules}, url = {http://nbn-resolving.de/urn:nbn:de:kobv:517-opus-41533}, year = {2010}, abstract = {Different properties of programs, implemented in Constraint Handling Rules (CHR), have already been investigated. Proving these properties in CHR is fairly simpler than proving them in any type of imperative programming language, which triggered the proposal of a methodology to map imperative programs into equivalent CHR. The equivalence of both programs implies that if a property is satisfied for one, then it is satisfied for the other. The mapping methodology could be put to other beneficial uses. One such use is the automatic generation of global constraints, at an attempt to demonstrate the benefits of having a rule-based implementation for constraint solvers.}, language = {en} } @inproceedings{BetzRaiserFruehwirth2010, author = {Betz, Hariolf and Raiser, Frank and Fr{\"u}hwirth, Thom}, title = {Persistent constraints in constraint handling rules}, url = {http://nbn-resolving.de/urn:nbn:de:kobv:517-opus-41547}, year = {2010}, abstract = {In the most abstract definition of its operational semantics, the declarative and concurrent programming language CHR is trivially non-terminating for a significant class of programs. Common refinements of this definition, in closing the gap to real-world implementations, compromise on declarativity and/or concurrency. 
Building on recent work and the notion of persistent constraints, we introduce an operational semantics avoiding trivial non-termination without compromising on its essential features.}, language = {en} } @inproceedings{OetschSchwengererTompits2010, author = {Oetsch, Johannes and Schwengerer, Martin and Tompits, Hans}, title = {Kato: a plagiarism-detection tool for answer-set programs}, url = {http://nbn-resolving.de/urn:nbn:de:kobv:517-opus-41485}, year = {2010}, abstract = {We present the tool Kato which is, to the best of our knowledge, the first tool for plagiarism detection that is directly tailored for answer-set programming (ASP). Kato aims at finding similarities between (segments of) logic programs to help detecting cases of plagiarism. Currently, the tool is realised for DLV programs but it is designed to handle various logic-programming syntax versions. We review basic features and the underlying methodology of the tool.}, language = {en} } @inproceedings{Zhou2010, author = {Zhou, Neng-Fa}, title = {What I have learned from all these solver competitions}, url = {http://nbn-resolving.de/urn:nbn:de:kobv:517-opus-41431}, year = {2010}, abstract = {In this talk, I would like to share my experiences gained from participating in four CSP solver competitions and the second ASP solver competition. In particular, I'll talk about how various programming techniques can make huge differences in solving some of the benchmark problems used in the competitions. These techniques include global constraints, table constraints, and problem-specific propagators and labeling strategies for selecting variables and values. 
I'll present these techniques with experimental results from B-Prolog and other CLP(FD) systems.}, language = {en} } @inproceedings{HanusKoschnicke2010, author = {Hanus, Michael and Koschnicke, Sven}, title = {An ER-based framework for declarative web programming}, url = {http://nbn-resolving.de/urn:nbn:de:kobv:517-opus-41447}, year = {2010}, abstract = {We describe a framework to support the implementation of web-based systems to manipulate data stored in relational databases. Since the conceptual model of a relational database is often specified as an entity-relationship (ER) model, we propose to use the ER model to generate a complete implementation in the declarative programming language Curry. This implementation contains operations to create and manipulate entities of the data model, supports authentication, authorization, session handling, and the composition of individual operations to user processes. Furthermore and most important, the implementation ensures the consistency of the database w.r.t. the data dependencies specified in the ER model, i.e., updates initiated by the user cannot lead to an inconsistent state of the database. In order to generate a high-level declarative implementation that can be easily adapted to individual customer requirements, the framework exploits previous works on declarative database programming and web user interface construction in Curry.}, language = {en} } @inproceedings{GeskeWolf2010, author = {Geske, Ulrich and Wolf, Armin}, title = {Preface}, url = {http://nbn-resolving.de/urn:nbn:de:kobv:517-opus-41401}, year = {2010}, abstract = {The workshops on (constraint) logic programming (WLP) are the annual meeting of the Society of Logic Programming (GLP e.V.) and bring together researchers interested in logic programming, constraint programming, and related areas like databases, artificial intelligence and operations research. 
In this decade, previous workshops took place in Dresden (2008), W{\"u}rzburg (2007), Vienna (2006), Ulm (2005), Potsdam (2004), Dresden (2002), Kiel (2001), and W{\"u}rzburg (2000). Contributions to workshops deal with all theoretical, experimental, and application aspects of constraint programming (CP) and logic programming (LP), including foundations of constraint/ logic programming. Some of the special topics are constraint solving and optimization, extensions of functional logic programming, deductive databases, data mining, nonmonotonic reasoning, interaction of CP/LP with other formalisms like agents, XML, JAVA, program analysis, program transformation, program verification, meta programming, parallelism and concurrency, answer set programming, implementation and software techniques (e.g., types, modularity, design patterns), applications (e.g., in production, environment, education, internet), constraint/logic programming for semantic web systems and applications, reasoning on the semantic web, data modelling for the web, semistructured data, and web query languages.}, language = {en} } @inproceedings{Cabalar2010, author = {Cabalar, Pedro}, title = {Existential quantifiers in the rule body}, url = {http://nbn-resolving.de/urn:nbn:de:kobv:517-opus-41476}, year = {2010}, abstract = {In this paper we consider a simple syntactic extension of Answer Set Programming (ASP) for dealing with (nested) existential quantifiers and double negation in the rule bodies, in a close way to the recent proposal RASPL-1. The semantics for this extension just resorts to Equilibrium Logic (or, equivalently, to the General Theory of Stable Models), which provides a logic-programming interpretation for any arbitrary theory in the syntax of Predicate Calculus. We present a translation of this syntactic class into standard logic programs with variables (either disjunctive or normal, depending on the input rule heads), as those allowed by current ASP solvers. 
The translation relies on the introduction of auxiliary predicates and the main result shows that it preserves strong equivalence modulo the original signature.}, language = {en} } @inproceedings{HerreHummel2010, author = {Herre, Heinrich and Hummel, Axel}, title = {A paraconsistent semantics for generalized logic programs}, url = {http://nbn-resolving.de/urn:nbn:de:kobv:517-opus-41496}, year = {2010}, abstract = {We propose a paraconsistent declarative semantics of possibly inconsistent generalized logic programs which allows for arbitrary formulas in the body and in the head of a rule (i.e. does not depend on the presence of any specific connective, such as negation(-as-failure), nor on any specific syntax of rules). For consistent generalized logic programs this semantics coincides with the stable generated models introduced in [HW97], and for normal logic programs it yields the stable models in the sense of [GL88].}, language = {en} } @inproceedings{Seipel2010, author = {Seipel, Dietmar}, title = {Practical Applications of Extended Deductive Databases in DATALOG*}, url = {http://nbn-resolving.de/urn:nbn:de:kobv:517-opus-41457}, year = {2010}, abstract = {A wide range of additional forward chaining applications could be realized with deductive databases, if their rule formalism, their immediate consequence operator, and their fixpoint iteration process would be more flexible. Deductive databases normally represent knowledge using stratified Datalog programs with default negation. But many practical applications of forward chaining require an extensible set of user-defined built-in predicates. Moreover, they often need function symbols for building complex data structures, and the stratified fixpoint iteration has to be extended by aggregation operations. 
We present a new language Datalog*, which extends Datalog by stratified meta-predicates (including default negation), function symbols, and user-defined built-in predicates, which are implemented and evaluated top-down in Prolog. All predicates are subject to the same backtracking mechanism. The bottom-up fixpoint iteration can aggregate the derived facts after each iteration based on user-defined Prolog predicates.}, language = {en} } @inproceedings{Brass2010, author = {Brass, Stefan}, title = {Range restriction for general formulas}, url = {http://nbn-resolving.de/urn:nbn:de:kobv:517-opus-41521}, year = {2010}, abstract = {Deductive databases need general formulas in rule bodies, not only conjunctions of literals. This is well known since the work of Lloyd and Topor about extended logic programming. Of course, formulas must be restricted in such a way that they can be effectively evaluated in finite time, and produce only a finite number of new tuples (in each iteration of the TP-operator: the fixpoint can still be infinite). It is also necessary to respect binding restrictions of built-in predicates: many of these predicates can be executed only when certain arguments are ground. Whereas for standard logic programming rules, questions of safety, allowedness, and range-restriction are relatively easy and well understood, the situation for general formulas is a bit more complicated. We give a syntactic analysis of formulas that guarantees the necessary properties.}, language = {en} } @inproceedings{GebserHinrichsSchaubetal.2010, author = {Gebser, Martin and Hinrichs, Henrik and Schaub, Torsten and Thiele, Sven}, title = {xpanda: a (simple) preprocessor for adding multi-valued propositions to ASP}, url = {http://nbn-resolving.de/urn:nbn:de:kobv:517-opus-41466}, year = {2010}, abstract = {We introduce a simple approach extending the input language of Answer Set Programming (ASP) systems by multi-valued propositions. 
Our approach is implemented as a (prototypical) preprocessor translating logic programs with multi-valued propositions into logic programs with Boolean propositions only. Our translation is modular and heavily benefits from the expressive input language of ASP. The resulting approach, along with its implementation, allows for solving interesting constraint satisfaction problems in ASP, showing a good performance.}, language = {en} } @inproceedings{HofukuChoNishidaetal.2013, author = {Hofuku, Yayoi and Cho, Shinya and Nishida, Tomohiro and Kanemune, Susumu and Linck, Barbara and Kim, Seungyon and Park, Seongbin and Van{\'\i}{\v{c}}ek, Ji{\v{r}}{\'\i} and Gujberov{\'a}, Monika and Tomcs{\'a}nyi, Peter and Dagiene, Valentina and Jevsikova, Tatjana and Schulte, Carsten and Sentance, Sue and Thota, Neena and G{\"u}lbahar, Yasemin and {\.I}lkhan, Mustafa and Kilis, Selcan and Arslan, Okan and Nakano, Yoshiaki and Izutsu, Katsunobu and Lessner, Daniel and Reffay, Christophe and Miled, Mahdi and Ortiz, Pascal and F{\'e}vrier, Lo{\"i}c and Grgurina, Nata{\v{s}}a and Weise, Martin and Bellettini, Carlo and Lonati, Violetta and Malchiodi, Dario and Monga, Mattia and Morpurgo, Anna and Torelli, Mauro and Planteu, Lukas and Standl, Bernhard and Grossmann, Wilfried and Neuwirth, Erich and Benacka, Jan and Ragonis, Noa and Hodges, Steve and Berry, Carol and Kusterer, Peter}, title = {Informatics in schools : local proceedings of the 6th International Conference ISSEP 2013 ; selected papers ; Oldenburg, Germany, February 26-March 2, 2013}, editor = {Diethelm, Ira and Arndt, Jannik and D{\"u}nnebier, Malte and Syrbe, J{\"o}rn}, publisher = {Universit{\"a}tsverlag Potsdam}, address = {Potsdam}, isbn = {978-3-86956-222-3}, url = {http://nbn-resolving.de/urn:nbn:de:kobv:517-opus-63688}, pages = {162}, year = {2013}, abstract = {The International Conference on Informatics in Schools: Situation, Evolution and Perspectives - ISSEP - is a forum for researchers and practitioners in the area of Informatics 
education, both in primary and secondary schools. It provides an opportunity for educators to reflect upon the goals and objectives of this subject, its curricula and various teaching/learning paradigms and topics, possible connections to everyday life and various ways of establishing Informatics Education in schools. This conference also cares about teaching/learning materials, various forms of assessment, traditional and innovative educational research designs, Informatics' contribution to the preparation of children for the 21st century, motivating competitions, projects and activities supporting informatics education in school.}, language = {en} } @inproceedings{WedernikovSlivaEbseevetal.2013, author = {Wedernikov, Nikolaij T. and Sliva, Anatolij J. and Ebseev, Boris S. and Mitjukov, Michail Alekseevi{\v{c}} and Bobrowa, Vera K. and Yustus, Ekaterina and Postier, R{\"u}diger and Schulze, Carola and Hoof, Karsten and Steinhorst, Lars and Straschun, Boris A. and Narutto, Svetlana Vasil'evna and Michaleva, Nadezda A. and Fadeev, Vladimir Ivanovi{\v{c}} and Warlen, Maria V.}, title = {Verfassungsgerichtsbarkeit in der Russischen F{\"o}deration und in der Bundesrepublik Deutschland}, editor = {Schulze, Carola and Fadeev, Vladimir Ivanovi{\v{c}}}, publisher = {Universit{\"a}tsverlag Potsdam}, address = {Potsdam}, organization = {Universit{\"a}t Potsdam, Juristische Fakult{\"a}t}, isbn = {978-3-86956-267-4}, url = {http://nbn-resolving.de/urn:nbn:de:kobv:517-opus-67861}, pages = {XX, 152}, year = {2013}, abstract = {Der Tagungsband enth{\"a}lt die Referate und Diskussionsbeitr{\"a}ge des in Moskau an der Staatlichen Juristischen Kutafin-Universit{\"a}t am 9. und 10. Oktober 2012 durchgef{\"u}hrten Rundtischgespr{\"a}ches zur Verfassungsgerichtsbarkeit. 
Behandelt werden ausgew{\"a}hlte rechtshistorische und -politische Fragen sowie aktuelle rechtliche Probleme der Verfassungsgerichtsbarkeit in der Russischen F{\"o}deration und der Bundesrepublik Deutschland sowohl aus der Sicht der Rechtspraxis als auch der Wissenschaft: insbesondere die Entwicklung der Verfassungsgerichtsbarkeit in Geschichte und Gegenwart, Status, Rechtsnatur und Aufgaben des Verfassungsgerichts in den Subjekten der F{\"o}deration und in den L{\"a}ndern sowie Verfassungsgericht und Gesetzgebung. Zudem werden Spezialfragen der Verfassungsgerichtsbarkeit er{\"o}rtert, z.B. die Institution des Bevollm{\"a}chtigten Vertreters des Pr{\"a}sidenten im Verfassungsgericht in Russland, der Eilrechtsschutz durch das BVerfG und der Rechtsschutz bei {\"u}berlangen Verfahren vor dem BVerfG in Deutschland.}, language = {de} } @inproceedings{ArlettazBadoBakosKovacsetal.2014, author = {Arlettaz, Jordane and Bad{\'o}, Attila and Bakos-Kov{\'a}cs, Kitti and Bat{\'o}, Szilvia and B{\'o}ka, J{\'a}nos and Congnard, Laureline and Csatl{\'o}s, Erzs{\'e}bet and Gedeon, Magdolna and Guemar, Carine and Hajd{\'u}, D{\'o}ra and Jacs{\'o}, Judit and Kelemen, Katalin and Fekete, Bal{\'a}zs and Kruzslicz, P{\'e}ter and Mezei, P{\'e}ter and Szűcs, T{\"u}nde and Zolt{\'a}n, P{\'e}teri and Szab{\'o}, B{\'e}la P.}, title = {Internationale Konferenz zum zehnj{\"a}hrigen Bestehen des Instituts f{\"u}r Rechtsvergleichung der Universit{\"a}t Szeged}, editor = {Bad{\'o}, Attila and Belling, Detlev W. and B{\'o}ka, J{\'a}nos and Mezei, P{\'e}ter}, publisher = {Universit{\"a}tsverlag Potsdam}, address = {Potsdam}, isbn = {978-3-86956-308-4}, url = {http://nbn-resolving.de/urn:nbn:de:kobv:517-opus-72039}, pages = {296}, year = {2014}, abstract = {Die Konferenz „International Conference for the 10th Anniversary of the Institute of Comparative Law" hat am 24. Mai 2013 in Szeged stattgefunden. 
Im Rahmen der viersprachigen Konferenz haben mehr als dreißig Teilnehmer ihre Forschungsergebnisse pr{\"a}sentiert. Der Essay von Zolt{\'a}n P{\'e}teri blickt auf die Disziplin aus der Perspektive der Wissenschaftsgeschichte. Katalin Kelemen und Bal{\'a}zs Fekete gehen in ihrem Aufsatz der Frage nach, welchen Weg die Versuche der Klassifikation der Rechtssysteme von Osteuropa in der sp{\"a}ten Phase der Umbr{\"u}che der 1980/90er Jahren genommen haben. Die historische Betrachtungsweise mit Bezug auf Rechtsgeschichte und Rechtsvergleichung spiegelt sich auch in anderen Essays wider, vor allem in den Aufs{\"a}tzen von Szilvia Bat{\'o}, Magdolna Gedeon und B{\'e}la Szab{\'o} P. sowie auch in den Aufs{\"a}tzen von P{\'e}ter Mezei und T{\"u}nde Szűcs. Attila Bad{\'o} analysiert die Rechtsvergleichung aus der Sicht des Rechts, der Soziologie und der Politikwissenschaft anhand von Untersuchungen {\"u}ber das Sanktionsystem der Richter in den USA. Diese politikwissenschaftliche Seite wird auch in den Aufs{\"a}tzen {\"u}ber die aktuellen Fragen der europ{\"a}ischen Integration von Carine Guemar und Laureline Congnard betont. Eine Reihe von Aufs{\"a}tzen behandeln die konventionelle normative Komparatistik auf dem Gebiet des Verfassungsrechts (Jordane Arlettaz und P{\'e}ter Kruzslicz), Gesellschaftsrechts (Kitti Bakos-Kov{\'a}cs), Urheberrechts (D{\'o}ra Hajd{\'u}) und Steuerrechts (Judit Jacs{\'o}). Daneben bilden eine weitere Gruppe die Aufs{\"a}tze von J{\'a}nos B{\'o}ka und Erzs{\'e}bet Csatl{\'o}s, die die Verwendung der vergleichenden Methode in der Praxis der Rechtsprechung untersuchen. Die Rechtsvergleichung ist eine sich dynamisch entwickelnde Disziplin. Die Konferenz und dieser Band dienen nicht nur der W{\"u}rdigung der bisherigen Arbeit des Instituts f{\"u}r Rechtsvergleichung, sondern zeigen gleichzeitig neue Ziele auf. Die wichtigsten Grunds{\"a}tze bleiben aber fest verankert auch in einem sich stets ver{\"a}ndernden rechtlichen und geistigen Umfeld. 
Das Motto des Instituts lautet „instruere et docere omnes qui edoceri desiderant" - „alle lehren, die lernen wollen." Auch in den folgenden Jahrzehnten werden uns der Wille des Lernens und Lehrens, die Freiheit der Forschung sowie die {\"U}bertragung und Weiterentwicklung der ungarischen wie globalen Rechtskultur leiten.}, language = {de} } @inproceedings{CurzonKalasSchubertetal.2015, author = {Curzon, Paul and Kalas, Ivan and Schubert, Sigrid and Schaper, Niclas and Barnes, Jan and Kennewell, Steve and Br{\"o}ker, Kathrin and Kastens, Uwe and Magenheim, Johannes and Dagiene, Valentina and Stupuriene, Gabriele and Ellis, Jason Brent and Abreu-Ellis, Carla Reis and Grillenberger, Andreas and Romeike, Ralf and Haugsbakken, Halvdan and Jones, Anthony and Lewin, Cathy and McNicol, Sarah and Nelles, Wolfgang and Neugebauer, Jonas and Ohrndorf, Laura and Schaper, Niclas and Schubert, Sigrid and Opel, Simone and Kramer, Matthias and Trommen, Michael and Pottb{\"a}cker, Florian and Ilaghef, Youssef and Passig, David and Tzuriel, David and Kedmi, Ganit Eshel and Saito, Toshinori and Webb, Mary and Weigend, Michael and Bottino, Rosa and Chioccariello, Augusto and Christensen, Rhonda and Knezek, Gerald and Gioko, Anthony Maina and Angondi, Enos Kiforo and Waga, Rosemary and Ohrndorf, Laura and Or-Bach, Rachel and Preston, Christina and Younie, Sarah and Przybylla, Mareen and Romeike, Ralf and Reynolds, Nicholas and Swainston, Andrew and Bendrups, Faye and Sysło, Maciej M. 
and Kwiatkowska, Anna Beata and Zieris, Holger and Gerstberger, Herbert and M{\"u}ller, Wolfgang and B{\"u}chner, Steffen and Opel, Simone and Schiller, Thomas and Wegner, Christian and Zender, Raphael and Lucke, Ulrike and Diethelm, Ira and Syrbe, J{\"o}rn and Lai, Kwok-Wing and Davis, Niki and Eickelmann, Birgit and Erstad, Ola and Fisser, Petra and Gibson, David and Khaddage, Ferial and Knezek, Gerald and Micheuz, Peter and Kloos, Carlos Delgado}, title = {KEYCIT 2014}, editor = {Brinda, Torsten and Reynolds, Nicholas and Romeike, Ralf and Schwill, Andreas}, publisher = {Universit{\"a}tsverlag Potsdam}, address = {Potsdam}, isbn = {978-3-86956-292-6}, url = {http://nbn-resolving.de/urn:nbn:de:kobv:517-opus4-70325}, pages = {438}, year = {2015}, abstract = {In our rapidly changing world it is increasingly important not only to be an expert in a chosen field of study but also to be able to respond to developments, master new approaches to solving problems, and fulfil changing requirements in the modern world and in the job market. In response to these needs key competencies in understanding, developing and using new digital technologies are being brought into focus in school and university programmes. The IFIP TC3 conference "KEYCIT - Key Competences in Informatics and ICT (KEYCIT 2014)" was held at the University of Potsdam in Germany from July 1st to 4th, 2014 and addressed the combination of key competencies, Informatics and ICT in detail. 
The conference was organized into strands focusing on secondary education, university education and teacher education (organized by IFIP WGs 3.1 and 3.3) and provided a forum to present and to discuss research, case studies, positions, and national perspectives in this field.}, language = {en} } @inproceedings{Schneider2022, author = {Schneider, Ulrike}, title = {Contextualizing Jewish American Literature}, series = {"Minor minorities" and multiculturalism : Italian American and Jewish American literature}, booktitle = {"Minor minorities" and multiculturalism : Italian American and Jewish American literature}, publisher = {Eum}, address = {Macerata}, isbn = {978-88-6056-777-2}, issn = {2974-5225}, doi = {10.13138/iaam_60567789}, pages = {331 -- 340}, year = {2022}, language = {de} } @inproceedings{TrilckeParrD'Aprileetal.2022, author = {Trilcke, Peer and Parr, Rolf and D'Aprile, Iwan-Michelangelo and Kraus, Hans-Christof and Blomqvist, Clarissa and McGillen, Petra S. and Aus der Au, Carmen and Phillips, Alexander Robert and Helmer, Debora and Singer, R{\"u}diger and G{\"o}rner, R{\"u}diger and Berbig, Roland and Rose, Dirk and Wilhelms, Kerstin and Krause, Marcus and Hehle, Christine and Gretz, Daniela and Gfrereis, Heike and Lepp, Nicola and Morlok, Franziska and Haut, Gideon and Brechenmacher, Thomas and Stauffer, Isabelle and Lyon, John B. 
and Bachmann, Vera and Ewert, Michael and Immer, Nikolas and Vedder, Ulrike and Fischer, Hubertus and Becker, Sabina and Wegmann, Christoph and M{\"o}ller, Klaus-Peter and Schneider, Ulrike and Waszynski, Alexander and Wedel, Michael and Brehm, David and Wolpert, Georg}, title = {Fontanes Medien}, editor = {Trilcke, Peer}, publisher = {De Gruyter}, address = {Berlin}, isbn = {978-3-11-073330-3}, doi = {10.1515/9783110733235}, pages = {XIII, 672}, year = {2022}, abstract = {Theodor Fontane war, im durchaus modernen Sinne, ein Medienarbeiter: Als Presse-Agent in London lernte er die innovativste Presselandschaft seiner Zeit kennen; als Redakteur in Berlin leistete er journalistische K{\"a}rrnerarbeit; er schrieb Kritiken {\"u}ber das Theater, die bildende Kunst und die Literatur - und auch seine Romane wie seine Reiseb{\"u}cher sind stets Medienprodukte, als Serien in Zeitungen und Zeitschriften platziert, bevor sie auf dem Buchmarkt erschienen. Der vorliegende Band dokumentiert die Ergebnisse eines internationalen Kongresses, veranstaltet 2019 vom Theodor-Fontane-Archiv in Potsdam. Die ebenso rasante wie umfassende Medialisierung und Vernetzung der Gesellschaft im Laufe des 19. Jahrhunderts wird dabei als produktive Voraussetzung der schriftstellerischen T{\"a}tigkeit Fontanes begriffen. 
Eingebettet in ein weit verzweigtes Netz der Korrespondenz und der postalischen Textzirkulation, vertraut mit den Routinen und Publika der periodischen Massenpresse, f{\"u}r die er sein Leben lang schrieb, und auf vielf{\"a}ltige Weise gepr{\"a}gt von der visuellen Kultur seiner Zeit wird Theodor Fontane als gleichermaßen journalistisch versierter wie {\"a}sthetisch sensibler Grenzg{\"a}nger erkennbar.}, language = {de} } @inproceedings{DeselOpelSiegerisetal.2023, author = {Desel, J{\"o}rg and Opel, Simone and Siegeris, Juliane and Draude, Claude and Weber, Gerhard and Schell, Timon and Schwill, Andreas and Thorbr{\"u}gge, Carsten and Sch{\"a}fer, Len Ole and Netzer, Cajus Marian and Gerstenberger, Dietrich and Winkelnkemper, Felix and Schulte, Carsten and B{\"o}ttcher, Axel and Thurner, Veronika and H{\"a}fner, Tanja and Ottinger, Sarah and Große-B{\"o}lting, Gregor and Scheppach, Lukas and M{\"u}hling, Andreas and Baberowski, David and Leonhardt, Thiemo and Rentsch, Susanne and Bergner, Nadine and Bonorden, Leif and Stemme, Jonas and Hoppe, Uwe and Weicker, Karsten and Bender, Esther and Barbas, Helena and Hamann, Fabian and Soll, Marcus and Sitzmann, Daniel}, title = {Hochschuldidaktik Informatik HDI 2021}, series = {Commentarii informaticae didacticae}, booktitle = {Commentarii informaticae didacticae}, number = {13}, editor = {Desel, J{\"o}rg and Opel, Simone and Siegeris, Juliane}, publisher = {Universit{\"a}tsverlag Potsdam}, address = {Potsdam}, isbn = {978-3-86956-548-4}, issn = {1868-0844}, doi = {10.25932/publishup-56507}, url = {http://nbn-resolving.de/urn:nbn:de:kobv:517-opus4-565070}, pages = {299}, year = {2023}, abstract = {Die Fachtagungen HDI (Hochschuldidaktik Informatik) besch{\"a}ftigen sich mit den unterschiedlichen Aspekten informatischer Bildung im Hochschulbereich. 
Neben den allgemeinen Themen wie verschiedenen Lehr- und Lernformen, dem Einsatz von Informatiksystemen in der Hochschullehre oder Fragen der Gewinnung von geeigneten Studierenden, deren Kompetenzerwerb oder auch der Betreuung der Studierenden widmet sich die HDI immer auch einem Schwerpunktthema. Im Jahr 2021 war dies die Ber{\"u}cksichtigung von Diversit{\"a}t in der Lehre. Diskutiert wurden beispielsweise die Einbeziehung von besonderen fachlichen und {\"u}berfachlichen Kompetenzen Studierender, der Unterst{\"u}tzung von Durchl{\"a}ssigkeit aus nichtakademischen Berufen, aber auch die Gestaltung inklusiver Lehr- und Lernszenarios, Aspekte des Lebenslangen Lernens oder sich an die Diversit{\"a}t von Studierenden adaptierte oder adaptierende Lehrsysteme. Dieser Band enth{\"a}lt ausgew{\"a}hlte Beitr{\"a}ge der 9. Fachtagung 2021, die in besonderer Weise die Konferenz und die dort diskutierten Themen repr{\"a}sentieren.}, language = {de} }