@article{PrasseKnaebelMachlicaetal.2019, author = {Prasse, Paul and Knaebel, Rene and Machlica, Lukas and Pevny, Tomas and Scheffer, Tobias}, title = {Joint detection of malicious domains and infected clients}, series = {Machine learning}, volume = {108}, journal = {Machine learning}, number = {8-9}, publisher = {Springer}, address = {Dordrecht}, issn = {0885-6125}, doi = {10.1007/s10994-019-05789-z}, pages = {1353 -- 1368}, year = {2019}, abstract = {Detection of malware-infected computers and detection of malicious web domains based on their encrypted HTTPS traffic are challenging problems, because only addresses, timestamps, and data volumes are observable. The detection problems are coupled, because infected clients tend to interact with malicious domains. Traffic data can be collected at a large scale, and antivirus tools can be used to identify infected clients in retrospect. Domains, by contrast, have to be labeled individually after forensic analysis. We explore transfer learning based on sluice networks; this allows the detection models to bootstrap each other. In a large-scale experimental study, we find that the model outperforms known reference models and detects previously unknown malware, previously unknown malware families, and previously unknown malicious domains.}, language = {en} } @article{CabalarFandinnoSchaubetal.2019, author = {Cabalar, Pedro and Fandinno, Jorge and Schaub, Torsten H. and Schellhorn, Sebastian}, title = {Gelfond-Zhang aggregates as propositional formulas}, series = {Artificial intelligence}, volume = {274}, journal = {Artificial intelligence}, publisher = {Elsevier}, address = {Amsterdam}, issn = {0004-3702}, doi = {10.1016/j.artint.2018.10.007}, pages = {26 -- 43}, year = {2019}, abstract = {Answer Set Programming (ASP) has become a popular and widespread paradigm for practical Knowledge Representation thanks to its expressiveness and the available enhancements of its input language. One such enhancement is the use of aggregates, for which different semantic proposals have been made. In this paper, we show that any ASP aggregate interpreted under Gelfond and Zhang's (GZ) semantics can be replaced (under strong equivalence) by a propositional formula. Restricted to the original GZ syntax, the resulting formula is reducible to a disjunction of conjunctions of literals, but the formulation is still applicable even when the syntax is extended to allow for arbitrary formulas (including nested aggregates) in the condition. Once GZ-aggregates are represented as formulas, we establish a formal comparison (in terms of the logic of Here-and-There) to Ferraris' (F) aggregates, which are defined by a different formula translation involving nested implications. In particular, we prove that if we replace an F-aggregate by a GZ-aggregate in a rule head, we do not lose answer sets (although more can be gained). This extends the previously known result that the opposite happens in rule bodies, i.e., replacing a GZ-aggregate by an F-aggregate in the body may yield more answer sets. 
Finally, we characterize a class of aggregates for which GZ- and F-semantics coincide.}, language = {en} } @article{AguadoCabalarFandinoetal.2019, author = {Aguado, Felicidad and Cabalar, Pedro and Fandi{\~n}o, Jorge and Pearce, David and Perez, Gilberto and Vidal-Peracho, Concepcion}, title = {Revisiting Explicit Negation in Answer Set Programming}, series = {Theory and practice of logic programming}, volume = {19}, journal = {Theory and practice of logic programming}, number = {5-6}, publisher = {Cambridge Univ. Press}, address = {New York}, issn = {1471-0684}, doi = {10.1017/S1471068419000267}, pages = {908 -- 924}, year = {2019}, language = {en} } @phdthesis{AbdelwahabHusseinAbdelwahabElsayed2019, author = {Abdelwahab Hussein Abdelwahab Elsayed, Ahmed}, title = {Probabilistic, deep, and metric learning for biometric identification from eye movements}, doi = {10.25932/publishup-46798}, url = {http://nbn-resolving.de/urn:nbn:de:kobv:517-opus4-467980}, school = {Universit{\"a}t Potsdam}, pages = {vi, 65}, year = {2019}, abstract = {A central insight from psychological studies on human eye movements is that eye movement patterns are highly individually characteristic. They can, therefore, be used as a biometric feature, that is, subjects can be identified based on their eye movements. This thesis introduces new machine learning methods to identify subjects based on their eye movements while viewing arbitrary content. The thesis focuses on probabilistic modeling of the problem, which has yielded the best results in the most recent literature. The thesis studies the problem in three phases by proposing a purely probabilistic, probabilistic deep learning, and probabilistic deep metric learning approach. In the first phase, the thesis studies models that rely on psychological concepts about eye movements. Recent literature illustrates that individual-specific distributions of gaze patterns can be used to accurately identify individuals. In these studies, models were based on a simple parametric family of distributions. Such simple parametric models can be robustly estimated from sparse data, but have limited flexibility to capture the differences between individuals. Therefore, this thesis proposes a semiparametric model of gaze patterns that is flexible yet robust for individual identification. These patterns can be understood as domain knowledge derived from psychological literature. Fixations and saccades are examples of simple gaze patterns. The proposed semiparametric densities are drawn under a Gaussian process prior centered at a simple parametric distribution. Thus, the model will stay close to the parametric class of densities if little data is available, but it can also deviate from this class if enough data is available, increasing the flexibility of the model. The proposed method is evaluated on a large-scale dataset, showing significant improvements over the state-of-the-art. Later, the thesis replaces the model based on gaze patterns derived from psychological concepts with a deep neural network that can learn more informative and complex patterns from raw eye movement data. As previous work has shown that the distribution of these patterns across a sequence is informative, a novel statistical aggregation layer called the quantile layer is introduced. It explicitly fits the distribution of deep patterns learned directly from the raw eye movement data. 
The proposed deep learning approach is end-to-end learnable, such that the deep model learns to extract informative, short local patterns while the quantile layer learns to approximate the distributions of these patterns. Quantile layers are a generic approach that can converge to standard pooling layers or have a more detailed description of the features being pooled, depending on the problem. The proposed model is evaluated in a large-scale study using the eye movements of subjects viewing arbitrary visual input. The model improves upon the standard pooling layers and other statistical aggregation layers proposed in the literature. It also improves upon the state of the art in eye movement biometrics by a wide margin. Finally, for the model to identify any subject — not just the set of subjects it is trained on — a metric learning approach is developed. Metric learning learns a distance function over instances. The metric learning model maps the instances into a metric space, where sequences of the same individual are close, and sequences of different individuals are further apart. This thesis introduces a deep metric learning approach with distributional embeddings. The approach represents sequences as a set of continuous distributions in a metric space; to achieve this, a new loss function based on Wasserstein distances is introduced. The proposed method is evaluated on multiple domains besides eye movement biometrics. This approach outperforms the state of the art in deep metric learning in several domains while also outperforming the state of the art in eye movement biometrics.}, language = {en} } @misc{Fandinno2019, author = {Fandinno, Jorge}, title = {Founded (auto)epistemic equilibrium logic satisfies epistemic splitting}, series = {Postprints der Universit{\"a}t Potsdam : Mathematisch-Naturwissenschaftliche Reihe}, journal = {Postprints der Universit{\"a}t Potsdam : Mathematisch-Naturwissenschaftliche Reihe}, number = {1060}, issn = {1866-8372}, doi = {10.25932/publishup-46968}, url = {http://nbn-resolving.de/urn:nbn:de:kobv:517-opus4-469685}, pages = {671 -- 687}, year = {2019}, abstract = {In a recent line of research, two familiar concepts from logic programming semantics (unfounded sets and splitting) were extrapolated to the case of epistemic logic programs. The property of epistemic splitting provides a natural and modular way to understand programs without epistemic cycles but, surprisingly, was only fulfilled by Gelfond's original semantics (G91), among the many proposals in the literature. On the other hand, G91 may suffer from a kind of self-supported, unfounded derivation when epistemic cycles come into play. Recently, the absence of these derivations was also formalised as a property of epistemic semantics called foundedness. Moreover, a first semantics proved to satisfy foundedness was also proposed, the so-called Founded Autoepistemic Equilibrium Logic (FAEEL). In this paper, we prove that FAEEL also satisfies the epistemic splitting property, something that, together with foundedness, was not fulfilled by any other approach to date. 
To prove this result, we provide an alternative characterisation of FAEEL as a combination of G91 with a simpler logic we called Founded Epistemic Equilibrium Logic (FEEL), which can be seen as an extrapolation of the stable model semantics to the modal logic S5.}, language = {en} } @misc{AguadoCabalarFandinnoetal.2019, author = {Aguado, Felicidad and Cabalar, Pedro and Fandinno, Jorge and Pearce, David and Perez, Gilberto and Vidal, Concepcion}, title = {Revisiting explicit negation in answer set programming}, series = {Postprints der Universit{\"a}t Potsdam : Mathematisch-Naturwissenschaftliche Reihe}, journal = {Postprints der Universit{\"a}t Potsdam : Mathematisch-Naturwissenschaftliche Reihe}, number = {1104}, issn = {1866-8372}, doi = {10.25932/publishup-46969}, url = {http://nbn-resolving.de/urn:nbn:de:kobv:517-opus4-469697}, pages = {908 -- 924}, year = {2019}, abstract = {A common feature in Answer Set Programming is the use of a second negation, stronger than default negation and sometimes called explicit, strong or classical negation. This explicit negation is normally used in front of atoms, rather than allowing its use as a regular operator. In this paper we consider the arbitrary combination of explicit negation with nested expressions, as defined by Lifschitz, Tang and Turner. We extend the concept of reduct for this new syntax and then prove that it can be captured by an extension of Equilibrium Logic with this second negation. We study some properties of this variant and compare it to the already known combination of Equilibrium Logic with Nelson's strong negation.}, language = {en} } @misc{Strickroth2019, author = {Strickroth, Sven}, title = {PLATON}, series = {Postprints der Universit{\"a}t Potsdam : Mathematisch-Naturwissenschaftliche Reihe}, journal = {Postprints der Universit{\"a}t Potsdam : Mathematisch-Naturwissenschaftliche Reihe}, number = {804}, issn = {1866-8372}, doi = {10.25932/publishup-44188}, url = {http://nbn-resolving.de/urn:nbn:de:kobv:517-opus4-441887}, pages = {28}, year = {2019}, abstract = {Lesson planning is both an important and demanding task—especially as part of teacher training. This paper presents the requirements for a lesson planning system and evaluates existing systems regarding these requirements. One major drawback of existing software tools is that most are limited to a text- or form-based representation of the lesson designs. In this article, a new approach with a graphical, time-based representation and (automatic) analysis methods is proposed, and the system architecture and domain model are described in detail. The approach is implemented in an interactive, web-based prototype called PLATON, which additionally supports the management of lessons in units as well as the modelling of teacher and student-generated resources. The prototype was evaluated in a study with 61 prospective teachers (bachelor's and master's preservice teachers as well as teacher trainees in post-university teacher training) in Berlin, Germany, with a focus on usability. 
The results show that this approach proved usable for lesson planning and offers positive effects for the perception of time and self-reflection.}, language = {en} } @article{MetrefCosmeLeSommeretal.2019, author = {Metref, Sammy and Cosme, Emmanuel and Le Sommer, Julien and Poel, Nora and Brankart, Jean-Michel and Verron, Jacques and Gomez Navarro, Laura}, title = {Reduction of spatially structured errors in Wide-Swath altimetric satellite data using data assimilation}, series = {Remote sensing}, volume = {11}, journal = {Remote sensing}, number = {11}, publisher = {MDPI}, address = {Basel}, issn = {2072-4292}, doi = {10.3390/rs11111336}, pages = {21}, year = {2019}, abstract = {The Surface Water and Ocean Topography (SWOT) mission is a next-generation satellite mission expected to provide a 2 km-resolution observation of the sea surface height (SSH) on a two-dimensional swath. Processing SWOT data will be challenging because of the large amount of data, the mismatch between a high spatial resolution and a low temporal resolution, and the observation errors. The present paper focuses on the reduction of the spatially structured errors of SWOT SSH data. It investigates a new error reduction method and assesses its performance in an observing system simulation experiment. The proposed error-reduction method first projects the SWOT SSH onto a subspace spanned by the SWOT spatially structured errors. This projection is removed from the SWOT SSH to obtain a detrended SSH. The detrended SSH is then processed within an ensemble data assimilation analysis to retrieve a full SSH field. In the latter step, the detrending is applied to both the SWOT data and an ensemble of model-simulated SSH fields. Numerical experiments are performed with synthetic SWOT observations and an ensemble from a North Atlantic, 1/60 degrees simulation of the ocean circulation (NATL60). The data assimilation analysis is carried out with an ensemble Kalman filter. The results are assessed with root mean square errors, power spectrum density, and spatial coherence. They show that a significant part of the large-scale SWOT errors is reduced. The filter analysis also reduces the small-scale errors and allows for an accurate recovery of the energy of the signal down to 25 km scales. In addition, using the SWOT nadir data to adjust the SSH detrending further reduces the errors.}, language = {en} } @article{DimopoulosGebserLuehneetal.2019, author = {Dimopoulos, Yannis and Gebser, Martin and L{\"u}hne, Patrick and Romero Davila, Javier and Schaub, Torsten H.}, title = {plasp 3}, series = {Theory and practice of logic programming}, volume = {19}, journal = {Theory and practice of logic programming}, number = {3}, publisher = {Cambridge Univ. Press}, address = {New York}, issn = {1471-0684}, doi = {10.1017/S1471068418000583}, pages = {477 -- 504}, year = {2019}, abstract = {We describe the new version of the Planning Domain Definition Language (PDDL)-to-Answer Set Programming (ASP) translator plasp. First, it widens the range of accepted PDDL features. Second, it contains novel planning encodings, some inspired by Satisfiability Testing (SAT) planning and others exploiting ASP features such as well-foundedness. All of them are designed for handling multivalued fluents in order to capture both PDDL and SAS planning formats. Third, enabled by multishot ASP solving, it offers advanced planning algorithms also borrowed from SAT planning. 
As a result, plasp provides us with an ASP-based framework for studying a variety of planning techniques in a uniform setting. Finally, we demonstrate in an empirical analysis that these techniques have a significant impact on the performance of ASP planning.}, language = {en} } @misc{Przybylla2019, author = {Przybylla, Mareen}, title = {Interactive objects in physical computing and their role in the learning process}, series = {Constructivist foundations}, volume = {14}, journal = {Constructivist foundations}, number = {3}, publisher = {Vrije Univ.}, address = {Brussels}, issn = {1782-348X}, pages = {264 -- 266}, year = {2019}, abstract = {The target article discusses the question of how educational makerspaces can become places supportive of knowledge construction. This question is too often neglected by people who run makerspaces, as they mostly explain how to use different tools and focus on the creation of a product. In makerspaces, pupils also often engage in physical computing activities and thus in the creation of interactive artifacts containing embedded systems, such as smart shoes or wristbands, plant monitoring systems or drink mixing machines. This offers the opportunity to reflect on teaching physical computing in computer science education, where, similarly, the creation of the product is often focused on so strongly that reflection on the learning process is pushed into the background.}, language = {en} }