@article{LinkeTompitsWoltran2004, author = {Linke, Thomas and Tompits, Hans and Woltran, Stefan}, title = {On acyclic and head-cycle free nested logic programs}, year = {2004}, language = {en} } @article{Lis2014, author = {Lis, Monika}, title = {Constructing a Phylogenetic Tree}, series = {Process Design for Natural Scientists: an agile model-driven approach}, journal = {Process Design for Natural Scientists: an agile model-driven approach}, number = {500}, editor = {Lambrecht, Anna-Lena and Margaria, Tiziana}, publisher = {Springer Verlag}, address = {Berlin}, isbn = {978-3-662-45005-5}, issn = {1865-0929}, pages = {101 -- 109}, year = {2014}, abstract = {In this project I constructed a workflow that takes a DNA sequence as input and provides a phylogenetic tree, consisting of the input sequence and other sequences which were found during a database search. In this phylogenetic tree the sequences are arranged depending on similarities. In bioinformatics, constructing phylogenetic trees is often used to explore the evolutionary relationships of genes or organisms and to understand the mechanisms of evolution itself.}, language = {en} } @article{LisecSteinfathMeyeretal.2009, author = {Lisec, Jan and Steinfath, Matthias and Meyer, Rhonda C. and Selbig, Joachim and Melchinger, Albrecht E. and Willmitzer, Lothar and Altmann, Thomas}, title = {Identification of heterotic metabolite QTL in Arabidopsis thaliana RIL and IL populations}, issn = {0960-7412}, doi = {10.1111/j.1365-313X.2009.03910.x}, year = {2009}, abstract = {Two mapping populations of a cross between the Arabidopsis thaliana accessions Col-0 and C24 were cultivated and analyzed with respect to the levels of 181 metabolites to elucidate the biological phenomenon of heterosis at the metabolic level. The relative mid-parent heterosis in the F-1 hybrids was <20\% for most metabolic traits. 
The first mapping population consisting of 369 recombinant inbred lines (RILs) and their test cross progeny with both parents allowed us to determine the position and effect of 147 quantitative trait loci (QTL) for metabolite absolute mid-parent heterosis (aMPH). Furthermore, we identified 153 and 83 QTL for augmented additive (Z(1)) and dominance effects (Z(2)), respectively. We identified putative candidate genes for these QTL using the ARACYC database (http://www.arabidopsis.org/ biocyc), and calculated the average degree of dominance, which was within the dominance and over-dominance range for most metabolites. Analyzing a second population of 41 introgression lines (ILs) and their test crosses with the recurrent parent, we identified 634 significant differences in metabolite levels. Nine per cent of these effects were classified as over-dominant, according to the mode of inheritance. A comparison of both approaches suggested epistasis as a major contributor to metabolite heterosis in Arabidopsis. A linear combination of metabolite levels was shown to significantly correlate with biomass heterosis (r = 0.62).}, language = {en} } @article{LorenzClemensSchroetteretal.2022, author = {Lorenz, Claas and Clemens, Vera Elisabeth and Schr{\"o}tter, Max and Schnor, Bettina}, title = {Continuous verification of network security compliance}, series = {IEEE transactions on network and service management}, volume = {19}, journal = {IEEE transactions on network and service management}, number = {2}, publisher = {Institute of Electrical and Electronics Engineers}, address = {New York}, issn = {1932-4537}, doi = {10.1109/TNSM.2021.3130290}, pages = {1729 -- 1745}, year = {2022}, abstract = {Continuous verification of network security compliance is an accepted need. Especially, the analysis of stateful packet filters plays a central role for network security in practice. 
But the few existing tools which support the analysis of stateful packet filters are based on general applicable formal methods like Satisfiability Modulo Theories (SMT) or theorem prover and show runtimes in the order of minutes to hours making them unsuitable for continuous compliance verification. In this work, we address these challenges and present the concept of state shell interweaving to transform a stateful firewall rule set into a stateless rule set. This allows us to reuse any fast domain specific engine from the field of data plane verification tools leveraging smart, very fast, and domain specialized data structures and algorithms including Header Space Analysis (HSA). First, we introduce the formal language FPL that enables a high-level human-understandable specification of the desired state of network security. Second, we demonstrate the instantiation of a compliance process using a verification framework that analyzes the configuration of complex networks and devices - including stateful firewalls - for compliance with FPL policies. Our evaluation results show the scalability of the presented approach for the well known Internet2 and Stanford benchmarks as well as for large firewall rule sets where it outscales state-of-the-art tools by a factor of over 41.}, language = {en} } @article{LuckeRensing2014, author = {Lucke, Ulrike and Rensing, Christoph}, title = {A survey on pervasive education}, series = {Pervasive and mobile computing}, volume = {14}, journal = {Pervasive and mobile computing}, publisher = {Elsevier}, address = {Amsterdam}, issn = {1574-1192}, doi = {10.1016/j.pmcj.2013.12.001}, pages = {3 -- 16}, year = {2014}, abstract = {Researchers and developers worldwide have put their efforts into the design, development and use of information and communication technology to support teaching and learning. This research is driven by pedagogical as well as technological disciplines. 
The most challenging ideas are currently found in the application of mobile, ubiquitous, pervasive, contextualized and seamless technologies for education, which we shall refer to as pervasive education. This article provides a comprehensive overview of the existing work in this field and categorizes it with respect to educational settings. Using this approach, best practice solutions for certain educational settings and open questions for pervasive education are highlighted in order to inspire interested developers and educators. The work is assigned to different fields, identified by the main pervasive technologies used and the educational settings. Based on these assignments we identify areas within pervasive education that are currently disregarded or deemed challenging so that further research and development in these fields are stimulated in a trans-disciplinary approach. (C) 2013 Elsevier B.V. All rights reserved.}, language = {en} } @article{LuckowJhaKimetal.2009, author = {Luckow, Andre and Jha, Shantenu and Kim, Joohyun and Merzky, Andre and Schnor, Bettina}, title = {Adaptive distributed replica-exchange simulations}, issn = {1364-503X}, doi = {10.1098/rsta.2009.0051}, year = {2009}, abstract = {Owing to the loose coupling between replicas, the replica-exchange (RE) class of algorithms should be able to benefit greatly from using as many resources as available. However, the ability to effectively use multiple distributed resources to reduce the time to completion remains a challenge at many levels. Additionally, an implementation of a pleasingly distributed algorithm such as replica-exchange, which is independent of infrastructural details, does not exist. This paper proposes an extensible and scalable framework based on Simple API for Grid Applications that provides a general-purpose, opportunistic mechanism to effectively use multiple resources in an infrastructure-independent way. 
By analysing the requirements of the RE algorithm and the challenges of implementing it on real production systems, we propose a new abstraction (BIGJOB), which forms the basis of the adaptive redistribution and effective scheduling of replicas.}, language = {en} } @article{LuckowSchnor2006, author = {Luckow, Andr{\'e} and Schnor, Bettina}, title = {Migol : a Fault Tolerant Service Framework for Grid Computing : Evolution to WSRF (2006)}, year = {2006}, language = {en} } @article{LuckowSchnor2008, author = {Luckow, Andr{\'e} and Schnor, Bettina}, title = {Migol : a fault-tolerant service framework for MPI applications in the grid}, doi = {10.1016/j.future.2007.03.007}, year = {2008}, abstract = {Especially for sciences the provision of massive parallel CPU capacity is one of the most attractive features of a grid. A major challenge in a distributed, inherently dynamic grid is fault tolerance. The more resources and components involved, the more complicated and error-prone becomes the system. In a grid with potentially thousands of machines connected to each other the reliability of individual resources cannot be guaranteed. The benefit of the grid is that in case of a failure an application may be migrated and restarted from a checkpoint file on another site. This approach requires a service infrastructure which handles the necessary activities transparently. In this article, we present Migol, a fault-tolerant and self-healing grid middleware for MPI applications. Migol is based on open standards and extends the services of the Globus toolkit to support the fault tolerance of grid applications. Further, the Migol framework itself is designed with special focus on fault tolerance. For example, Migol replicates critical services and uses a ring-based replication protocol to achieve data consistency. (c) 2007 Elsevier B.V. 
All rights reserved.}, language = {en} } @article{LuebbeWeske2012, author = {Luebbe, Alexander and Weske, Mathias}, title = {Determining the effect of tangible business process modeling}, year = {2012}, language = {en} } @article{LuebbeWeske2011, author = {Luebbe, Alexander and Weske, Mathias}, title = {Bringing design thinking to business process modeling}, isbn = {978-3-642-13756-3}, year = {2011}, language = {en} } @article{LutherTiberiusBrem2020, author = {Luther, Laura and Tiberius, Victor and Brem, Alexander}, title = {User experience (UX) in business, management, and psychology}, series = {Multimodal technologies and interaction : open access journal}, volume = {4}, journal = {Multimodal technologies and interaction : open access journal}, number = {2}, publisher = {MDPI}, address = {Basel}, issn = {2414-4088}, doi = {10.3390/mti4020018}, pages = {19}, year = {2020}, abstract = {User Experience (UX) describes the holistic experience of a user before, during, and after interaction with a platform, product, or service. UX adds value and attraction to their sole functionality and is therefore highly relevant for firms. The increased interest in UX has produced a vast amount of scholarly research since 1983. The research field is, therefore, complex and scattered. Conducting a bibliometric analysis, we aim at structuring the field quantitatively and rather abstractly. We employed citation analyses, co-citation analyses, and content analyses to evaluate productivity and impact of extant research. 
We suggest that future research should focus more on business and management related topics.}, language = {en} } @article{LyTarkhanov2009, author = {Ly, Ibrahim and Tarkhanov, Nikolai Nikolaevich}, title = {A variational approach to the Cauchy problem for nonlinear elliptic differential equations}, issn = {0928-0219}, doi = {10.1515/Jiip.2009.037}, year = {2009}, abstract = {We discuss the relaxation of a class of nonlinear elliptic Cauchy problems with data on a piece S of the boundary surface by means of a variational approach known in the optimal control literature as "equation error method". By the Cauchy problem is meant any boundary value problem for an unknown function y in a domain X with the property that the data on S, if combined with the differential equations in X, allow one to determine all derivatives of y on S by means of functional equations. In the case of real analytic data of the Cauchy problem, the existence of a local solution near S is guaranteed by the Cauchy-Kovalevskaya theorem. We also admit overdetermined elliptic systems, in which case the set of those Cauchy data on S for which the Cauchy problem is solvable is very "thin". 
For this reason we discuss a variational setting of the Cauchy problem which always possesses a generalised solution.}, language = {en} } @article{MarcoFigueraRiedelRossietal.2022, author = {Marco Figuera, Ramiro and Riedel, Christian and Rossi, Angelo Pio and Unnithan, Vikram}, title = {Depth to diameter analysis on small simple craters at the lunar south pole - possible implications for ice harboring}, series = {Remote sensing}, volume = {14}, journal = {Remote sensing}, number = {3}, publisher = {MDPI}, address = {Basel}, issn = {2072-4292}, doi = {10.3390/rs14030450}, pages = {13}, year = {2022}, abstract = {In this paper, we present a study comparing the depth to diameter (d/D) ratio of small simple craters (200-1000 m) of an area between -88.5 degrees to -90 degrees latitude at the lunar south pole containing Permanent Shadowed Regions (PSRs) versus craters without PSRs. As PSRs can reach temperatures of 110 K and are capable of harboring volatiles, especially water ice, we analyzed the relationship of depth versus diameter ratios and its possible implications for harboring water ice. Variations in the d/D ratios can also be caused by other processes such as degradation, isostatic adjustment, or differences in surface properties. The conducted d/D ratio analysis suggests that a differentiation between craters containing PSRs versus craters without PSRs occurs. Thus, a possible direct relation between d/D ratio, PSRs, and water ice harboring might exist. Our results suggest that differences in the target's surface properties may explain the obtained results. 
The resulting d/D ratios of craters with PSRs can help to select target areas for future In-Situ Resource Utilization (ISRU) missions.}, language = {en} } @article{MargariaSteffen2009, author = {Margaria, Tiziana and Steffen, Bernhard}, title = {Continuous model-driven engineering}, issn = {0018-9162}, year = {2009}, abstract = {Agility at the customer, user, and application level will prove key to aligning and linking business and IT}, language = {en} } @article{MeineckeHarmelingMueller2005, author = {Meinecke, Frank C. and Harmeling, Stefan and M{\"u}ller, Klaus-Robert}, title = {Inlier-based ICA with an application to superimposed images}, issn = {0899-9457}, year = {2005}, abstract = {This paper proposes a new independent component analysis (ICA) method which is able to unmix overcomplete mixtures of sparse or structured signals like speech, music or images. Furthermore, the method is designed to be robust against outliers, which is a favorable feature for ICA algorithms since most of them are extremely sensitive to outliers. Our approach is based on a simple outlier index. However, instead of robustifying an existing algorithm by some outlier rejection technique we show how this index can be used directly to solve the ICA problem for super-Gaussian sources. The resulting inlier-based ICA (IBICA) is outlier-robust by construction and can be used for standard ICA as well as for overcomplete ICA (i.e. more source signals than observed signals). (c) 2005 Wiley Periodicals, Inc}, language = {en} } @article{MeineckeZieheKurthsetal.2005, author = {Meinecke, Frank C. and Ziehe, Andreas and Kurths, J{\"u}rgen and M{\"u}ller, Klaus-Robert}, title = {Measuring phase synchronization of superimposed signals}, issn = {0031-9007}, year = {2005}, abstract = {Phase synchronization is an important phenomenon that occurs in a wide variety of complex oscillatory processes. Measuring phase synchronization can therefore help to gain fundamental insight into nature. 
In this Letter we point out that synchronization analysis techniques can detect spurious synchronization, if they are fed with a superposition of signals such as in electroencephalography or magnetoencephalography data. We show how techniques from blind source separation can help to nevertheless measure the true synchronization and avoid such pitfalls}, language = {en} } @article{MeinelKlotz2006, author = {Meinel, Christoph and Klotz, Volker}, title = {The first 10 years of the ECCC digital library}, doi = {10.1145/1107458.1107484}, year = {2006}, language = {en} } @article{MeinelLeifer2012, author = {Meinel, Christoph and Leifer, Larry}, title = {Design thinking research}, isbn = {978-3-642-31990-7}, year = {2012}, language = {en} } @article{MeinelLeifer2011, author = {Meinel, Christoph and Leifer, Larry}, title = {Design thinking research}, isbn = {978-3-642-13756-3}, year = {2011}, language = {en} } @article{MeinelLeifer2012a, author = {Meinel, Christoph and Leifer, Larry}, title = {Design thinking research}, year = {2012}, language = {en} } @article{MeinelWang2006, author = {Meinel, Christoph and Wang, Long}, title = {Building content clusters based on modelling page pairs}, doi = {10.1007/11610113_85}, year = {2006}, abstract = {We give a new view on building content clusters from page pair models. We measure the heuristic importance within every two pages by computing the distance of their accessed positions in usage sessions. We also compare our page pair models with the classical pair models used in information theories and natural language processing, and give different evaluation methods to build the reasonable content communities. 
And we finally interpret the advantages and disadvantages of our models from detailed experiment results}, language = {en} } @article{Menzel2010, author = {Menzel, Michael}, title = {Modelling security in service-oriented architectures}, isbn = {978-3-86956-036-6}, year = {2010}, language = {en} } @article{MetrefCosmeLeSommeretal.2019, author = {Metref, Sammy and Cosme, Emmanuel and Le Sommer, Julien and Poel, Nora and Brankart, Jean-Michel and Verron, Jacques and Gomez Navarro, Laura}, title = {Reduction of spatially structured errors in Wide-Swath altimetric satellite data using data assimilation}, series = {Remote sensing}, volume = {11}, journal = {Remote sensing}, number = {11}, publisher = {MDPI}, address = {Basel}, issn = {2072-4292}, doi = {10.3390/rs11111336}, pages = {21}, year = {2019}, abstract = {The Surface Water and Ocean Topography (SWOT) mission is a next generation satellite mission expected to provide a 2 km-resolution observation of the sea surface height (SSH) on a two-dimensional swath. Processing SWOT data will be challenging because of the large amount of data, the mismatch between a high spatial resolution and a low temporal resolution, and the observation errors. The present paper focuses on the reduction of the spatially structured errors of SWOT SSH data. It investigates a new error reduction method and assesses its performance in an observing system simulation experiment. The proposed error-reduction method first projects the SWOT SSH onto a subspace spanned by the SWOT spatially structured errors. This projection is removed from the SWOT SSH to obtain a detrended SSH. The detrended SSH is then processed within an ensemble data assimilation analysis to retrieve a full SSH field. In the latter step, the detrending is applied to both the SWOT data and an ensemble of model-simulated SSH fields. 
Numerical experiments are performed with synthetic SWOT observations and an ensemble from a North Atlantic, 1/60 degrees simulation of the ocean circulation (NATL60). The data assimilation analysis is carried out with an ensemble Kalman filter. The results are assessed with root mean square errors, power spectrum density, and spatial coherence. They show that a significant part of the large scale SWOT errors is reduced. The filter analysis also reduces the small scale errors and allows for an accurate recovery of the energy of the signal down to 25 km scales. In addition, using the SWOT nadir data to adjust the SSH detrending further reduces the errors.}, language = {en} } @article{MichallekGenskeNiehuesetal.2022, author = {Michallek, Florian and Genske, Ulrich and Niehues, Stefan Markus and Hamm, Bernd and Jahnke, Paul}, title = {Deep learning reconstruction improves radiomics feature stability and discriminative power in abdominal CT imaging}, series = {European Radiology}, volume = {32}, journal = {European Radiology}, number = {7}, publisher = {Springer}, address = {New York}, issn = {1432-1084}, doi = {10.1007/s00330-022-08592-y}, pages = {4587 -- 4595}, year = {2022}, abstract = {Objectives To compare image quality of deep learning reconstruction (AiCE) for radiomics feature extraction with filtered back projection (FBP), hybrid iterative reconstruction (AIDR 3D), and model-based iterative reconstruction (FIRST). Methods Effects of image reconstruction on radiomics features were investigated using a phantom that realistically mimicked a 65-year-old patient's abdomen with hepatic metastases. The phantom was scanned at 18 doses from 0.2 to 4 mGy, with 20 repeated scans per dose. Images were reconstructed with FBP, AIDR 3D, FIRST, and AiCE. Ninety-three radiomics features were extracted from 24 regions of interest, which were evenly distributed across three tissue classes: normal liver, metastatic core, and metastatic rim. 
Features were analyzed in terms of their consistent characterization of tissues within the same image (intraclass correlation coefficient >= 0.75), discriminative power (Kruskal-Wallis test p value < 0.05), and repeatability (overall concordance correlation coefficient >= 0.75). Results The median fraction of consistent features across all doses was 6\%, 8\%, 6\%, and 22\% with FBP, AIDR 3D, FIRST, and AiCE, respectively. Adequate discriminative power was achieved by 48\%, 82\%, 84\%, and 92\% of features, and 52\%, 20\%, 17\%, and 39\% of features were repeatable, respectively. Only 5\% of features combined consistency, discriminative power, and repeatability with FBP, AIDR 3D, and FIRST versus 13\% with AiCE at doses above 1 mGy and 17\% at doses >= 3 mGy. AiCE was the only reconstruction technique that enabled extraction of higher-order features. Conclusions AiCE more than doubled the yield of radiomics features at doses typically used clinically. Inconsistent tissue characterization within CT images contributes significantly to the poor stability of radiomics features.}, language = {en} } @article{MiddelanisWillnerOttoetal.2021, author = {Middelanis, Robin and Willner, Sven N. and Otto, Christian and Kuhla, Kilian and Quante, Lennart and Levermann, Anders}, title = {Wave-like global economic ripple response to Hurricane Sandy}, series = {Environmental research letters : ERL / Institute of Physics}, volume = {16}, journal = {Environmental research letters : ERL / Institute of Physics}, number = {12}, publisher = {IOP Publ. Ltd.}, address = {Bristol}, issn = {1748-9326}, doi = {10.1088/1748-9326/ac39c0}, pages = {11}, year = {2021}, abstract = {Tropical cyclones range among the costliest disasters on Earth. Their economic repercussions along the supply and trade network also affect remote economies that are not directly affected. 
We here simulate possible global repercussions on consumption for the example case of Hurricane Sandy in the US (2012) using the shock-propagation model Acclimate. The modeled shock yields a global three-phase ripple: an initial production demand reduction and associated consumption price decrease, followed by a supply shortage with increasing prices, and finally a recovery phase. Regions with strong trade relations to the US experience strong magnitudes of the ripple. A dominating demand reduction or supply shortage leads to overall consumption gains or losses of a region, respectively. While finding these repercussions in historic data is challenging due to strong volatility of economic interactions, numerical models like ours can help to identify them by approaching the problem from an exploratory angle, isolating the effect of interest. For this, our model simulates the economic interactions of over 7000 regional economic sectors, interlinked through about 1.8 million trade relations. Under global warming, the wave-like structures of the economic response to major hurricanes like the one simulated here are likely to intensify and potentially overlap with other weather extremes.}, language = {en} } @article{MikaRaetschWestonetal.2000, author = {Mika, Sebastian and R{\"a}tsch, Gunnar and Weston, J. and Sch{\"o}lkopf, B. and Smola, Alexander J. 
and M{\"u}ller, Klaus-Robert}, title = {Invariant feature extraction and classification in kernel spaces}, year = {2000}, language = {en} } @article{MileoSchaub2007, author = {Mileo, Alessandra and Schaub, Torsten}, title = {Qualitative constraint enforcement in advanced policy specification}, year = {2007}, language = {en} } @article{MileoSchaub2006, author = {Mileo, Alessandra and Schaub, Torsten}, title = {Extending ordered disjunctions for policy enforcement : preliminary report}, year = {2006}, language = {en} } @article{MileoSchaubMericoetal.2011, author = {Mileo, Alessandra and Schaub, Torsten and Merico, Davide and Bisiani, Roberto}, title = {Knowledge-based multi-criteria optimization to support indoor positioning}, series = {Annals of mathematics and artificial intelligence}, volume = {62}, journal = {Annals of mathematics and artificial intelligence}, number = {3-4}, publisher = {Springer}, address = {Dordrecht}, issn = {1012-2443}, doi = {10.1007/s10472-011-9241-2}, pages = {345 -- 370}, year = {2011}, abstract = {Indoor position estimation constitutes a central task in home-based assisted living environments. Such environments often rely on a heterogeneous collection of low-cost sensors whose diversity and lack of precision has to be compensated by advanced techniques for localization and tracking. Although there are well established quantitative methods in robotics and neighboring fields for addressing these problems, they lack advanced knowledge representation and reasoning capacities. Such capabilities are not only useful in dealing with heterogeneous and incomplete information but moreover they allow for a better inclusion of semantic information and more general homecare and patient-related knowledge. We address this problem and investigate how state-of-the-art localization and tracking methods can be combined with Answer Set Programming, as a popular knowledge representation and reasoning formalism. 
We report upon a case-study and provide a first experimental evaluation of knowledge-based position estimation both in a simulated as well as in a real setting.}, language = {en} } @article{MontavonBraunKruegeretal.2013, author = {Montavon, Gregoire and Braun, Mikio L. and Kr{\"u}ger, Tammo and M{\"u}ller, Klaus-Robert}, title = {Analyzing local structure in Kernel-Based learning}, series = {IEEE signal processing magazine}, volume = {30}, journal = {IEEE signal processing magazine}, number = {4}, publisher = {Inst. of Electr. and Electronics Engineers}, address = {Piscataway}, issn = {1053-5888}, doi = {10.1109/MSP.2013.2249294}, pages = {62 -- 74}, year = {2013}, language = {en} } @article{MorosovGoesselHartje1999, author = {Morosov, Andrej and G{\"o}ssel, Michael and Hartje, Hendrik}, title = {Reduced area overhead of the input parity for code-disjoint circuits}, year = {1999}, language = {en} } @article{MorosovSaposhnikovGoessel1998, author = {Morosov, Andrej and Saposhnikov, V. V. and G{\"o}ssel, Michael}, title = {Self-Checking circuits with unidirectionally independent outputs}, year = {1998}, language = {en} } @article{MorosovSaposhnikovSaposhnikovetal.1997, author = {Morosov, Andrej and Saposhnikov, Vl. V. and Saposhnikov, V. V. and G{\"o}ssel, Michael}, title = {Design of self dual fault-secure combinational circuits}, year = {1997}, language = {en} } @article{MoschaninSaposhnikovSaposhnikovetal.1996, author = {Moschanin, Wladimir and Saposhnikov, Vl. V. and Saposhnikov, Va. V. and G{\"o}ssel, Michael}, title = {Synthesis of self-dual multi-output combinational circuits for on-line Testing}, year = {1996}, language = {en} } @article{MoshaninOtscheretnijDimitriev1998, author = {Moshanin, Vl. and Otscheretnij, Vitalij and Dimitriev, Alexej}, title = {The impact of logic optimization on concurrent error detection}, year = {1998}, language = {en} } @article{MuellerVigarioMeineckeetal.2004, author = {M{\"u}ller, Klaus-Robert and Vigario, R. and Meinecke, Frank C. 
and Ziehe, Andreas}, title = {Blind source separation techniques for decomposing event-related brain signals}, issn = {0218-1274}, year = {2004}, abstract = {Recently blind source separation (BSS) methods have been highly successful when applied to biomedical data. This paper reviews the concept of BSS and demonstrates its usefulness in the context of event-related MEG measurements. In a first experiment we apply BSS to artifact identification of raw MEG data and discuss how the quality of the resulting independent component projections can be evaluated. The second part of our study considers averaged data of event-related magnetic fields. Here, it is particularly important to monitor and thus avoid possible overfitting due to limited sample size. A stability assessment of the BSS decomposition allows to solve this task and an additional grouping of the BSS components reveals interesting structure, that could ultimately be used for gaining a better physiological modeling of the data}, language = {en} } @article{NaujokatNeubauerLamprechtetal.2014, author = {Naujokat, Stefan and Neubauer, Johannes and Lamprecht, Anna-Lena and Steffen, Bernhard and Joerges, Sven and Margaria, Tiziana}, title = {Simplicity-first model-based plug-in development}, series = {Software : practice \& experience}, volume = {44}, journal = {Software : practice \& experience}, number = {3}, publisher = {Wiley-Blackwell}, address = {Hoboken}, issn = {0038-0644}, doi = {10.1002/spe.2243}, pages = {277 -- 297}, year = {2014}, abstract = {In this article, we present our experience with over a decade of strict simplicity orientation in the development and evolution of plug-ins. The point of our approach is to enable our graphical modeling framework jABC to capture plug-in development in a domain-specific setting. 
The typically quite tedious and technical plug-in development is shifted this way from a programming task to the modeling level, where it can be mastered also by application experts without programming expertise. We show how the classical plug-in development profits from a systematic domain-specific API design and how the level of abstraction achieved this way can be further enhanced by defining adequate building blocks for high-level plug-in modeling. As the resulting plug-in models can be compiled and deployed automatically, our approach decomposes plug-in development into three phases where only the realization phase requires plug-in-specific effort. By using our modeling framework jABC, this effort boils down to graphical, tool-supported process modeling. Furthermore, we support the automatic completion of process sketches for executability. All this will be illustrated along the most recent plug-in-based evolution of the jABC framework, which witnessed quite some bootstrapping effects.}, language = {en} } @article{NeumannStoffelHartjeetal.1999, author = {Neumann, I. 
and Stoffel, Dominik and Hartje, Hendrik and Kunz, Wolfgang}, title = {Cell replication and redundancy elimination during placement for cycle time optimization}, year = {1999}, language = {en} } @article{NicolasSchaub1998, author = {Nicolas, Pascal and Schaub, Torsten}, title = {The XRay system : an implementation platform for local query-answering in default logics}, isbn = {3-540-65312-0}, year = {1998}, language = {en} } @article{NienhausDoellner2005, author = {Nienhaus, Marc and D{\"o}llner, J{\"u}rgen Roland Friedrich}, title = {Depicting dynamics using principles of visual art and narrations}, issn = {0272-1716}, year = {2005}, language = {en} } @article{Noack2014, author = {Noack, Franziska}, title = {CREADED: Colored-Relief application for digital elevation data}, series = {Process design for natural scientists: an agile model-driven approach}, journal = {Process design for natural scientists: an agile model-driven approach}, number = {500}, publisher = {Springer}, address = {Berlin}, isbn = {978-3-662-45005-5}, issn = {1865-0929}, pages = {186 -- 199}, year = {2014}, abstract = {In the geoinformatics field, remote sensing data is often used for analyzing the characteristics of the current investigation area. This includes DEMs, which are simple raster grids containing grey scales representing the respective elevation values. The project CREADED that is presented in this paper aims at making these monochrome raster images more significant and more intuitively interpretable. For this purpose, an executable interactive model for creating a colored and relief-shaded Digital Elevation Model (DEM) has been designed using the jABC framework. 
The process is based on standard jABC-SIBs and SIBs that provide specific GIS functions, which are available as Web services, command line tools and scripts.}, language = {en} } @article{NoweskiScheerBuettneretal.2012, author = {Noweski, Christine and Scheer, Andrea and B{\"u}ttner, Nadja and Thienen, Julia von and Erdmann, Johannes and Meinel, Christoph}, title = {Towards a paradigm shift in education practice : developing twenty-first century skills with design thinking}, isbn = {978-3-642-31990-7}, year = {2012}, language = {en} } @article{OcheretnijGoesselSogomonyanetal.2006, author = {Ocheretnij, Vitalij and G{\"o}ssel, Michael and Sogomonyan, Egor S. and Marienfeld, Daniel}, title = {Modulo p=3 checking for a carry select adder}, doi = {10.1007/s10836-006-6260-8}, year = {2006}, abstract = {In this paper a self-checking carry select adder is proposed. The duplicated adder blocks which are inherent to a carry select adder without error detection are checked modulo 3. Compared to a carry select adder without error detection the delay of the MSB of the sum of the proposed adder does not increase. Compared to a self-checking duplicated carry select adder the area is reduced by 20\%. No restrictions are imposed on the design of the adder blocks}, language = {en} } @article{OmranianMuellerRoeberNikoloski2015, author = {Omranian, Nooshin and M{\"u}ller-R{\"o}ber, Bernd and Nikoloski, Zoran}, title = {Segmentation of biological multivariate time-series data}, series = {Scientific reports}, volume = {5}, journal = {Scientific reports}, publisher = {Nature Publ. Group}, address = {London}, issn = {2045-2322}, doi = {10.1038/srep08937}, pages = {6}, year = {2015}, abstract = {Time-series data from multicomponent systems capture the dynamics of the ongoing processes and reflect the interactions between the components. 
The progression of processes in such systems usually involves check-points and events at which the relationships between the components are altered in response to stimuli. Detecting these events together with the implicated components can help understand the temporal aspects of complex biological systems. Here we propose a regularized regression-based approach for identifying breakpoints and corresponding segments from multivariate time-series data. In combination with techniques from clustering, the approach also allows estimating the significance of the determined breakpoints as well as the key components implicated in the emergence of the breakpoints. Comparative analysis with the existing alternatives demonstrates the power of the approach to identify biologically meaningful breakpoints in diverse time-resolved transcriptomics data sets from the yeast Saccharomyces cerevisiae and the diatom Thalassiosira pseudonana.}, language = {en} } @article{OnodaRaetschMueller2000, author = {Onoda, T. and R{\"a}tsch, Gunnar and M{\"u}ller, Klaus-Robert}, title = {An asymptotic analysis and improvement of AdaBoost in the binary classification case (in Japanese)}, year = {2000}, language = {en} } @article{OstrowskiSchaub2012, author = {Ostrowski, Max and Schaub, Torsten}, title = {ASP modulo CSP The clingcon system}, series = {Theory and practice of logic programming}, volume = {12}, journal = {Theory and practice of logic programming}, publisher = {Cambridge Univ. Press}, address = {New York}, issn = {1471-0684}, doi = {10.1017/S1471068412000142}, pages = {485 -- 503}, year = {2012}, abstract = {We present the hybrid ASP solver clingcon, combining the simple modeling language and the high performance Boolean solving capacities of Answer Set Programming (ASP) with techniques for using non-Boolean constraints from the area of Constraint Programming (CP). 
The new clingcon system features an extended syntax supporting global constraints and optimize statements for constraint variables. The major technical innovation improves the interaction between ASP and CP solver through elaborated learning techniques based on irreducible inconsistent sets. A broad empirical evaluation shows that these techniques yield a performance improvement of an order of magnitude.}, language = {en} } @article{OtscheretnijGoesselSaposhnikovetal.1998, author = {Otscheretnij, Vitalij and G{\"o}ssel, Michael and Saposhnikov, Vl. V. and Saposhnikov, V. V.}, title = {Fault-tolerant self-dual circuits with error detection by parity- and group parity prediction}, year = {1998}, language = {en} } @article{OtscheretnijSaposhnikovSaposhnikovetal.1999, author = {Otscheretnij, Vitalij and Saposhnikov, Vl. V. and Saposhnikov, V. V. and G{\"o}ssel, Michael}, title = {Fault-tolerant self-dual circuits}, year = {1999}, language = {en} } @article{Overdick2010, author = {Overdick, Hagen}, title = {Introducing the model mapper enactor pattern}, isbn = {978-3-86956-036-6}, year = {2010}, language = {en} } @article{PabloAlarconArroyoBordihnetal.2015, author = {Pablo Alarcon, Pedro and Arroyo, Fernando and Bordihn, Henning and Mitrana, Victor and Mueller, Mike}, title = {Ambiguity of the multiple interpretations on regular languages}, series = {Fundamenta informaticae}, volume = {138}, journal = {Fundamenta informaticae}, number = {1-2}, publisher = {IOS Press}, address = {Amsterdam}, issn = {0169-2968}, doi = {10.3233/FI-2015-1200}, pages = {85 -- 95}, year = {2015}, abstract = {A multiple interpretation scheme is an ordered sequence of morphisms. The ordered multiple interpretation of a word is obtained by concatenating the images of that word in the given order of morphisms. The arbitrary multiple interpretation of a word is the semigroup generated by the images of that word. These interpretations are naturally extended to languages. 
Four types of ambiguity of multiple interpretation schemata on a language are defined: o-ambiguity, internal ambiguity, weakly external ambiguity and strongly external ambiguity. We investigate the problem of deciding whether a multiple interpretation scheme is ambiguous on regular languages.}, language = {en} }