@unpublished{PrasseGrubenMachlikaetal.2016,
  author   = {Prasse, Paul and Gruben, Gerrit and Machlika, Lukas and Pevny, Tomas and Sofka, Michal and Scheffer, Tobias},
  title    = {Malware Detection by HTTPS Traffic Analysis},
  url      = {http://nbn-resolving.de/urn:nbn:de:kobv:517-opus4-100942},
  pages    = {10},
  year     = {2016},
  abstract = {In order to evade detection by network-traffic analysis, a growing proportion of malware uses the encrypted HTTPS protocol. We explore the problem of detecting malware on client computers based on HTTPS traffic analysis. In this setting, malware has to be detected based on the host IP address, ports, timestamp, and data volume information of TCP/IP packets that are sent and received by all the applications on the client. We develop a scalable protocol that allows us to collect network flows of known malicious and benign applications as training data and derive a malware-detection method based on neural networks and sequence classification. We study the method's ability to detect known and new, unknown malware in a large-scale empirical study.},
  language = {en}
}

@article{LandwehrKuehnSchefferetal.2016,
  author    = {Landwehr, Niels and Kuehn, Nicolas M. and Scheffer, Tobias and Abrahamson, Norman A.},
  title     = {A Nonergodic Ground-Motion Model for California with Spatially Varying Coefficients},
  series    = {Bulletin of the Seismological Society of America},
  volume    = {106},
  journal   = {Bulletin of the Seismological Society of America},
  publisher = {Seismological Society of America},
  address   = {Albany},
  issn      = {0037-1106},
  doi       = {10.1785/0120160118},
  pages     = {2574--2583},
  year      = {2016},
  abstract  = {Traditional probabilistic seismic-hazard analysis, as well as the estimation of ground-motion models (GMMs), is based on the ergodic assumption, which means that the distribution of ground motions over time at a given site is the same as their spatial distribution over all sites for the same magnitude, distance, and site condition. With a large increase in the number of recorded ground-motion data, there are now repeated observations at given sites and from multiple earthquakes in small regions, so that assumption can be relaxed. We use a novel approach to develop a nonergodic GMM, which is cast as a varying-coefficient model (VCM). In this model, the coefficients are allowed to vary by geographical location, which makes it possible to incorporate effects of spatially varying source, path, and site conditions. Hence, a separate set of coefficients is estimated for each source and site coordinate in the data set. The coefficients are constrained to be similar for spatially nearby locations. This is achieved by placing a Gaussian process prior on the coefficients. The amount of correlation is determined by the data. The spatial correlation structure of the model allows one to extrapolate the varying coefficients to a new location and trace the corresponding uncertainties. The approach is illustrated with the Next Generation Attenuation-West2 data set, using only Californian records. The VCM outperforms a traditionally estimated GMM in terms of generalization error and leads to a reduction in the aleatory standard deviation by approximately 40\%, which has important implications for seismic-hazard calculations. The scaling of the model with respect to its predictor variables such as magnitude and distance is physically plausible. The epistemic uncertainty associated with the predicted ground motions is small in places where events or stations are close and large where data are sparse.},
  language  = {en}
}

@article{DickScheffer2016,
  author    = {Dick, Uwe and Scheffer, Tobias},
  title     = {Learning to control a structured-prediction decoder for detection of HTTP-layer DDoS attackers},
  series    = {Machine Learning},
  volume    = {104},
  journal   = {Machine Learning},
  publisher = {Springer},
  address   = {Dordrecht},
  issn      = {0885-6125},
  doi       = {10.1007/s10994-016-5581-9},
  pages     = {385--410},
  year      = {2016},
  abstract  = {We focus on the problem of detecting clients that attempt to exhaust server resources by flooding a service with protocol-compliant HTTP requests. Attacks are usually coordinated by an entity that controls many clients. Modeling the application as a structured-prediction problem allows the prediction model to jointly classify a multitude of clients based on their cohesion of otherwise inconspicuous features. Since the resulting output space is too vast to search exhaustively, we employ greedy search and techniques in which a parametric controller guides the search. We apply a known method that sequentially learns the controller and the structured-prediction model. We then derive an online policy-gradient method that finds the parameters of the controller and of the structured-prediction model in a joint optimization problem; we obtain a convergence guarantee for the latter method. We evaluate and compare the various methods based on a large collection of traffic data of a web-hosting service.},
  language  = {en}
}