@article{BruecknerKanzowScheffer2012,
  author    = {Br{\"u}ckner, Michael and Kanzow, Christian and Scheffer, Tobias},
  title     = {Static prediction games for adversarial learning problems},
  journal   = {Journal of Machine Learning Research},
  volume    = {13},
  publisher = {Microtome Publishing},
  address   = {Cambridge, Mass.},
  issn      = {1532-4435},
  pages     = {2617--2654},
  year      = {2012},
  abstract  = {The standard assumption of identically distributed training and test data is violated when the test data are generated in response to the presence of a predictive model. This becomes apparent, for example, in the context of email spam filtering: email service providers employ spam filters, and spam senders engineer campaign templates to achieve a high rate of successful deliveries despite the filters. We model the interaction between the learner and the data generator as a static game in which the cost functions of the learner and the data generator are not necessarily antagonistic. We identify conditions under which this prediction game has a unique Nash equilibrium and derive algorithms that find the equilibrial prediction model. We derive two instances, the Nash logistic regression and the Nash support vector machine, and empirically explore their properties in a case study on email spam filtering.},
  language  = {en}
}

@article{BickelBruecknerScheffer2009,
  author   = {Bickel, Steffen and Br{\"u}ckner, Michael and Scheffer, Tobias},
  title    = {Discriminative learning under covariate shift},
  journal  = {Journal of Machine Learning Research},
  issn     = {1532-4435},
  year     = {2009},
  abstract = {We address classification problems for which the training instances are governed by an input distribution that is allowed to differ arbitrarily from the test distribution; such problems are also referred to as classification under covariate shift. We derive a solution that is purely discriminative: neither the training nor the test distribution is modeled explicitly. The problem of learning under covariate shift can be written as an integrated optimization problem. Instantiating the general optimization problem leads to a kernel logistic regression and an exponential model classifier for covariate shift. The optimization problem is convex under certain conditions; our findings also clarify the relationship to the known kernel mean matching procedure. We report on experiments on problems of spam filtering, text classification, and landmine detection.},
  language = {en}
}