@article{KoetzingLagodzinskiLengleretal.2020, author = {K{\"o}tzing, Timo and Lagodzinski, Gregor J. A. and Lengler, Johannes and Melnichenko, Anna}, title = {Destructiveness of lexicographic parsimony pressure and alleviation by a concatenation crossover in genetic programming}, series = {Theoretical computer science}, volume = {816}, journal = {Theoretical computer science}, publisher = {Elsevier}, address = {Amsterdam}, issn = {0304-3975}, doi = {10.1016/j.tcs.2019.11.036}, pages = {96 -- 113}, year = {2020}, abstract = {For theoretical analyses, two specifics distinguish GP from many other areas of evolutionary computation: the variable-size representations, which in particular can lead to bloat (i.e., the growth of individuals with redundant parts), and the role and realization of crossover, which is particularly central in GP due to the tree-based representation. Whereas some theoretical work on GP has studied the effects of bloat, crossover has received surprisingly little attention in this line of work.
We analyze a simple crossover operator in combination with randomized local search, where a preference for small solutions minimizes bloat (lexicographic parsimony pressure); we denote the resulting algorithm Concatenation Crossover GP. We consider three variants of the well-studied MAJORITY test function, adding large plateaus in different ways to the fitness landscape and thus providing a test bed for analyzing the interplay of variation operators and bloat control mechanisms in a setting with local optima. We show that the Concatenation Crossover GP can efficiently optimize these test functions, whereas local search cannot optimize all three variants efficiently, regardless of whether bloat control is employed.}, language = {en} } @article{KrejcaWitt2020, author = {Krejca, Martin S. and Witt, Carsten}, title = {Lower bounds on the run time of the Univariate Marginal Distribution Algorithm on OneMax}, series = {Theoretical computer science : the journal of the EATCS}, volume = {832}, journal = {Theoretical computer science : the journal of the EATCS}, publisher = {Elsevier}, address = {Amsterdam [u.a.]}, issn = {0304-3975}, doi = {10.1016/j.tcs.2018.06.004}, pages = {143 -- 165}, year = {2020}, abstract = {The Univariate Marginal Distribution Algorithm (UMDA), a popular estimation-of-distribution algorithm, is studied from a run time perspective. On the classical OneMax benchmark function on bit strings of length $n$, a lower bound of $\Omega(\lambda + \mu\sqrt{n} + n \log n)$ on its expected run time is proved, where $\mu$ and $\lambda$ are algorithm-specific parameters. This is the first direct lower bound on the run time of the UMDA. It is stronger than the bounds that follow from general black-box complexity theory and is matched by the run time of many evolutionary algorithms. The results are obtained through advanced analyses of the stochastic change of the frequencies of bit values maintained by the algorithm, including carefully designed potential functions. These techniques may prove useful in advancing the field of run time analysis for estimation-of-distribution algorithms in general.}, language = {en} } @article{DoerrKrejca2020, author = {Doerr, Benjamin and Krejca, Martin S.}, title = {Significance-based estimation-of-distribution algorithms}, series = {IEEE transactions on evolutionary computation}, volume = {24}, journal = {IEEE transactions on evolutionary computation}, number = {6}, publisher = {Institute of Electrical and Electronics Engineers}, address = {New York, NY}, issn = {1089-778X}, doi = {10.1109/TEVC.2019.2956633}, pages = {1025 -- 1034}, year = {2020}, abstract = {Estimation-of-distribution algorithms (EDAs) are randomized search heuristics that create a probabilistic model of the solution space, which is updated iteratively based on the quality of the solutions sampled according to the model. As previous works show, this iteration-based perspective can lead to erratic updates of the model, in particular to bit frequencies approaching a random boundary value. In order to overcome this problem, we propose a new EDA based on the classic compact genetic algorithm (cGA) that takes into account a longer history of samples and updates its model only with respect to information which it classifies as statistically significant. We prove that this significance-based cGA (sig-cGA) optimizes the commonly regarded benchmark functions OneMax (OM), LeadingOnes, and BinVal, all in quasilinear time, a result shown for no other EDA or evolutionary algorithm so far.
For the recently proposed stable compact genetic algorithm, an EDA that tries to prevent erratic model updates by imposing a bias toward the uniformly distributed model, we prove that it optimizes OM only in time exponential in its hypothetical population size. Similarly, we show that the convex search algorithm cannot optimize OM in polynomial time.}, language = {en} }