@inproceedings{304a26a1cc5e48d19acd37deaba4aa17,
  title     = {Graceful Scaling on Uniform Versus Steep-Tailed Noise},
  abstract  = {Recently, different evolutionary algorithms (EAs) have been analyzed in noisy environments. The most frequently used noise model for this was additive posterior noise (noise added after the fitness evaluation) taken from a Gaussian distribution. In particular, for this setting it was shown that the $(\mu + 1)$-EA on OneMax does not scale gracefully (higher noise cannot efficiently be compensated by higher $\mu$). In this paper we want to understand whether there is anything special about the Gaussian distribution which makes the $(\mu + 1)$-EA not scale gracefully. We keep the setting of posterior noise, but we look at other distributions. We see that for exponential tails the $(\mu + 1)$-EA on OneMax does also not scale gracefully, for similar reasons as in the case of Gaussian noise. On the other hand, for uniform distributions (as well as other, similar distributions) we see that the $(\mu + 1)$-EA on OneMax does scale gracefully, indicating the importance of the noise model.},
  keywords  = {Evolutionary algorithm, Noisy fitness, Theory},
  author    = {Friedrich, Tobias and K{\"o}tzing, Timo and Krejca, Martin S. and Sutton, Andrew M.},
  year      = {2016},
  month     = jan,
  day       = {1},
  doi       = {10.1007/978-3-319-45823-6_71},
  language  = {English (US)},
  isbn      = {9783319458229},
  series    = {Lecture Notes in Computer Science (including subseries Lecture Notes in Artificial Intelligence and Lecture Notes in Bioinformatics)},
  publisher = {Springer-Verlag},
  pages     = {761--770},
  editor    = {Hart, Emma and Paechter, Ben and Handl, Julia and L{\'o}pez-Ib{\'a}{\~n}ez, Manuel and Lewis, Peter R. and Ochoa, Gabriela},
  booktitle = {Parallel Problem Solving from Nature - 14th International Conference, {PPSN} 2016, Proceedings},
  note      = {14th International Conference on Parallel Problem Solving from Nature, PPSN 2016 ; Conference date: 17-09-2016 Through 21-09-2016},
}