A Statistical Learning Perspective of Genetic Programming

Created by W.Langdon from gp-bibliography.bib Revision:1.3872

@InProceedings{Gelly:2009:eurogp,
  author =       "Nur Merve Amil and Nicolas Bredeche and 
                 Christian Gagn{\'e} and Sylvain Gelly and Marc Schoenauer and 
                 Olivier Teytaud",
  title =        "A Statistical Learning Perspective of Genetic
                 Programming",
  booktitle =    "Proceedings of the 12th European Conference on Genetic
                 Programming, EuroGP 2009",
  year =         "2009",
  editor =       "Leonardo Vanneschi and Steven Gustafson and 
                 Alberto Moraglio and Ivanoe {De Falco} and Marc Ebner",
  volume =       "5481",
  series =       "LNCS",
  pages =        "327--338",
  address =      "T{\"u}bingen",
  month =        apr # " 15--17",
  organization = "EvoStar",
  publisher =    "Springer",
  keywords =     "genetic algorithms, genetic programming, poster",
  isbn13 =       "978-3-642-01180-1",
  DOI =          "10.1007/978-3-642-01181-8_28",
  abstract =     "This paper proposes a theoretical analysis of Genetic
                 Programming (GP) from the perspective of statistical
                 learning theory, a well grounded mathematical toolbox
                 for machine learning. By computing the
                 Vapnik-Chervonenkis dimension of the family of programs
                 that can be inferred by a specific setting of GP, it is
                 proved that a parsimonious fitness ensures universal
                 consistency. This means that the empirical error
                 minimization allows convergence to the best possible
                 error when the number of test cases goes to infinity.
                 However, it is also proved that the standard method
                 consisting in putting a hard limit on the program size
                 still results in programs of infinitely increasing size
                 in function of their accuracy. It is also shown that
                 cross-validation or hold-out for choosing the
                 complexity level that optimizes the error rate in
                 generalization also leads to bloat. So a more
                 complicated modification of the fitness is proposed in
                 order to avoid unnecessary bloat while nevertheless
                 preserving universal consistency.",
  notes =        "Also known as \cite{DBLP:conf/eurogp/AmilBGGST09}

                 Part of \cite{conf/eurogp/2009} EuroGP'2009 held in
                 conjunction with EvoCOP2009, EvoBIO2009 and
                 EvoWorkshops2009",
}

Genetic Programming entries for Nur Merve Amil Nicolas Bredeche Christian Gagné Sylvain Gelly Marc Schoenauer Olivier Teytaud

Citations