Penalty Functions for Genetic Programming Algorithms

Created by W. Langdon from gp-bibliography.bib Revision: 1.3973

@InProceedings{conf/iccsa/MontanaABD11,
  author =       "Jose L. Montana and Cesar Luis Alonso and 
                 Cruz Enrique Borges and Javier {de la Dehesa}",
  title =        "Penalty Functions for Genetic Programming Algorithms",
  booktitle =    "Proceedings of the International Conference on
                 Computational Science and Its Applications (ICCSA 2011)
                 Part {I}",
  year =         "2011",
  editor =       "Beniamino Murgante and Osvaldo Gervasi and 
                 Andres Iglesias and David Taniar and Bernady O. Apduhan",
  volume =       "6782",
  pages =        "550--562",
  series =       "Lecture Notes in Computer Science",
  address =      "Santander, Spain",
  month =        jun # " 20-23",
  publisher =    "Springer",
  keywords =     "genetic algorithms, genetic programming, symbolic
                 regression, inductive learning, regression model
                 selection",
  isbn13 =       "978-3-642-21927-6",
  DOI =          "10.1007/978-3-642-21928-3_40",
  size =         "13 pages",
  abstract =     "Very often symbolic regression, as addressed in
                  Genetic Programming (GP), is equivalent to approximate
                  interpolation. This means that, in general, GP
                  algorithms try to fit the sample as well as possible,
                  but no notion of generalisation error is considered.
                  As a consequence, overfitting, code bloat and noisy
                  data are problems which are not satisfactorily solved
                  under this approach. Motivated by this situation, we
                  review the problem of Symbolic Regression from the
                  perspective of Machine Learning, a well-founded
                  mathematical toolbox for predictive learning. We
                  perform empirical comparisons between classical
                  statistical methods (AIC and BIC) and methods based on
                  Vapnik-Chervonenkis (VC) theory for regression
                  problems under genetic training. Empirical comparisons
                  of the different methods suggest practical advantages
                  of VC-based model selection. We conclude that VC
                  theory provides a methodological framework for
                  complexity control in Genetic Programming, even when
                  its technical results seem not to be directly
                  applicable. As the main practical advantage, precise
                  penalty functions founded on the notion of
                  generalisation error are proposed for evolving
                  GP-trees.",
  affiliation =  "Departamento de Matematicas, Estadistica y
                 Computacion, Universidad de Cantabria, 39005 Santander,
                 Spain",
  bibdate =      "2011-06-20",
  bibsource =    "DBLP,
                 http://dblp.uni-trier.de/db/conf/iccsa/iccsa2011-1.html#MontanaABD11",
}
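
For readers who want to experiment with the criteria compared in the
abstract, the following minimal Python sketch (illustrative only, not
the authors' code) contrasts the standard additive AIC and BIC formulas
with the practical VC penalization factor of Cherkassky, Shao, Mulier
and Vapnik (1999), on which this line of work builds. Treating a GP
tree's node count as its complexity measure h is an assumption made
here for illustration; the paper derives its own precise penalty
functions for evolving GP-trees.

import math

def aic(rss, n, k):
    """Akaike Information Criterion for Gaussian-noise regression."""
    return n * math.log(rss / n) + 2 * k

def bic(rss, n, k):
    """Bayesian (Schwarz) Information Criterion."""
    return n * math.log(rss / n) + k * math.log(n)

def vc_penalized_risk(rss, n, h):
    """Empirical risk scaled by the practical VC penalization factor
    R_emp / (1 - sqrt(p - p*ln(p) + ln(n)/(2n)))+, with p = h/n.
    Returns +inf when the bound degenerates (complexity too high)."""
    p = h / n
    denom = 1.0 - math.sqrt(p - p * math.log(p) + math.log(n) / (2 * n))
    return (rss / n) / denom if denom > 0 else float("inf")

# Toy model selection over candidate GP trees: each candidate pairs a
# residual sum of squares with a node count (hypothetical complexity
# proxy) on a sample of size n.
n = 50
candidates = [(4.1, 3), (2.9, 7), (2.8, 25)]  # (rss, nodes)
for rss, nodes in candidates:
    print(nodes,
          round(aic(rss, n, nodes), 2),
          round(bic(rss, n, nodes), 2),
          round(vc_penalized_risk(rss, n, nodes), 4))

In a GP run, one natural use of such a penalized score is as the
fitness itself, replacing raw training error, so that selection
pressure acts against over-complex trees directly; this matches the
abstract's proposal of penalty functions for evolving GP-trees.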

Genetic Programming entries for Jose Luis Montana Arnaiz, Cesar Luis Alonso, Cruz Enrique Borges, Javier de la Dehesa
