Towards an Information Theoretic Framework for Evolutionary Learning

@PhdThesis{Card:thesis,
  author =       "Stuart William Card",
  title =        "Towards an Information Theoretic Framework for
                 Evolutionary Learning",
  school =       "Electrical Engineering and Computer Science, Syracuse
                 University",
  year =         "2011",
  address =      "USA",
  month =        aug,
  email =        "cards@ntcnet.com",
  keywords =     "genetic algorithms, genetic programming, diversity,
                 ensemble model, evolvability, fitness, information
                 distance, mutual information",
  URL =          "https://surface.syr.edu/eecs_etd/307
                  https://surface.syr.edu/cgi/viewcontent.cgi?article=1311&context=eecs_etd",
  size =         "219 pages",
  abstract =     "The vital essence of evolutionary learning consists of
                 information flows between the environment and the
                 entities differentially surviving and reproducing
                 therein. Gain or loss of information in individuals and
                 populations due to evolutionary steps should be
                 considered in evolutionary algorithm theory and
                 practice. Information theory has rarely been applied to
                 evolutionary computation, a lacuna that this
                 dissertation addresses, with an emphasis on objectively
                 and explicitly evaluating the ensemble models implicit
                 in evolutionary learning. Information theoretic
                 functionals can provide objective, justifiable,
                 general, computable, commensurate measures of fitness
                 and diversity.

                 We identify information transmission channels implicit
                 in evolutionary learning. We define information
                 distance metrics and indices for ensembles. We extend
                 Price's Theorem to non-random mating, give it an
                 effective fitness interpretation and decompose it to
                 show the key factors influencing heritability and
                 evolvability. We argue that heritability and
                 evolvability of our information theoretic indicators
                 are high. We illustrate use of our indices for
                 reproductive and survival selection. We develop
                 algorithms to estimate information theoretic quantities
                 on mixed continuous and discrete data via the empirical
                 copula and information dimension. We extend statistical
                 resampling. We present experimental and real world
                 application results: chaotic time series prediction;
                 parity; complex continuous functions; industrial
                 process control; and small sample social science data.
                 We formalize conjectures regarding evolutionary
                 learning and information geometry.",
  notes =        "Information Theoretic Evaluations of Ensembles.
                  Corollaries to Price's Theorem.

                 Supervisor: Chilukuri K. Mohan",
}
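
For reference, the "information distance metrics and indices for ensembles" mentioned in the abstract build on the standard information metric between random variables; the usual textbook definitions (not necessarily the exact indices defined in the thesis) are

    d(X,Y) = H(X|Y) + H(Y|X) = H(X,Y) - I(X;Y)
    D(X,Y) = d(X,Y) / H(X,Y) = 1 - I(X;Y) / H(X,Y)

where H denotes (conditional or joint) entropy and I mutual information. d satisfies the metric axioms on random variables up to recoding, and the normalized form D lies in [0,1], which is what makes such quantities commensurate measures of diversity across different model outputs.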
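
The extension of Price's Theorem to non-random mating and its effective-fitness decomposition are the thesis's own contributions and are not reproduced here; only the classical starting point is. The standard Price equation relates the change in the population mean of a trait z, with value z_i and fitness w_i in individual i, to a selection term and a transmission term:

    \Delta \bar{z} = \frac{\mathrm{Cov}(w_i, z_i)}{\bar{w}} + \frac{\mathrm{E}[\, w_i \, \Delta z_i \,]}{\bar{w}}

The covariance term captures selection; the expectation term captures transmission bias, and it is in decompositions of this kind that the heritability and evolvability of a selection indicator, such as an information theoretic fitness proxy, appear.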
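
As a rough sketch of the general empirical-copula idea mentioned in the abstract (not the thesis's algorithms, which additionally handle mixed continuous and discrete data via the information dimension; the function name, bin count, and test data below are illustrative choices only): mutual information is invariant under monotone transforms of each variable, so samples can be rank-transformed onto the unit square and a simple histogram estimate applied there.

    # Illustrative only: histogram estimate of I(X;Y) on the empirical copula.
    import numpy as np
    from scipy.stats import rankdata

    def empirical_copula_mi(x, y, bins=8):
        """Estimate mutual information (bits) from rank-transformed samples."""
        n = len(x)
        u = rankdata(x) / n                    # empirical copula coordinates in (0, 1]
        v = rankdata(y) / n
        joint, _, _ = np.histogram2d(u, v, bins=bins, range=[[0, 1], [0, 1]])
        p_uv = joint / n                       # joint cell probabilities
        p_u = p_uv.sum(axis=1, keepdims=True)  # marginals (near-uniform by construction)
        p_v = p_uv.sum(axis=0, keepdims=True)
        nz = p_uv > 0                          # skip empty cells to avoid log(0)
        return float(np.sum(p_uv[nz] * np.log2(p_uv[nz] / (p_u @ p_v)[nz])))

    # Example: two strongly dependent continuous variables.
    rng = np.random.default_rng(0)
    x = rng.normal(size=1000)
    y = x + 0.1 * rng.normal(size=1000)
    print(empirical_copula_mi(x, y))           # clearly positive; independent data would be near 0

Working on the copula scale removes the effect of each variable's marginal distribution, which is one reason rank-based estimates are attractive when comparing heterogeneous model outputs in an ensemble.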
