@proceedings{ANNGA93,
title = {Proceedings of the International Conference on Artificial
Neural Networks and Genetic Algorithms},
year = {1993},
month = apr # "~14--16",
editor = {Rudolf F. Albrecht and Colin R. Reeves and Nigel C. Steele},
publisher = {Springer-Verlag}
}
@proceedings{AlifeI,
title = {Artificial Life: the Proceedings of an Interdisciplinary
Workshop on the Synthesis and Simulation of Living Systems},
year = {1989},
month = sep,
editor = {Christopher G. Langton},
publisher = {Addison-Wesley},
address = {Redwood City, CA},
note = {Workshop held September, 1987 in Los Alamos, New Mexico}
}
@proceedings{AlifeII,
title = {Artificial Life {II}: Proceedings of the Workshop on
Artificial Life},
year = {1992},
editor = {Christopher G. Langton and Charles Taylor and J. Doyne Farmer
and Steen Rasmussen},
publisher = {Addison-Wesley},
address = {Redwood City, CA},
note = {Workshop held February, 1990 in Santa Fe, New Mexico}
}
@proceedings{AlifeIII,
title = {Artificial Life {III}: Proceedings of the Workshop on
Artificial Life},
year = {1994},
editor = {Christopher G. Langton},
publisher = {Addison-Wesley},
address = {Reading, MA},
note = {Workshop held June, 1992 in Santa Fe, New Mexico}
}
@proceedings{COGANN92,
title = {International Workshop on Combinations of Genetic Algorithms
and Neural Networks: {COGANN-92}},
year = {1992},
editor = {L. D. Whitley and J. D. Schaffer},
publisher = {IEEE Computer Society Press},
address = {Los Alamitos, California}
}
@proceedings{FOGA1,
editor = {Gregory J. E. Rawlins},
title = {Proceedings of the Workshop on Foundations of Genetic
Algorithms},
year = {1991},
publisher = {Morgan Kaufmann},
address = {San Mateo, California},
}
@proceedings{FOGA2,
title = {Proceedings of the Workshop on Foundations of Genetic
Algorithms},
year = {1993},
editor = {Darrell L. Whitley},
publisher = {Morgan Kaufmann},
address = {San Mateo, California},
note = {The second workshop on Foundations of Genetic Algorithms (FOGA)
was held July 26--29, 1992 in Vail, Colorado}
}
@proceedings{ICGA85,
title = {Proceedings of the First International Conference on Genetic
Algorithms and their Applications},
year = {1985},
month = jul # "~24--26",
editor = {John J. Grefenstette},
publisher = {Lawrence Erlbaum Associates},
address = {Pittsburgh, PA}
}
@proceedings{ICGA87,
title = {Proceedings of the Second International Conference on
Genetic Algorithms and their Applications},
organization = {Massachusetts Institute of Technology, Cambridge, MA},
year = {1987},
month = jul # "~28--31",
editor = {John J. Grefenstette},
publisher = {Lawrence Erlbaum Associates},
address = {Hillsdale, New Jersey}
}
@proceedings{ICGA89,
title = {Proceedings of the Third International Conference on
Genetic Algorithms},
organization = {George Mason University},
year = {1989},
month = jun # "~4--7",
editor = {J. David Schaffer},
publisher = {Morgan Kaufmann},
address = {San Mateo, California}
}
@proceedings{ICGA91,
title = {Proceedings of the Fourth International Conference on
Genetic Algorithms},
organization = {University of California, San Diego},
year = {1991},
month = jul # "~13--16",
editor = {Richard K. Belew and Lashon B. Booker},
publisher = {Morgan Kaufmann},
address = {San Mateo, California}
}
@proceedings{ICGA93,
title = {Proceedings of the Fifth International Conference on
Genetic Algorithms},
organization = {University of Illinois at Urbana-Champaign},
year = {1993},
month = jul # "~17--21",
editor = {Stephanie Forrest},
publisher = {Morgan Kaufmann},
address = {San Mateo, California}
}
@proceedings{PPSN91,
title = {Proceedings of the First Conference on Parallel
Problem Solving from Nature},
year = {1991},
month = oct # "~1--3",
editor = {Hans-Paul Schwefel and Reinhard M{\"a}nner},
publisher = {Springer-Verlag},
address = {Dortmund, Germany},
volume = {496},
series = {Lecture Notes in Computer Science}
}
@proceedings{PPSN92,
title = {Proceedings of the Second Conference on Parallel
Problem Solving from Nature, Brussels, Belgium},
year = {1992},
month = sep # "~28--30",
editor = {Reinhard M{\"a}nner and Bernhard Manderick},
publisher = {Elsevier},
address = {Amsterdam}
}
@inproceedings{Ackley85,
key = {genetic algorithm, boltzmann, connectionism, cogann ref},
author = {David H. Ackley},
title = {A Connectionist Algorithm for Genetic Search},
booktitle = {Proceedings of the First International Conference on Genetic
Algorithms and their Applications},
year = {1985},
editor = {John J. Grefenstette},
publisher = {Lawrence Erlbaum Associates},
address = {Hillsdale, New Jersey},
pages = {121--135},
topology = { },
network = { },
encoding = { },
evolves = { },
applications = { }
}
@book{Ackley87,
author = {David H. Ackley},
title = {A Connectionist Machine for Genetic Hillclimbing},
year = {1987},
publisher = {Kluwer Academic Publishers},
address = {Boston, MA},
key = {connectionism, genetic algorithm, sigh, stochastic iterated,
cogann ref},
topology = { },
network = { },
encoding = { },
evolves = { },
applications = { },
}
@inproceedings{Ackley92,
author = {David H. Ackley and Michael L. Littman},
title = {Interactions between Learning and Evolution},
booktitle = {Artificial Life II},
year = {1992},
editor = {Christopher G. Langton and Charles Taylor and
J. Doyne Farmer and Steen Rasmussen},
publisher = {Addison-Wesley},
pages = {487--509},
annote = {connectionism genetic algorithm
neighborhood mate selection, cogann ref animat},
topology = {feed-forward},
network = { },
encoding = {direct},
evolves = {parameters, connectivity},
applications = {simulated world}
}
@inproceedings{Alba93,
key = {genetic algorithms connectionism neural networks cogann},
author = {E. Alba and J. F. Aldana and J. M. Troya},
title = {Genetic Algorithms as Heuristics for Optimizing {ANN} Design},
booktitle = {Proceedings of the International Conference on Artificial
Neural Networks and Genetic Algorithms},
year = {1993},
editor = {Albrecht, R. F. and Reeves, C. R. and Steele, N. C.},
publisher = {Springer-Verlag},
pages = {683--690},
topology = { },
network = { },
encoding = { },
evolves = {connectivity},
applications = { }
}
@article{Angeline94,
key = {genetic algorithms connectionism neural networks
cogann programming},
author = {Peter J. Angeline and Gregory M. Saunders and
Jordan B. Pollack},
title = {An Evolutionary Algorithm that Constructs Recurrent
Neural Networks},
journal = {IEEE Transactions on Neural Networks},
year = {1994},
volume = {5},
pages = {54--64},
topology = {recurrent},
network = { },
encoding = { },
evolves = {connectivity},
applications = { }
}
@article{Ankenbrandt90,
key = {Connectionism, fuzzy logic, pattern recognition},
author = {C. A. Ankenbrandt and B. P. Buckles and F. E. Petry},
title = {Scene Recognition using Genetic Algorithms with Semantic Nets},
journal = {Pattern Recognition Letters},
year = {1990},
month = apr,
volume = {11},
pages = {285--293},
publisher = {North-Holland},
topology = { },
network = { },
encoding = { },
evolves = { },
applications = {image recognition}
}
@inproceedings{Arena93,
key = {genetic algorithms connectionism neural networks cogann},
author = {P. Arena and R. Caponetto and I. Fortuna and M. G. Xibilia},
title = {{MLP} Optimal Topology via Genetic Algorithms},
booktitle = {Proceedings of the International Conference on Artificial
Neural Networks and Genetic Algorithms},
year = {1993},
editor = {Albrecht, R. F. and Reeves, C. R. and Steele, N. C.},
publisher = {Springer-Verlag},
pages = {670--674},
topology = {multi-layered},
network = {multi-layer perceptron},
encoding = { },
evolves = {connectivity},
applications = { }
}
@inproceedings{Austin91,
key = {algorithms connectionism, cogann ref},
author = {Scott Austin},
title = {Genetic Neurosynthesis},
booktitle = {Proceedings of {AIAA} Aerospace VIII},
year = {1991},
month = oct,
address = {Baltimore, MD},
topology = { },
network = { },
encoding = { },
evolves = { },
applications = { }
}
@inproceedings{Ball90,
key = {connectionism, cogann ref},
author = {N. Ball},
title = {Adaptive Signal Processing via Genetic Algorithms and
Self-organizing Neural Networks},
booktitle = {Proceedings of the IEEE Workshop on Genetic Algorithms,
Simulated Annealing and Neural Networks},
year = {1990},
address = {University of Glasgow, Scotland},
topology = { },
network = {self-organizing},
encoding = { },
evolves = { },
applications = {signal processing}
}
@inproceedings{Ball93,
key = {genetic algorithms connectionism neural networks cogann},
author = {N. R. Ball},
title = {Towards the Development of Cognitive Maps in
Classifier Systems},
booktitle = {Proceedings of the International Conference on Artificial
Neural Networks and Genetic Algorithms},
year = {1993},
pages = {712--718},
topology = { },
network = { },
encoding = {indirect, classifier systems},
evolves = { },
applications = {cognitive maps}
}
@article{Beer92,
key = {genetic algorithms GENESIS, connectionism},
author = {Randall D. Beer and John C. Gallagher},
title = {Evolving Dynamical Neural Networks for Adaptive Behavior},
journal = {Adaptive Behavior},
year = {1992},
volume = {1},
number = {1},
pages = {91--122},
topology = {recurrent},
network = { },
encoding = { },
evolves = {connectivity},
applications = {animat controller}
}
@inproceedings{Belew89,
key = {hybrid learning, connectionism, cogann ref},
author = {Richard K. Belew},
title = {When Both Individuals and Populations Search: Adding Simple
Learning to the Genetic Algorithm},
booktitle = {Proceedings of the Third International Conference on Genetic
Algorithms},
organization = {ICGA89},
year = {1989},
editor = {Schaffer, J. D.},
publisher = {Morgan Kaufmann},
pages = {34--41},
topology = { },
network = { },
encoding = { },
evolves = { },
applications = { }
}
@techreport{Belew89a,
key = {connectionism, genetic algorithms, cogann ref},
author = {Richard K. Belew},
title = {Evolution, Learning and Culture: Computational Metaphors
for Adaptive Algorithms},
institution = {University of California at San Diego},
year = {1989},
month = sep,
address = {La Jolla, CA},
type = {CSE Technical Report CS89-156},
publisher = {University of California at San Diego},
topology = { },
network = { },
encoding = { },
evolves = { },
applications = { }
}
@techreport{Belew89b,
key = {connectionism, cogann ref},
author = {Richard K. Belew and John McInerney},
title = {Using the Genetic Algorithm to Wire Feed-forward Networks},
institution = {University of California, San Diego},
year = {1989},
month = may,
address = {La Jolla, CA},
type = {Technical abstract},
note = {Submitted to Neural Information Processing Systems 1989},
publisher = {Computer Science \& Engineering Dept.,
University of California at San Diego},
topology = {feed-forward},
network = { },
encoding = { },
evolves = {connectivity},
applications = { }
}
@techreport{Belew90,
key = {neural nets, cogann ref},
author = {Richard K. Belew and John McInerney and Nicol N. Schraudolph},
title = {Evolving Networks: Using Genetic Algorithms with
Connectionist Learning},
institution = {University of California at San Diego},
year = {1990},
month = jun,
address = {La Jolla, CA},
type = {CSE Technical Report CS90-174},
publisher = {University of California at San Diego},
topology = {feed-forward},
network = { },
encoding = {direct, developmental},
evolves = {parameters},
applications = { }
}
@inproceedings{Bengio91,
key = {genetic algorithms connectionism neural networks cogann},
author = {Yoshua Bengio and Samy Bengio and Jocelyn Cloutier},
title = {Learning a Synaptic Learning Rule},
booktitle = {Proceedings of the International Joint Conference on Neural
Networks},
year = {1991},
pages = {969},
abstract = {ABSTRACT
Summary form only given, as follows. The Authors discuss
original approach to neural modeling based on the idea of
searching, with learning methods, for a synaptic learning
rule which is biologically plausible and yields networks that
are able to learn to perform difficult tasks. The proposed
method of automatically finding the learning rule relies on
the idea of considering the synaptic modification rule as a
parametric function. This function has local inputs and is
the same in many neurons. The parameters that define this
function can be estimated with known learning methods. For
this optimization, particular attention is given to gradient
descent and genetic algorithms. In both cases, estimation
of this function consists of a joint global optimization of
the synaptic modification function and the networks that are
learning to perform some tasks. Both network architecture and
the learning function can be designed within constraints
derived from biological knowledge.},
topology = { },
network = { },
encoding = { },
evolves = {learning rule},
applications = { }
}
@article{Bergman88,
author = {Aviv Bergman},
title = {Variation and Selection: An Evolutionary Model of Learning in
Neural Networks},
journal = {Neural Networks},
year = {1988},
volume = {1},
number = {1},
pages = {75},
topology = { },
network = { },
encoding = { },
evolves = { },
applications = { }
}
@inproceedings{Bergman89,
key = {genetic algorithms, connectionism recurrent neural networks,
cogann ref},
author = {Aviv Bergman},
title = {Self-Organization by Simulated Evolution},
booktitle = {Lectures in Complex Systems: Proceedings of the 1989
Complex Systems Summer School},
year = {1989},
editor = {E. Jen},
address = {Santa Fe},
topology = { },
network = {self-organizing},
encoding = { },
evolves = { },
applications = { }
}
@inproceedings{Bergman87,
key = {genetic algorithms, connectionism, evolution, cogann ref},
author = {Aviv Bergman and Michel Kerszberg},
title = {Breeding Intelligent Automata},
booktitle = {Proceedings of IEEE Conference on Neural Networks},
year = {1987},
month = jun # "~21--24",
address = {San Diego, CA},
pages = {63--70},
volume = {II},
topology = { },
network = { },
encoding = { },
evolves = { },
applications = { }
}
@inproceedings{Bessiere92,
key = {genetic algorithms connectionism neural networks cogann},
author = {P. Bessiere},
title = {Genetic Algorithms Applied to Formal Neural Networks:
Parallel Genetic Implementation of a Boltzmann Machine
and Associated Robotic Experimentations},
booktitle = {Toward a Practice of Autonomous Systems: Proceedings of the
First European Conference on Artificial Life},
year = {1992},
editor = {F.J. Varela and P. Bourgine},
publisher = {MIT Press},
address = {Cambridge, MA, USA},
pages = {310--314},
abstract = {ABSTRACT
Describes a possible application of computing techniques
inspired by natural life mechanisms to an artificial life
creature, namely a small mobile robot, called KitBorg.
Probabilistic inference suggests that any cognitive problem
may be split in two optimization problems. The first
one called the dynamic inference problem is an abstraction
of learning, the second one, namely, the static inference
problem, being a mathematical metaphor of pattern association.
Other optimization technics should be considered in that
context and especially genetic algorithms. The purpose of
this paper is to describe the state of the art of the
investigations which the Author is "akin" about that
question using a parallel genetic algorithm. The author
first "ecall" the principles of probabilistic inference,
then he presents briefly the parallel genetic algorithm and
the ways it is used to deal with both optimization problems,
to finally conclude about ongoing robotic experimentations
and future planned extensions.},
network = {boltzmann},
encoding = { },
evolves = { },
applications = {robot controller}
}
@inproceedings{Bishop93,
key = {genetic algorithms connectionism neural networks cogann
application paint industry},
author = {J. M. Bishop and M. J. Bushnell and A. Usher and S. Westland},
title = {Genetic Optimization of Neural Network Architectures for
Colour Recipe Prediction},
booktitle = {Proceedings of the International Conference on Artificial
Neural Networks and Genetic Algorithms},
year = {1993},
pages = {719--725},
topology = { },
network = { },
encoding = { },
evolves = {connectivity},
applications = {optimization}
}
@article{Bornholdt92,
key = {connectionism, cogann ref},
author = {Stephan Bornholdt and Dirk Graudenz},
title = {General Asymmetric Neural Networks and Structure Design
by Genetic Algorithms},
journal = {Neural Networks},
year = {1992},
volume = {5},
number = {2},
pages = {327--334},
note = {DESY 91-046, Deutsches Elektronen-Synchrotron, Hamburg,
Germany, May 1991},
topology = {feed-forward},
network = { },
encoding = { },
evolves = {connectivity},
applications = { }
}
@techreport{Bornholdt93,
key = {connectionism, cogann},
author = {Stephan Bornholdt and Dirk Graudenz},
title = {General Asymmetric Neural Networks and Structure Design by
Genetic Algorithms: A Learning Rule for Temporal Patterns},
institution = {Lawrence Berkeley Laboratory, University of California},
year = {1993},
month = jul,
address = {Berkeley, CA},
type = {HD-THEP-93-26 LBL-34384},
publisher = {Lawrence Berkeley Laboratory, University of California},
abstract = {ABSTRACT
A learning algorithm based on genetic algorithms for
asymmetric neural networks with an arbitrary structure is
presented. It is suited for the learning of temporal
patterns and leads to stable neural networks with feedback.},
topology = {general, recurrent},
network = { },
encoding = { },
evolves = {connectivity},
applications = {temporal pattern recognition}
}
@article{Brassinne93,
key = {genetic algorithms connectionism neural networks cogann},
author = {P. de la Brassinne},
title = {Genetic Algorithms and Learning of Neural Networks},
journal = {Bulletin Scientifique de l'Association des Ingenieurs
Electriciens sortis de l'Institut Electrotechnique Montefiore},
year = {1993},
volume = {106},
number = {1},
pages = {41--58},
abstract = {ABSTRACT The Author sought to apply genetic algorithms
to two concrete industrial problems which caused trouble
to classical optimization techniques (they were usually
trapped into local minima), without positive results.
One of the reasons was that the solutions among the population
were too close to one another too early in the search
process. Another was the unsuitability of the operators
employed to create new solutions for the neural network
optimization problem. Attempts at application to control
problems, where backpropagation could not be used, yielded
disappointing results except for very simple problems
such as the inverted pendulum. An explanation of these
findings is suggested.},
topology = { },
network = { },
encoding = { },
evolves = { },
applications = {optimization}
}
@inproceedings{Braun93,
key = {genetic algorithms connectionism neural networks cogann},
author = {H. Braun and J. Weisbrod},
title = {Evolving Neural Feedforward Networks},
booktitle = {Proceedings of the International Conference on Artificial
Neural Networks and Genetic Algorithms},
year = {1993},
editor = {Albrecht, R. F. and Reeves, C. R. and Steele, N. C.},
publisher = {Springer-Verlag},
pages = {25--32},
topology = {feed-forward},
network = { },
encoding = { },
evolves = {connectivity},
applications = { }
}
@article{Brill92,
key = {genetic algorithms, connectionism, cogann ref},
author = {F. Z. Brill and D. E. Brown and W. N. Martin},
title = {Fast Genetic Selection of Features for Neural Network
Classifiers},
journal = {IEEE Transactions on Neural Networks},
year = {1992},
month = mar,
volume = {3},
number = {2},
pages = {324--328},
abstract = {ABSTRACT - The task of classifiers is to determine the
appropriate class name when presented with a sample from
one of several classes. In forming the sample to present
to the classifier, there may be a large number of
measurements one can make. Feature selection addresses
the problem of determining which of these measurements
are the most useful for determining the pattern's class.
In this paper, we describe experiments using a genetic
algorithm for feature selection in the context of
neural network classifiers, specifically, counterpropagation
networks. We present two novel techniques in our application
of genetic algorithms. First, we configure our genetic
algorithm to use an approximate evaluation in order to
reduce significantly the computation required. In
particular, though our desired classifiers are
counterpropagation networks, we use a nearest-neighbor
classifier to evaluate feature sets. We show that the
features selected by this method are effective in the
context of counterpropagation networks. Second, we
propose a method we call training set sampling, in which
only a portion of the training set is used on any given
evaluation. Again, significant computational savings can be
made by using this method, i.e., evaluations can be
made over an order of magnitude faster. This method
selects feature sets that are as good as and occasionally
better for counterpropagation than those chosen by an
evaluation that uses the entire training set.},
topology = {counterpropagation},
network = { },
encoding = { },
evolves = { },
applications = {pattern classification}
}
@inproceedings{Bukatova92,
key = {genetic algorithms connectionism neural networks cogann},
author = {I. L. Bukatova},
title = {Evolutionary Computer},
booktitle = {Proceedings of the RNNS/IEEE Symposium on Neuroinformatics
and Neurocomputers},
year = {1992},
month = oct # "~7--10",
volume = {I},
pages = {467--477},
address = {Rostov-on-Don, Russia},
topology = { },
network = { },
encoding = { },
evolves = { },
applications = { }
}
@article{Calvin87,
author = {Calvin, W. H.},
title = {The Brain as a Darwin Machine},
journal = {Nature},
year = {1987},
volume = {330},
pages = {33--43},
network = { },
encoding = { },
applications = { }
}
@techreport{Carugo91,
key = {connectionism, backpropagation, cogann ref},
author = {Marcelo H. Carugo},
title = {Optimization of Parameters of a Neural Network, Applied to
Document Recognition, Using Genetic Algorithms},
institution = {N.V. Philips},
year = {1991},
address = {Eindhoven, The Netherlands},
type = {Nat. Lab. Technical Note No. 049/91},
publisher = {N.V. Philips},
topology = {feed-forward},
network = { },
encoding = { },
evolves = {parameters},
applications = {image recognition}
}
@inproceedings{Caudell90,
key = {genetic algorithms, neural networks, connectionism,
constrained weights, implementation of neural networks,
electro-optical systems, rms error minimization,
convoluted error surfaces, problem: parity,
parametric connectivity, cogann ref},
author = {Thomas P. Caudell},
title = {Parametric Connectivity: Feasibility of Learning in
Constrained Weight Space},
booktitle = {Proceedings of the International Joint Conference on Neural
Networks},
year = {1990},
pages = {667--675},
volume = {I},
abstract = {Uses constrained (linked) weights (ie, spread networks)
trained via genetic search.},
topology = { },
network = { },
encoding = { },
evolves = {parameters},
applications = {parity}
}
@inproceedings{Caudell89,
key = {neural networks, connectionism, constrained weights,
implementation of neural networks, electro-optical systems,
rms error minimization, convoluted error surfaces,
problem: parity, parametric connectivity, cogann ref},
author = {Thomas P. Caudell and Charles P. Dolan},
title = {Parametric Connectivity: Training of Constrained Networks
using Genetic Algorithms},
booktitle = {Proceedings of the Third International Conference on Genetic
Algorithms},
year = {1989},
pages = {370--374},
abstract = {Uses constrained (linked) weights (ie, spread networks)
trained via genetic search.},
topology = { },
network = { },
encoding = { },
evolves = {parameters},
applications = {parity}
}
@article{Caudill91,
key = {genetic algorithm connectionism, cogann ref},
author = {Maureen Caudill},
title = {Evolutionary Neural Networks},
journal = {AI Expert},
year = {1991},
month = mar,
pages = {28--33},
topology = { },
network = { },
encoding = { },
evolves = { },
applications = { }
}
@inproceedings{Chalmers90,
key = {cogann ref},
author = {David J. Chalmers},
title = {The Evolution of Learning: An Experiment in Genetic
Connectionism},
booktitle = {Proceedings of the 1990 Connectionist Models Summer School},
year = {1990},
editor = {D. S. Touretzky and J. L. Elman and T. J. Sejnowski and
G. E. Hinton},
publisher = {Morgan Kaufmann},
pages = {81--90},
topology = { },
network = { },
encoding = { },
evolves = {learning rule},
applications = { }
}
@inproceedings{Chang91,
key = {connectionism},
author = {E. Chang and R. Lippmann},
title = {Using Genetic Algorithms to Improve Pattern Classification
Performance},
booktitle = {Neural Information Processing Systems -- NIPS 3},
year = {1991},
publisher = {Morgan Kaufmann},
pages = {797--803},
editor = {David Touretzky},
topology = { },
network = { },
encoding = { },
evolves = { },
applications = {pattern classification}
}
@inproceedings{Chen92,
key = {genetic algorithms, connectionism},
author = {Qi Chen and W. A. Weigand},
title = {Neural Net Model of Batch Processes and Optimization Based
on an Extended Genetic Algorithm},
booktitle = {Proceedings of the International Joint Conference on Neural
Networks},
year = {1992},
pages = {IV-519--IV-524},
abstract = {ABSTRACT
This paper investigates the use of neural network for modeling the batch
processes. The consideration of the dynamics of batch processes, a
cascade neural network which is the combination of BPN and Euler's
numerical integration method, is successfully used to model of batch
processes. In terms of this neural network model, an extended genetic
algorithm is adopted to generate the optimal trajectory for improving the
desired process performance. The genetic algorithm is a general
methodology for searching a solution space in a manner analogous to the
natural selection procedure in biological evolution. With the motivation
of modern genetic technology, the rule-inducer genetic algorithm is
proposed for dynamic optimization of batch processes. The simulation
study of a typical biochemical process shows this neural network modeling
technique has a good generalization of the batch process and the extended
real-value genetic algorithm has a good capability to solve the
complicated dynamical optimization problems.},
topology = { },
network = { },
encoding = { },
evolves = { },
applications = {optimization}
}
@inproceedings{Chu93,
key = {Connectionism, Genetic Algorithms, cogann},
author = {C. H. Chu and C. R. Chow},
title = {A Genetic Algorithm Approach to Supervised Learning
for Multilayered Networks},
booktitle = {World Congress on Neural Networks},
year = {1993},
pages = {IV744--IV747},
abstract = {ABSTRACT
A neural network learning algorithm based on genetic
algorithms (GAs) for multilayered networks is described.
The present method does not require that the input-output
pairs for each layer to be known "a priori", since all
modules are trained concurrently. For an N-module system,
N separate pools of chromosomes are maintained and updated.
The algorithm is tested using the 4-bit parity problem and
a classification problem. Experiment results are presented
and discussed.},
topology = {feed-forward},
network = { },
encoding = { },
evolves = { },
applications = {4-parity, classification}
}
@inproceedings{Collins90,
key = {cogann ref, genetic algorithms, connectionism},
author = {R. Collins and D. Jefferson},
title = {An Artificial Neural Network Representation for
Artificial Organisms},
booktitle = {Proceedings of the Conference on Parallel Problem Solving
from Nature},
year = {1990},
pages = {259--263},
topology = {general},
network = { },
encoding = {direct},
evolves = {parameters},
applications = {simulated world}
}
@incollection{Compiani89,
author = {Compiani, M. and Montanari, D. and Serra, R. and Valastro, G.},
title = {Classifier Systems and Neural Networks},
booktitle = {Parallel Architectures and Neural Networks},
year = {1989},
editor = {Caianiello, E. R.},
publisher = {World Scientific},
address = {Singapore},
pages = {33--43},
network = { },
encoding = {indirect, classifier systems},
applications = { }
}
@inproceedings{Das92,
key = {genetic algorithms, connectionism, neural networks},
author = {Rajarshi Das and Darrell Whitley},
title = {Genetic Sparse Distributed Memories},
booktitle = {Proceedings of the Conference on Combinations of Genetic
Algorithms and Neural Networks},
year = {1992},
pages = {97--107},
topology = { },
network = { },
encoding = { },
evolves = { },
applications = { }
}
@inproceedings{Dasgupta92,
key = {genetic algorithms, connectionism, neural networks},
author = {Dipankar Dasgupta and Douglas McGregor},
title = {Designing Application-Specific Neural Networks using
the Structured Genetic Algorithm},
booktitle = {Proceedings of the International Conference on Combinations
of Genetic Algorithms and Neural Networks},
year = {1992},
pages = {87--96},
topology = {feed-forward},
network = { },
encoding = {direct},
evolves = {parameters},
applications = {xor, 4-2-4 encoder-decoder}
}
@inproceedings{Davis88,
key = {connectionism, neural networks, formal equivalence,
classifier systems, mapping networks to classifiers
genetic algorithms},
author = {Lawrence Davis},
title = {Mapping Classifier Systems into Neural Networks},
booktitle = {Proceedings of the Workshop on Neural Information Processing
Systems 1},
year = {1988},
pages = {49--56},
topology = { },
network = { },
encoding = {indirect, classifier systems},
evolves = { },
applications = { }
}
@inproceedings{Davis89,
key = {novel operators, adaptive parameter optimization,
neural networks, connectionism, cogann ref},
author = {Lawrence Davis},
title = {Adapting Operator Probabilities in Genetic Algorithms},
booktitle = {Proceedings of the Third International Conference on Genetic
Algorithms},
year = {1989},
pages = {61--69},
topology = { },
network = { },
encoding = { },
evolves = { },
applications = { }
}
@inproceedings{Davis89a,
author = {Lawrence Davis},
title = {Mapping Neural Networks into Classifier Systems},
booktitle = {Proceedings of the Third International Conference on Genetic
Algorithms},
year = {1989},
pages = {375--378},
network = { },
applications = { }
}
@article{DeRouin92,
key = {genetic algorithms connectionism neural networks cogann},
author = {E. DeRouin and J. Brown},
title = {Alternative Learning Methods for Training Neural
Network Classifiers},
journal = {Proceedings of the SPIE - The International Society
for Optical Engineering},
year = {1992},
volume = {1710, pt.1},
pages = {II-474--II-483},
abstract = {ABSTRACT
Neural networks have proven very useful in the field of pattern
classification by mapping input patterns into one of several
categories. Rather than being specifically programmed,
backpropagation networks (BPNs) 'learn' this mapping by
exposure to a training set, a collection of input pattern
samples matched with their corresponding output classification. The
proper construction of this training set is crucial to successful
training of a BPN. One of the criteria to be met for
proper construction of a training set is that each of the
classes must be adequately represented. A class that is
represented less often in the training data may not be learned
as completely or correctly, impairing the network's discrimination
ability. The degree of impairment is a function of (among
other factors) the relative number of samples of each class
used for training. The paper addresses the problem of
unequal representation in training sets by proposing two
alternative methods of learning. One adjusts the learning
rate for each class to achieve user-specified goals. The other
utilizes a genetic algorithm to set the connection weights with
a fitness function based on these same goals. These methods are
tested using both artificial and real-world training data.},
topology = {feed-forward},
network = { },
encoding = { },
evolves = {parameters},
applications = {classification}
}
@article{Dessert92,
key = {genetic algorithms connectionism neural networks cogann},
author = {P. E. Dessert},
title = {Anomaly Detection in Data Using Neural Networks With
         Natural Selection},
journal = {Proceedings of the SPIE - The International Society for
           Optical Engineering},
year = {1992},
volume = {1710, pt.1},
pages = {II-725--II-733},
abstract = {ABSTRACT
Frequently, time series data taken off machines contains
erroneous data points due to errors in the measurement of the
data. One such instance of measuring devices recording anomalies
occurs in the crash testing of vehicles. Force and acceleration
data is collected which an engineer inspects for anomalies,
correcting those that are found. Artificial Neural Network (ANN)
technology was successfully applied to this problem to eliminate
the cost and delay of this manual process. The Author employed
a machine learning algorithm that simulates the Darwinian concept
of survival of the fittest known as the Genetic Learning Algorithm
(GLA). By combining the strength of the GLA and ANNs, a
network architecture was created that optimized the size,
speed, and accuracy of the ANN. This hybridized system also
used the GLA to determine the smallest number of inputs into the
ANN that were necessary to detect anomalies in data. This
algorithm is known as GENENET, and is described in the paper.},
topology = { },
network = { },
encoding = { },
evolves = {connectivity},
applications = { }
}
@incollection{Dodd91,
  key          = {genetic algorithms, connectionism, dolphin vocalization},
  author       = {N. Dodd},
  title        = {Optimization of Network Structure using Genetic Techniques},
  booktitle    = {Applications of Artificial Intelligence in Engineering VI},
  year         = {1991},
  editor       = {G. Rzevski and R. A. Adey},
  topology     = { },
  network      = { },
  encoding     = { },
  evolves      = {connectivity},
  applications = { }
}
@inproceedings{Dodd91a,
  key          = {connectionism},
  author       = {N. Dodd and D. Macfarlane and C. Marland},
  title        = {Optimization of Artificial Neural Network Structure Using
                  Genetic Techniques Implemented on Multiple Transputers},
  booktitle    = {Proceedings of Transputing '91},
  year         = {1991},
  topology     = { },
  network      = { },
  encoding     = { },
  evolves      = {connectivity},
  applications = { }
}
@inproceedings{Dolan87,
key = {genetic algorithm, connectionism, evolve neural net
       architecture competitive learning, Hebbian learning,
       CRAM, cogann ref},
author = {Charles P. Dolan and Michael G. Dyer},
title = {Toward the Evolution of Symbols},
booktitle = {Proceedings of the Second International Conference on
             Genetic Algorithms},
year = {1987},
editor = {John J. Grefenstette},
publisher = {Lawrence Erlbaum Associates},
address = {Hillsdale, New Jersey},
pages = {123--131},
topology = { },
network = { },
encoding = { },
evolves = { },
applications = { }
}
@techreport{Dolan87a,
key = {genetic algorithm, connectionism, evolve neural net
       architecture competitive learning, Hebbian learning,
       CRAM, cogann ref},
author = {Charles P. Dolan and Michael G. Dyer},
title = {Symbolic Schemata in Connectionist Memories: Role Binding
         and the Evolution of Structure},
institution = {AI Laboratory, University of California, Los Angeles},
year = {1987},
type = {Technical Report},
number = {UCLA-AI-87-11},
topology = { },
network = { },
encoding = { },
evolves = { },
applications = { }
}
@inproceedings{Dominic92,
key = {genetic algorithms, connectionism, hill-climbing,
       mutation only, cogann ref},
author = {S. Dominic and R. Das and D. Whitley and C. Anderson},
title = {Genetic Reinforcement Learning for Neural Networks},
booktitle = {Proceedings of the International Joint Conference on
             Neural Networks},
year = {1992},
pages = {II-71--II-76},
abstract = {Abstract
The genetic algorithms which have been shown to yield good
performance for neural network weight optimization are really
genetic hill-climbers, with a strong reliance on mutation rather
than hyperplane sampling. Neural control problems are more
appropriate for these genetic hill-climbers than supervised
learning applications because in reinforcement learning
applications gradient information is not directly available.
Genetic reinforcement learning produces competitive results with
AHC, another reinforcement learning paradigm for neural networks
that employs temporal difference methods. The genetic hill-climbing
algorithm appears to be robust over a wide range of learning
conditions.},
topology = { },
network = { },
encoding = { },
evolves = {parameters},
applications = {controller}
}
@inproceedings{Dress87a,
key = {genetic algorithms, connectionism, cogann ref},
author = {W. B. Dress},
title = {Darwinian Optimization of Synthetic Neural Systems},
booktitle = {Proceedings of the IEEE First Annual International
             Conference on Neural Networks},
year = {1987},
topology = { },
network = { },
encoding = { },
evolves = {connectivity?},
applications = { }
}
@inproceedings{Dress89,
key = {connectionism, cogann ref},
author = {W. B. Dress},
title = {Genetic Optimization in Synthetic Systems},
year = {1989},
topology = { },
network = { },
encoding = { },
evolves = { },
applications = { }
}
@book{Dress90,
key = {genetic algorithms, connectionism, cogann ref},
author = {W. B. Dress},
title = {Electronic Life and Synthetic Intelligent Systems},
year = {1990},
publisher = {Instrumentation and Controls Division, Oak Ridge National Laboratory},
topology = { },
network = { },
encoding = { },
evolves = { },
applications = { }
}
@inproceedings{Dress90a,
  key          = {genetic algorithms, connectionism, cogann ref},
  author       = {W. B. Dress},
  title        = {In-Silico Gene Expression: A Specific Example and
                  Possible Generalizations},
  booktitle    = {Proceedings of Emergence and Evolution of Life-Forms},
  year         = {1990},
  topology     = { },
  network      = { },
  encoding     = { },
  evolves      = { },
  applications = { }
}
@inproceedings{Dress87,
key = {connectionism, genetic algorithms, cogann ref},
author = {W. B. Dress and J. R. Knisley},
title = {A Darwinian Approach to Artificial Neural Systems},
booktitle = {1987 IEEE Conference on Systems, Man, and Cybernetics},
year = {1987},
month = oct,
topology = { },
network = { },
encoding = { },
evolves = { },
applications = { }
}
@inproceedings{Eberhart91,
key = {connectionism, cogann ref},
author = {R. C. Eberhart and R. W. Dobbins},
title = {Designing Neural Network Explanation Facilities Using
         Genetic Algorithms},
booktitle = {Proceedings of the International Joint Conference on
             Neural Networks},
year = {1991},
pages = {1758--1763},
topology = { },
network = { },
encoding = { },
evolves = { },
applications = { }
}
@inproceedings{Eberhart92,
key = {genetic algorithms, connectionism, neural networks},
author = {Russell C. Eberhart},
title = {The Role of Genetic Algorithms in Neural Network
         Query-Based Learning and Explanation Facilities},
booktitle = {Proceedings of the Conference on Combinations of Genetic
             Algorithms and Neural Networks},
year = {1992},
pages = {169--183},
topology = { },
network = { },
encoding = { },
evolves = { },
applications = { }
}
@book{Edelman87,
author = {Edelman, G. M.},
title = {Neural Darwinism: The Theory of Neuronal Group Selection},
year = {1987},
publisher = {Basic Books},
address = {New York},
network = { },
applications = { }
}
@article{Elias92a,
key = {genetic algorithms connectionism neural networks cogann},
author = {J. G. Elias},
title = {Target tracking using impulsive analog circuits},
journal = {Proceedings of the SPIE - The International Society for
           Optical Engineering},
year = {1992},
volume = {1709, pt.1},
pages = {338--350},
abstract = {ABSTRACT
The electronic architecture and silicon implementation of an
artificial neuron which can be used to process and classify
dynamic signals is described. The electrical circuit
architecture is modeled after complex neurons in the
vertebrate brain which have spatially extensive dendritic tree
structures that support large numbers of synapses. The
circuit is primarily analog and, as in the biological
model system, is virtually immune to process variations
and other factors which often plague more conventional circuits.
The nonlinear circuit is sensitive to both temporal and
spatial signal characteristics but does not make use of
the conventional neural network concept of weights, and as
such does not use multipliers, adders, look-up-tables,
microprocessors or other complex computational devices.
The Author shows that artificial neural networks with
passive dendritic tree structures can be trained, using a specialized
genetic algorithm, to produce control signals useful for
target tracking and other dynamic signal processing applications.},
topology = { },
network = { },
encoding = { },
evolves = {parameters},
applications = {target tracking, signal processing}
}
@inproceedings{Elias92,
key = {genetic algorithms, connectionism, neural networks},
author = {John G. Elias},
title = {Genetic Generation of Connection Patterns for a
         Dynamic Artificial Neural Network},
booktitle = {Proceedings of the Conference on Combinations of Genetic
             Algorithms and Neural Networks},
year = {1992},
pages = {38--54},
topology = { },
network = { },
encoding = { },
evolves = {connectivity, parameters},
applications = { }
}
@inproceedings{Falcon91,
author = {Falcon, J. F.},
title = {Simulated Evolution of Modular Networks},
booktitle = {Artificial Neural Networks, IWANN91, Granada},
year = {1991},
editor = {Prieto, A.},
series = {Lecture Notes in Computer Science},
volume = {540},
publisher = {Springer-Verlag},
pages = {204--211},
topology = { },
network = { },
encoding = { },
evolves = { },
applications = { }
}
@article{Farmer86,
key = {immune networks, machine learning},
author = {Farmer, J. D. and Packard, N. H. and Perelson, A. S.},
title = {The immune system, adaptation, and machine learning},
journal = {Physica D},
year = {1986},
volume = {22},
pages = {187--204},
topology = { },
network = { },
encoding = { },
evolves = {feature detectors},
applications = {pattern classification}
}
@inproceedings{Fekadu93,
author = {Fekadu, A.A. and Hines, E.L. and Gardner, J.W.},
title = {Genetic Algorithm Design of Neural Net Based Electronic Nose},
booktitle = {Artificial Neural Networks and Genetic Algorithms},
year = {1993},
editor = {Albrecht, R.F. and Reeves, C.R. and Steele, N.C.},
publisher = {Springer-Verlag},
pages = {691--698},
topology = { },
network = { },
encoding = { },
evolves = { },
applications = { }
}
@inproceedings{Feldman93,
  key          = {connectionism cogann},
  author       = {David S. Feldman},
  title        = {Fuzzy Network Synthesis and Genetic Algorithms},
  booktitle    = {Proceedings of the Fifth International Conference on
                  Genetic Algorithms},
  year         = {1993},
  topology     = { },
  network      = { },
  encoding     = { },
  evolves      = { },
  applications = { }
}
@article{Fenanzo86,
key = {genetic algorithms, connectionism, cogann ref},
author = {Fenanzo, Jr., A. J.},
title = {Darwinian Evolution as a Paradigm for AI Research},
journal = {SIGART Newsletter},
year = {1986},
month = jul,
number = {97},
pages = {22--23},
topology = { },
network = { },
encoding = { },
evolves = { },
applications = { }
}
@inproceedings{Fielder93,
key = {genetic algorithms connectionism neural networks cogann},
author = {D. Fielder and C.O. Alford},
title = {Counting and Naming Connection Islands on a Grid of Conductors},
booktitle = {Proceedings of the Conference on Artificial Neural Networks and
             Genetic Algorithms},
organization = {ANNGA93},
year = {1993},
editor = {Albrecht, R.F. and Reeves, C.R. and Steele, N.C.},
publisher = {Springer-Verlag},
pages = {731},
topology = { },
network = { },
encoding = { },
evolves = { },
applications = { }
}
@mastersthesis{Floreano92,
  author       = {Floreano, D.},
  title        = {Patterns of Interactions in Ecosystems of Neural Networks},
  year         = {1992},
  school       = {Neural Computation, Dept of Comp Sci., Univ of Stirling},
  topology     = { },
  network      = { },
  encoding     = { },
  evolves      = { },
  applications = { }
}
@techreport{Floreano93,
  author       = {Floreano, D.},
  title        = {ROBOGEN: a Software Package for Evolutionary Control Systems},
  institution  = {Cognitive technology laboratory, Trieste},
  year         = {1993},
  number       = {93-01},
  topology     = { },
  network      = { },
  encoding     = { },
  evolves      = { },
  applications = {control systems robot}
}
@unpublished{Floreano91,
author = {Floreano, D. and Miglino, O. and Parisi, D.},
title = {Emerging Complex Behaviours in Ecosystems of Neural Networks},
year = {1991},
note = {Unpublished manuscript},
topology = { },
network = { },
encoding = { },
evolves = { },
applications = { }
}
@inproceedings{Floreano94,
  author       = {Floreano, D. and Mondada, F.},
  title        = {Automatic Creation of an Autonomous Agent: Genetic Evolution
                  of a Neural-Network Driven Robot},
  booktitle    = {Proceedings of the Conference on Simulation of Adaptive Behavior},
  year         = {1994},
  topology     = { },
  network      = { },
  encoding     = { },
  evolves      = { },
  applications = {robot controller}
}
@article{Fogel90,
author = {Fogel, D.B. and Fogel, L.J. and Porto, V.W.},
title = {Evolving Neural Networks},
journal = {Biological Cybernetics},
year = {1990},
volume = {63},
pages = {487--493},
topology = { },
network = { },
encoding = { },
evolves = { },
applications = { }
}
@inproceedings{Fogel93a,
  author       = {Fogel, D.B. and Simpson, P.K.},
  title        = {Evolving Fuzzy Clusters},
  booktitle    = {Proceedings of the International Conference on Neural Networks},
  organization = {ICNN93},
  year         = {1993},
  topology     = { },
  network      = { },
  encoding     = { },
  evolves      = { },
  applications = { }
}
@inproceedings{Fogel93,
author = {Fogel, David B.},
title = {Using Evolutionary Programming to Create Neural
         Networks that are Capable of Playing Tic-Tac-Toe},
booktitle = {Proceedings of the American Power Conference},
year = {1993},
publisher = {IEEE},
pages = {875--879},
topology = { },
network = { },
encoding = { },
evolves = { },
applications = {tic-tac-toe}
}
@inproceedings{Fogel93b,
author = {David B. Fogel},
title = {Using Evolutionary Programming to Create Neural
         Networks that are Capable of Playing Tic-Tac-Toe},
booktitle = {Proceedings of the International Conference on Neural Networks},
year = {1993},
internal-note = {Possible duplicate of entry Fogel93 (same title and year,
                 different booktitle) -- verify which venue is correct},
topology = { },
network = { },
encoding = { },
evolves = { },
applications = {tic-tac-toe}
}
@techreport{Fogel93c,
key = {genetic algorithms, connectionism, COGANN},
author = {David B. Fogel and Lawrence J. Fogel},
title = {Method and Apparatus for Training a Neural Network
         Using Evolutionary Programming},
institution = {United States},
year = {1993},
month = may,
type = {Patent 5214746},
note = {U.S. Patent 5,214,746, issued May 25, 1993},
topology = { },
network = { },
encoding = { },
evolves = {parameters},
applications = { }
}
@article{Fontanari91,
key = {genetic algorithm connectionism},
author = {J.F. Fontanari and R. Meir},
title = {Evolving a Learning Algorithm for the Binary Perceptron},
journal = {Network},
year = {1991},
volume = {2},
pages = {353--359},
network = {perceptron},
encoding = { },
evolves = {learning rule},
applications = { }
}
@inproceedings{Freisleben93,
author = {Freisleben, B. and H{\"a}rtfelder, M.},
title = {Optimization of Genetic Algorithms by Genetic Algorithms},
booktitle = {Artificial Neural Nets and Genetic Algorithms},
year = {1993},
editor = {Albrecht, R.F. and Reeves, C.R. and Steele, N.C.},
publisher = {Springer-Verlag},
pages = {392--399},
topology = { },
network = { },
encoding = { },
evolves = { },
applications = { }
}
@techreport{Fritzke93,
author = {Bernd Fritzke},
title = {Growing Cell Structures -- A Self-Organizing
         Network for Unsupervised and Supervised Learning},
institution = {International Computer Science Institute},
year = {1993},
month = may,
address = {1947 Center Street, Suite 600, Berkeley, California 94704},
number = {TR-93-026},
topology = {feed-forward},
network = {self-organizing},
encoding = { },
evolves = {feature detectors},
applications = { }
}
@inproceedings{Fullmer92,
  key          = {genetic algorithms connectionism neural networks cogann},
  author       = {B. Fullmer and R. Miikkulainen},
  title        = {Using Marker-Based Genetic Encoding of Neural Networks to
                  Evolve Finite-State Behaviour},
  booktitle    = {Toward a Practice of Autonomous Systems. Proceedings of
                  the First European Conference on Artificial Life},
  year         = {1992},
  editor       = {F.J. Varela and P. Bourgine},
  publisher    = {MIT Press},
  address      = {Cambridge, MA, USA},
  abstract     = {ABSTRACT
A new mechanism for genetic encoding of neural networks is
proposed, which is loosely based on the marker structure of biological
DNA. The mechanism allows all aspects of the network structure,
including the number of nodes and their connectivity, to be
evolved through genetic algorithms. The effectiveness of the
encoding scheme is demonstrated in an object recognition task that
requires artificial creatures (whose behavior is driven by a neural
network) to develop high-level finite-state exploration and
discrimination strategies. The task requires solving the sensory-motor
grounding problem, i.e., developing a functional understanding of
the effects that a creature's movement has on its sensory input.},
  topology     = { },
  network      = { },
  encoding     = { },
  evolves      = {connectivity},
  applications = {object recognition}
}
@inproceedings{Gallagher92,
author = {Gallagher, J. C. and Beer, R. D.},
title = {A Qualitative Dynamical Analysis of Evolved Locomotion Control},
booktitle = {From Animals to Animats, Proceedings of the Second
             International Conference on Simulation of Adaptive
             Behaviour (SAB 92)},
year = {1992},
editor = {Roitblat, H. and Meyer, J-A. and Wilson, S.},
publisher = {The MIT Press},
address = {Cambridge, MA},
topology = {recurrent?},
network = { },
encoding = { },
evolves = { },
applications = {robot controller}
}
@inproceedings{Game93,
author = {Game, G. W. and James, C. D.},
title = {The Application of Genetic Algorithms to the Optimal Selection
         of Parameter Values in Neural Networks for Attitude
         Control Systems},
booktitle = {IEE Colloquium on 'High Accuracy Platform Control in
             Space'},
year = {1993},
publisher = {IEE},
address = {London},
pages = {3/1--3/3},
note = {Digest No. 1993/148},
topology = { },
network = { },
encoding = { },
evolves = {parameters},
applications = { }
}
@techreport{deGaris89,
key = {genetic algorithms, connectionism, cogann ref},
author = {Hugo de Garis},
title = {WALKER, A Genetically Programmed, Time Dependent, Neural
         Net Which Teaches a Pair of Sticks to Walk},
institution = {Center for AI, George Mason Univ, Virginia},
year = {1989},
type = {Technical Report},
topology = { },
network = { },
encoding = { },
evolves = { },
applications = {controller}
}
@book{deGaris90,
key = {connectionism, cogann ref},
author = {Hugo de Garis},
title = {Genetic Programming: Building Nanobrains with
         Genetically Programmed Neural Network Modules},
year = {1990},
publisher = {CADEPS AI Research Unit, Universit\'e Libre de Bruxelles},
address = {CP 194/7, B-1050 Brussels, Belgium},
topology = { },
network = { },
encoding = { },
evolves = { },
applications = { }
}
@inproceedings{deGaris90a,
key = {genetic algorithm GenNets connectionism, cogann ref},
author = {Hugo de Garis},
title = {BRAIN Building with GenNets},
booktitle = {Proceedings of INNC-90},
year = {1990},
volume = {2},
pages = {1036--1039},
address = {Paris},
publisher = {Kluwer Academic Publishers},
topology = { },
network = { },
encoding = { },
evolves = { },
applications = { }
}
@inproceedings{deGaris90b,
key = {genetic algorithm GenNets connectionism robot control LIZZY, cogann ref},
author = {Hugo de Garis},
title = {Genetic Programming: Evolution of a Time Dependent Neural
         Network Module which Teaches a Pair of Stick Legs to Walk},
booktitle = {Proceedings of the 9th European Conference on Artificial
             Intelligence},
year = {1990},
month = aug,
note = {August 6--10},
address = {Stockholm, Sweden},
pages = {204--206},
topology = { },
network = { },
encoding = { },
evolves = { },
applications = {controller}
}
@inproceedings{deGaris92,
key = {genetic algorithms, connectionism},
author = {Hugo de Garis},
title = {Exploring GenNet Behaviors Using Genetic Programming to
         Explore Qualitatively New Behaviors in Recurrent Neural Networks},
booktitle = {Proceedings of the International Joint Conference on Neural
             Networks},
organization = {IJCNN-92},
year = {1992},
pages = {III-547--III-552},
abstract = {ABSTRACT
The neural network research community's preoccupation with convergent
networks (until the recent rise of 'recurrent backpropagation' algorithms
[e.g. WILLIAMS \& ZIPSER 1989ab]) has not been unreasonable. Relatively
little analytical work had been done on neural networks whose inputs
and/or outputs are time-dependent, hence few guidelines existed on how to
train such networks. Consequently, research concentrated on more
restrictive 'static' neural nets such as 'feedforward' (Backprop)
[RUMELHART \& McCLELLAND 1986] and "Hopfield" (clamped inputs, convergent
outputs) [HOPFIELD 1982]. This emphasis on convergence was unfortunate,
because the true richness of neural network dynamics is to be found when
inputs and/or outputs are time-dependent. This paper shows that Genetic
Programming techniques (i.e. using Genetic Algorithms to build/evolve
complex systems) can be applied successfully to training nonconvergent
networks, and presents some examples of their extraordinary behavioral
versatility. This paper terminates by comparing GenNet behaviors with
those generated by the new 'recurrent backpropagation' algorithms
[WILLIAMS \& ZIPSER 1989ab]. It is claimed that the GenNet behaviors
are a lot more flexible and interesting because they do not require
the training process to be "closely supervised".},
topology = {recurrent},
network = { },
encoding = { },
evolves = { },
applications = { }
}
@inproceedings{deGaris92a,
key = {genetic algorithms connectionism neural networks cogann},
author = {Hugo de Garis},
title = {Steerable GenNets: the Genetic Programming of Steerable
         Behaviors in GenNets},
booktitle = {Toward a Practice of Autonomous Systems. Proceedings of the
             First European Conference on Artificial Life},
year = {1992},
editor = {F.J. Varela and P. Bourgine},
publisher = {MIT Press},
address = {Cambridge, MA, USA},
abstract = {ABSTRACT
Shows how genetic programming techniques (i.e. the art of
applied evolution, or building complex systems using the genetic
algorithm) can be used to evolve dynamic behaviors in neural systems
which are controllable or steerable. The genetic algorithm evolves
the weights of a fully-connected time-dependent neural network
(called a GenNet), such that the same GenNet is capable of
generating two separate time-dependent behaviors, depending upon
the setting of two different values of a clamped input control
variable. By freezing these weights in the GenNet and then
applying intermediate control values, one obtains intermediate
behaviors, showing that the GenNet has generalized its behavioral
learning. It has become controllable or steerable. This principle
is applicable to the evolution of many controllable neural
behaviors and is useful in the construction of artificial
creatures (with artificial nervous systems) based on neural modules.
One simply evolves two behaviors at different settings of the
control input so that the GenNet generalizes its behavioral learning.
In this paper, a concrete example of this process is given in the
form of the genetic programming of a variable frequency generator GenNet.
This paper ends with a discussion on the handcrafters vs. evolutionists
controversy, concerning future approaches to artificial creature (biot)
building.},
topology = { },
network = { },
encoding = { },
evolves = { },
applications = { }
}
@inproceedings{deGaris93,
key = {genetic algorithms connectionism neural networks cogann},
author = {Hugo de Garis},
title = {Circuits of Production Rule GenNets. The Genetic
         Programming of Artificial Nervous Systems},
booktitle = {Artificial Neural Nets and Genetic Algorithms},
organization = {ANNGA93},
year = {1993},
pages = {699--705},
topology = { },
network = { },
encoding = { },
evolves = { },
applications = { }
}
@inproceedings{deGaris93a,
key = {connectionism, genetic algorithms, cogann,
       Genetic Programming, GenNets Genetically Programmed Neural Network
       Modules, Artificial Nervous Systems, Biots Biological Robots,
       Darwinian Robotics, 1000-GenNet Biots, GenNet Accelerators,
       GenNet Shaping.},
author = {Hugo de Garis},
title = {Incremental Evolution of Neural Networks: Genetic Programming
         in Incremental Steps},
booktitle = {Proceedings of the World Congress on Neural Networks},
organization = {WCNN93},
year = {1993},
pages = {II447--II450},
abstract = {ABSTRACT
This paper addresses itself to the question of Incremental Evolution
of neural networks, which is defined to be the art of evolving
neural networks in incremental steps, using Genetic Algorithms.
One evolves the weight values of a fully connected neural network
(called a GenNet [de Garis 1990, 1993]) containing N neurons to
perform T tasks, and then takes the result (i.e. the evolved weights
of the N neurons) and adds a few more neurons dN, to evolve the
performance of a few more tasks dT. This paper investigates (a) whether
this can be done at all, (b) whether it is faster to evolve an N + dN
GenNet performing T + dT tasks from scratch or to do it incrementally
(i.e. [N,T] then [N+dN,T+dT]), and (c) how the two approaches
(i.e. from scratch or incremental) compare in task performance quality.
Incremental Evolution will become an important issue when the various
brain builder groups around the world (i.e. groups using evolved
neural network modules to build artificial nervous systems for
biological robots (biots), e.g. Beer's group at Case Western Reserve
University USA, Cliff et al's group at Sussex University UK, and
the Author's group at ATR Japan [de Garis 1993] are confronted with
the decision whether to start from scratch when desiring to evolve
biots with a greater number of behaviors, or to increment their
already evolved nervous systems. Nature obviously had to increment.},
topology = { },
network = { },
encoding = { },
evolves = { },
applications = { }
}
@article{Gierer88,
key = {genetic algorithms, connectionism, cogann ref},
author = {A. Gierer},
title = {Spatial Organization and Genetic Information in
         Brain Development},
journal = {Biological Cybernetics},
year = {1988},
volume = {59},
pages = {13--21},
topology = { },
network = { },
encoding = { },
evolves = { },
applications = { }
}
@inproceedings{Gonzalez-Seco92,
key = {genetic algorithms, connectionism, GLANN},
author = {Jose Gonzalez-Seco},
title = {A Genetic Algorithm as the Learning Procedure for
         Neural Networks},
booktitle = {Proceedings of the International Joint Conference on Neural
             Networks},
organization = {IJCNN-92},
year = {1992},
pages = {I-835--I-840},
abstract = {ABSTRACT
The relationship between genetic algorithms and neural networks has been
somewhat one directional. In most cases a genetic algorithm has been used
to generate better neural networks. In this paper we combine the use of
genetics algorithms and neural networks, but from a conceptually
different point of view. We show that it is possible to use a genetics
algorithm as the learning algorithm for a neural network. In our model
the neural network has a fixed architecture and processes binary strings
using genetic operators.},
topology = { },
network = { },
encoding = { },
evolves = { },
applications = { }
}
@techreport{Gruau92,
  author       = {Frederic Gruau},
  title        = {Cellular Encoding of Genetic Neural Network},
  institution  = {Laboratoire de l'Informatique du Parall\'elisme, Ecole Normale Sup\'erieure de Lyon},
  year         = {1992},
  type         = {Research Report 92.21},
  topology     = {general},
  network      = { },
  encoding     = {indirect cellular encoding},
  evolves      = {connectivity, parameters},
  applications = { }
}
@inproceedings{Gruau92a,
author = {Frederic Gruau},
title = {Genetic Synthesis of Boolean Neural Networks with a
         Cell Rewriting Developmental Process},
booktitle = {Proceedings of COGANN-92 International
             Workshop on Combinations of Genetic Algorithms
             and Neural Networks},
year = {1992},
editor = {Whitley, L.D. and Schaffer, J.D.},
publisher = {IEEE Computer Society Press},
pages = {55--74},
topology = {general},
network = { },
encoding = {indirect cellular encoding},
evolves = {connectivity, parameters},
applications = { }
}
@unpublished{Gruau92b,
author = {Frederic Gruau},
title = {Cellular Encoding of Genetic Neural Networks
         I. Theoretical Properties},
year = {1992},
note = {Submitted to Evolutionary Computation},
topology = { },
network = { },
encoding = { },
evolves = { },
applications = { }
}
@inproceedings{Gruau93,
  author       = {Frederic Gruau},
  title        = {A Learning and Pruning Algorithm for Genetic Neural Networks},
  booktitle    = {European Symposium on Artificial Neural Networks},
  year         = {1993},
  topology     = { },
  network      = { },
  encoding     = { },
  evolves      = { },
  applications = { }
}
@inproceedings{Gruau93a,
  key          = {genetic algorithms, connectionism cogann},
  author       = {Frederic Gruau},
  title        = {Genetic Synthesis of Modular Neural Networks},
  booktitle    = {Proceedings of the Fifth International Conference on Genetic
                  Algorithms},
  year         = {1993},
  topology     = {general},
  network      = { },
  encoding     = {indirect cellular encoding},
  evolves      = {connectivity, parameters},
  applications = { }
}
@incollection{Gruau94,
  author       = {Frederic Gruau},
  title        = {Genetic Micro Programming of Neural Networks},
  booktitle    = {Advances in Genetic Programming},
  year         = {1994},
  editor       = {Kim Kinnear},
  publisher    = {MIT Press},
  topology     = {general},
  network      = { },
  encoding     = {indirect cellular encoding},
  evolves      = {connectivity, parameters},
  applications = { }
}
@phdthesis{Gruau94a,
author = {Frederic Gruau},
title = {Neural Network Synthesis Using Cellular Encoding
         and the Genetic Algorithm},
year = {1994},
school = {Ecole Normale Sup\'erieure de Lyon},
note = {anonymous ftp: lip.ens-lyon.fr (140.77.1.11) directory
        pub/Rapports/PhD file PhD94-01-E.ps.Z (english) PhD94-01-F.ps.Z (french)},
topology = {general},
network = { },
encoding = {indirect cellular encoding},
evolves = {connectivity, parameters},
applications = { }
}
@techreport{Gruau93b,
author = {Frederic Gruau and Darrell Whitley},
title = {The Cellular Development of Neural Networks: the
         Interaction of Learning and Evolution},
institution = {Laboratoire de l'Informatique du Parall\'elisme, Ecole Normale Sup\'erieure de Lyon},
year = {1993},
number = {93-04},
topology = {general},
network = { },
encoding = {indirect cellular encoding},
evolves = {connectivity, parameters},
applications = { }
}
@article{Gruau93c,
  author       = {Frederic Gruau and Darrell Whitley},
  title        = {Adding Learning to the Cellular Developmental Process:
                  a Comparative Study},
  journal      = {Evolutionary Computation},
  year         = {1993},
  volume       = {1},
  number       = {3},
  topology     = {general},
  network      = { },
  encoding     = {indirect cellular encoding},
  evolves      = {connectivity},
  applications = { }
}
@techreport{Gruau93d,
  author       = {Frederic Gruau and Darrell Whitley},
  title        = {Adding Learning to the Cellular Developmental Process:
                  a Comparative Study},
  institution  = {Laboratoire de l'Informatique du Parall\'elisme, Ecole Normale Sup\'erieure de Lyon},
  year         = {1993},
  type         = {Research Report RR93-04},
  topology     = {general},
  network      = { },
  encoding     = {indirect cellular encoding},
  evolves      = {connectivity},
  applications = { }
}
@article{Gruau93e,
key = {genetic algorithm connectionism neural networks cogann},
author = {Frederic Gruau},
title = {Cellular Encoding as a Graph Grammar},
journal = {IEE Colloquium on Grammatical Inference: Theory,
           Applications and Alternatives},
year = {1993},
month = apr,
note = {Colloquium held 22--23 April 1993},
volume = {(Digest No.092)},
pages = {17/1-10},
publisher = {IEE},
address = {London},
abstract = {ABSTRACT
Cellular encoding is a method for encoding a family of neural networks
into a set of labeled trees. Such sets of trees can be evolved by
the genetic algorithm so as to find a particular set of trees that
encodes a family of Boolean neural networks for computing a family of
Boolean functions. Cellular encoding is presented as a graph grammar.
A method is proposed for translating a cellular encoding into a
set of graph grammar rewriting rules of the kind used in the
Berlin algebraic approach to graph rewriting. The genetic search
of neural networks via cellular encoding appears as a grammatical
inference process where the language to parse is implicitly specified,
instead of explicitly by positive and negative examples. Experimental
results shows that the genetic algorithm can infer grammars that
derive neural networks for the parity, symmetry and decoder Boolean
function of arbitrary large size.},
topology = {general},
network = { },
encoding = {cellular encoding, graph grammar},
evolves = {connectivity, parameters},
applications = {parity etc}
}
@techreport{Guha92,
key = {connectionism, neural networks, cogann},
author = {Aloke Guha and Steven A. Harp and Tariq Samad},
title = {Genetic Algorithm Synthesis of Neural Networks},
institution = {United States},
year = {1992},
month = {August 18},
type = {Patent 5140530},
topology = {feed-forward},
network = { },
encoding = {indirect, developmental},
evolves = {connectivity},
applications = {2-parity, digit recognition, function approximation}
}
@inproceedings{Guo92,
key = {genetic algorithms, connectionism, neural networks},
author = {Zhichao Guo and Robert Uhrig},
title = {Using Genetic Algorithms to Select Inputs for Neural Networks},
booktitle = {Proceedings of the Workshop on Combinations of
Genetic Algorithms and Neural Networks},
organization = {COGANN92},
year = {1992},
pages = {223--234},
topology = { },
network = { },
encoding = { },
evolves = { },
applications = { }
}
@inproceedings{Hoffgen91,
author = {H{\"o}ffgen, K.-U. and Siemon, H.P. and Ultsch, A.},
title = {Genetic Improvement of Feedforward Nets for Approximating
Functions},
booktitle = {Proceedings of the Conference on Parallel Problem
Solving from Nature},
year = {1991},
editor = {Schwefel, H.-P. and M{\"a}nner, R.},
publisher = {Lecture notes in Computer Science 496, Springer Verlag},
pages = {302--306},
topology = {feed-forward},
network = { },
encoding = { },
evolves = { },
applications = {function approximation}
}
@inproceedings{Hancock89,
author = {Hancock, P. J. B.},
title = {Optimising Parameters in Neural Net Simulations by
Genetic Algorithm},
booktitle = {Mini-Symposium on Neural Network Computation},
year = {1989},
publisher = {Rank Prize Funds, Broadway: unpublished},
topology = { },
network = { },
encoding = { },
evolves = {parameters},
applications = { }
}
@inproceedings{Hancock90,
key = {connectionism, cogann ref},
author = {P. J. B. Hancock},
title = {GANNET: Design of a Neural Network for Face Recognition
by Genetic Algorithm},
booktitle = {Proceedings of the IEEE Workshop on Genetic Algorithms,
Simulated Annealing and Neural
Networks},
year = {1990},
address = {University of Glasgow, Scotland},
topology = { },
network = { },
encoding = { },
evolves = {connectivity?},
applications = {face recognition}
}
@phdthesis{Hancock92,
author = {Hancock, P. J. B.},
title = {Coding Strategies for Genetic Algorithms and Neural Nets},
year = {1992},
school = {Department of Computing Science and Mathematics,
University of Stirling},
topology = { },
network = { },
encoding = { },
evolves = { },
applications = { }
}
@inproceedings{Hancock92a,
author = {Hancock, P. J. B.},
title = {Recombination Operators for the Design of Neural Nets by
Genetic Algorithms},
booktitle = {Parallel Problem Solving from Nature 2},
year = {1992},
editor = {M{\"a}nner, R. and Manderick, B.},
publisher = {Elsevier, North Holland},
pages = {441--450},
topology = { },
network = { },
encoding = { },
evolves = {parameters, connectivity?},
applications = { }
}
@inproceedings{Hancock92b,
author = {Hancock, P. J. B.},
title = {Pruning Neural Nets by Genetic Algorithm},
booktitle = {Proceedings of the International Conference on Artificial
Neural Networks, Brighton},
year = {1992},
editor = {Aleksander, I. and Taylor, J.G.},
publisher = {Elsevier},
pages = {991--994},
topology = { },
network = { },
encoding = { },
evolves = { },
applications = { }
}
@inproceedings{Hancock92c,
author = {Hancock, P. J. B.},
title = {Genetic Algorithms and Permutation Problems: a Comparison of
Recombination Operators for Neural Net Structure Specification},
booktitle = {Proceedings of COGANN workshop, IJCNN, Baltimore},
year = {1992},
editor = {Whitley, D.},
publisher = {IEEE},
topology = { },
network = { },
encoding = { },
evolves = {connectivity, parameters},
applications = { }
}
@inproceedings{Hancock91,
author = {Hancock, P. J. B. and Smith, L. S.},
title = {GANNET: Genetic Design of a Neural Net for Face Recognition},
booktitle = {Proceedings of the Conference on Parallel Problem Solving from Nature},
year = {1991},
editor = {Schwefel, H.-P. and M{\"a}nner, R.},
publisher = {Lecture notes in Computer Science 496, Springer Verlag},
pages = {292--296},
topology = { },
network = { },
encoding = { },
evolves = {connectivity, parameters},
applications = {face recognition}
}
@techreport{Harp89,
author = {Harp, S. A. and Samad, T. and Guha, A.},
title = {The Genetic Synthesis of Neural Networks},
institution = {Honeywell CSDD},
year = {1989},
number = {TR CSDD-89-I4852-2},
topology = {feed-forward},
network = { },
encoding = {indirect, developmental},
evolves = {connectivity},
applications = {2-parity, digit recognition, function approximation}
}
@inproceedings{Harp89a,
author = {Harp, S. A. and Samad, T. and Guha, A.},
title = {Towards the Genetic Synthesis of Neural Networks},
booktitle = {Proceedings of the Third International Conference on Genetic Algorithms},
year = {1989},
editor = {Schaffer, J.D.},
publisher = {Morgan Kaufmann},
pages = {360--369},
institution = {Honeywell CSDD},
topology = {feed-forward},
network = { },
encoding = {indirect, developmental},
evolves = {connectivity},
applications = {2-parity, digit recognition, function approximation}
}
@inproceedings{Harp89b,
author = {Harp, S. A. and Samad, T. and Guha, A.},
title = {Designing Application-Specific Neural Networks Using the Genetic Algorithm},
booktitle = {Neural Information Processing Systems 2},
year = {1989},
editor = {Touretzky, D.S.},
institution = {Honeywell CSDD},
topology = {feed-forward},
network = { },
encoding = {indirect, developmental},
evolves = {connectivity},
applications = {2-parity, digit recognition, function approximation}
}
@article{Harp92,
key = {genetic algorithms connectionism neural networks cogann},
author = {Steven A. Harp and Tariq Samad},
title = {Optimizing Neural Networks with Genetic Algorithms},
journal = {Proceedings of the American Power Conference},
year = {1992},
volume = {54 pt 2},
pages = {1138--1143},
publisher = {Illinois Inst of Technology},
address = {Chicago, IL, USA.},
abstract = {ABSTRACT
We describe an approach to application-specific neural network
design using genetic algorithms. A genetic algorithm is a robust
optimization method particularly well suited for search spaces that are
high-dimensional, discontinuous and noisy-features that typify the neural
network design problem. Our approach is relevant to virtually all neural
network applications: it is network-model independent and it permits
optimization for arbitrary, user-defined criteria. We have developed an
experimental system, NeuroGENESYS, and have conducted several experiments
on small-scale problems. Performance improvements over manual
designs have been observed, the interplay between performance
criteria and network design aspects has been demonstrated, and
general design principles have been uncovered.},
topology = {feed-forward},
network = { },
encoding = {indirect, developmental},
evolves = {connectivity},
applications = {2-parity, digit recognition, function approximation}
}
@inproceedings{Harp91a,
key = {algorithms, connectionism, Kohonen,
clustering, vector quantization, cogann ref},
author = {Steven Alex Harp and Tariq Samad},
title = {Genetic Optimization of Self-Organizing Feature Maps},
booktitle = {Proceedings of the International Joint Conference on Neural
Networks},
year = {1991},
pages = {341--346},
organization = {IJCNN-91},
volume = {I},
network = {self-organizing},
encoding = { },
evolves = {connectivity},
applications = { }
}
@inproceedings{Harp91,
author = {Steven Harp and Tariq Samad},
title = {Genetic Synthesis of Neural Network Architecture},
booktitle = {Handbook of Genetic Algorithms},
year = {1991},
editor = {Davis, L.},
publisher = {Van Nostrand Reinhold},
pages = {202--221},
chapter = {15},
topology = {feed-forward},
network = { },
encoding = {indirect, developmental},
evolves = {connectivity},
applications = {2-parity, digit recognition, function approximation}
}
@techreport{Harvey93,
author = {Harvey, I. and Husbands, P. and Cliff, D.},
title = {Genetic Convergence in a Species of Evolved Robot Control
Architectures},
institution = {Cognitive Science, University of Sussex},
year = {1993},
number = {CSRP 267},
mnote = {See also ICGA93},
topology = { },
network = { },
encoding = { },
evolves = { },
applications = {robot controller}
}
@inproceedings{Hassoun93,
key = {connectionism, genetic algorithms, cogann},
author = {Mohamad H. Hassoun and Jing Song},
title = {Multilayer Perceptron Learning Via Genetic Search for
Hidden Layer Activations},
booktitle = {Proceedings of the World Congress on Neural Networks},
organization = {WCNN93},
year = {1993},
pages = {III437--III444},
abstract = {ABSTRACT
A new learning technique is proposed for multilayer neural
networks based on genetic search, in hidden target space, and
gradient descent learning strategies. Our simulations show that
the new algorithm combines the global optimization capabilities
of genetic algorithms with the speed of gradient descent local
search in order to outperform pure descent-based algorithms such
as backpropagation. In addition, we show that genetic search in
hidden target space is less complex than that of weight space.},
topology = {multi-layered},
network = {multi-layer perceptron},
encoding = { },
evolves = { },
applications = { }
}
@inproceedings{Heistermann91,
author = {Heistermann, J.},
title = {The Application of a Genetic Approach as an Algorithm for
Neural Networks},
booktitle = {Parallel Problem Solving from Nature},
year = {1991},
editor = {Schwefel, H.-P. and M{\"a}nner, R.},
publisher = {Lecture notes in Computer Science 496, Springer Verlag},
pages = {297--301},
topology = { },
network = { },
encoding = { },
evolves = { },
applications = { }
}
@article{Heistermann89,
key = {connectionism, genetic algorithms},
author = {J. Heistermann},
title = {Parallel Algorithms for Learning in Neural Networks
with Evolution Strategy},
journal = {Parallel Computing},
year = {1989},
volume = {12},
topology = { },
network = { },
encoding = { },
evolves = { },
applications = { }
}
@inproceedings{Heistermann90,
key = {connectionism},
author = {J. Heistermann},
title = {Learning in Neural Nets by Genetic Algorithms},
booktitle = {Parallel Processing in Neural Systems and Computers},
year = {1990},
editor = {R. Eckmiller and G. Hartmann and G. Hauske},
publisher = {Elsevier Science Publishers},
pages = {165--168},
topology = { },
network = { },
encoding = { },
evolves = { },
applications = { }
}
@inproceedings{Heistermann92,
key = {genetic algorithms connectionism neural networks cogann},
author = {J. Heistermann},
title = {A Mixed Genetic Approach to the Optimization of
Neural Controllers},
booktitle = {CompEuro 1992 Proceedings. Computer Systems and Software
Engineering},
year = {1992},
editor = {P. Dewilde and J. Vandewalle},
publisher = {IEEE Comput. Soc. Press},
address = {Los Alamitos, CA, USA},
pages = {459--464},
abstract = {ABSTRACT
The Author discusses some of the capabilities of genetic algorithms
(GAs). GAs are compared with other standard optimization methods
like gradient descent or simulated annealing (SA). It is shown that SA is
just a special case of GA. The role of a population in the optimization
process is demonstrated by an example. GA was applied as a learning
algorithm to neural networks.},
topology = { },
network = { },
encoding = { },
evolves = {parameters},
applications = {controller}
}
@article{Hinton87,
key = {Neural nets genetic algorithms connectionism, cogann ref},
author = {Geoffrey E. Hinton and Stephen J. Nowlan},
title = {How Learning Can Guide Evolution},
journal = {Complex Systems},
year = {1987},
month = {June},
volume = {1},
number = {1},
pages = {495--502},
topology = { },
network = { },
encoding = { },
evolves = { },
applications = { }
}
@inproceedings{Hintz90,
key = {connectionism, genetic algorithm (?)},
author = {K.J. Hintz and J.J. Spofford},
title = {Evolving a Neural Network},
booktitle = {Proceedings of the 5th IEEE International Symposium on
Intelligent Control},
year = {1990},
month = {September},
editor = {A. Meystel},
publisher = {IEEE Computer Society Press},
address = {Los Alamitos, CA},
pages = {479--484},
topology = { },
network = { },
encoding = { },
evolves = { },
applications = { }
}
@inproceedings{Ho92,
key = {genetic algorithms connectionism neural networks cogann},
author = {A.W. Ho and G.C. Fox},
title = {Competitive-Cooperative System of Distributed Artificial
Neural Agents},
booktitle = {Parallel Computing: Problems, Methods and Applications.
Selection of Papers Presented at the Conference on Parallel Computing:
Achievements, Problems and Prospects},
year = {1992},
editor = {P. Messina and A. Murli},
publisher = {Elsevier, Amsterdam, Netherlands},
pages = {499--507},
abstract = {ABSTRACT
A framework for simulations of hierarchical organizations of
interacting, distributed artificial agents on distributed-memory, MIMD
computers is presented. Interactions among aggregates of intelligent
agents in an organization are restricted to obey competition and
cooperation criteria. Each intelligent agent in an organization
is a parallel implementation of a feedforward multilayer perceptrons
neural network using error backpropagation (BP) as the learning rule.
In this preliminary study, domination, viewed as a type of
deterministic genetic algorithm (GA,) is chosen to be the
preferred form of interaction. The framework exploits the hierarchical
nature intrinsic in an organizational approach to problem-solving.
It takes advantage of parallelism at different levels of
granularity, from domain decomposition within the agents to coarse grain
team-level interaction. Transputer-based simulation results for a test
problem of learning the solution to a parity function of
predicate order 10 is discussed.},
topology = {multi-layered},
network = {multi-layer perceptron},
encoding = { },
evolves = { },
applications = { }
}
@article{Holland92,
key = {genetic algorithms connectionism neural networks cogann
robotics},
author = {O.E. Holland and M.A. Snaith},
title = {Neural Control of Locomotion in a Quadrupedal Robot},
journal = {IEE Proceedings Part F: Radar and Signal Processing},
year = {1992},
month = {December},
volume = {139},
number = {6},
pages = {431--436},
abstract = {ABSTRACT
The Authors present results of a first study demonstrating that
the apparently complex task of controlling walking in a real quadrupedal
robot with highly nonlinear interactions between the control elements can
be learned quickly by a crude and simple reinforcement learning
algorithm. They can as yet say little that is useful about the
contribution of reflexes to learned walking, and nothing about the
quality of evolved solutions other than that their discovery by
applying genetic algorithms to real robots is likely to take a
prohibitively long time. However, they hope that their experiences
will point the way to more controlled studies of the applications of
reinforcement learning to real-world problems, especially to
control problems associated with autonomous mobile robots.},
topology = { },
network = { },
encoding = { },
evolves = { },
applications = {robot controller}
}
@article{Honavar89a,
key = {connectionism neural networks constructive algorithms
inductive learning, local architectures, brain modeling, pattern
classification},
author = {Honavar, V. and Uhr, L.},
title = {Brain-Structured Networks That Perceive and Learn},
journal = {Connection Science},
year = {1989},
volume = {1},
pages = {139--159},
topology = {feed-forward, locally connected, structured,
multi-layered, regular, modular},
network = { },
encoding = { },
evolves = {feature detectors, connectivity, topology},
applications = {pattern classification, vision, brain modeling}
}
@inproceedings{Honavar89b,
key = {connectionism neural networks constructive algorithms
inductive learning, pattern classification},
author = {Honavar, V. and Uhr, L.},
title = {A Network of Neuron-Like Units That Learns by Generation
As Well As Reweighting of its Links},
booktitle = {Proceedings of the 1988 Connectionist Models Summer School},
year = {1989},
publisher = {Morgan Kaufmann, Palo Alto},
topology = {feed-forward},
network = { },
encoding = { },
evolves = {feature detectors, topology},
applications = {pattern classification}
}
@inproceedings{Honavar89c,
key = {connectionism neural networks constructive algorithms
inductive learning, pattern classification},
author = {Honavar, V. and Uhr, L.},
title = {Generation, Local Receptive Fields, and Global Convergence Improve Perceptual Learning in Connectionist Networks},
booktitle = {Proceedings of the Tenth International Joint Conference on Artificial Intelligence},
year = {1989},
publisher = {Morgan Kaufmann, Palo Alto},
topology = {feed-forward},
network = { },
encoding = { },
evolves = {feature detectors, topology},
applications = {pattern classification}
}
@article{Honavar93,
key = {connectionism neural networks constructive algorithms
inductive learning, radial basis functions, pattern
classification},
author = {Honavar, V. and Uhr, L.},
title = {Generative Learning Structures and Processes for
Generalized Connectionist Networks},
journal = {Information Sciences},
year = {1993},
volume = {70},
pages = {75--108},
topology = {feed-forward},
network = { },
encoding = { },
evolves = {feature detectors, connectivity, topology},
applications = {pattern classification}
}
@inproceedings{Hoptroff90,
author = {Hoptroff, R. G. and Hall, T. J. and Burge, R. E.},
title = {Experiments With a Neural Controller},
booktitle = {1990 International Joint Conference on Neural Networks -
IJCNN 90},
year = {1990},
publisher = {IEEE, New York},
pages = {735--740},
volume = {2},
topology = { },
network = { },
encoding = { },
evolves = { },
applications = {controller}
}
@inproceedings{Hsu92,
author = {Hsu, L.S. and Wu, Z.B.},
title = {Input Pattern Encoding Through Generalized Adaptive Search},
booktitle = {Proceedings of COGANN-92 International Workshop on Combinations of Genetic Algorithms and Neural Networks},
year = {1992},
editor = {Whitley, L.D. and Schaffer, J.D.},
publisher = {IEEE Computer Society Press},
pages = {235--247},
topology = { },
network = { },
encoding = { },
evolves = { },
applications = { }
}
@article{Huang92,
author = {Huang, R.},
title = {Systems Control With the Genetic Algorithm and the Nearest
Neighbour Classification},
journal = {CC-AI},
year = {1992},
volume = {9},
number = {2--3},
pages = {225--236},
topology = { },
network = { },
encoding = { },
evolves = { },
applications = {controller}
}
@techreport{Husbands92,
author = {Husbands, P. and Harvey, I. and Cliff, D. T.},
title = {Analysing Recurrent Dynamical Networks Evolved for
Robot Control},
institution = {University of Sussex, School of Cognitive and
Computing Sciences},
year = {1992},
number = {CSRP265},
topology = {recurrent},
network = { },
encoding = { },
evolves = {connectivity},
applications = {robot controller}
}
@inproceedings{Ichikawa90,
author = {Ichikawa, Y.},
title = {Evolution of Neural Networks and Application to Motion Control},
booktitle = {Proceedings of the IEEE International Conference on
Intelligent Motion Control},
year = {1990},
publisher = {IEEE},
pages = {239--245},
topology = { },
network = { },
encoding = { },
evolves = {connectivity},
applications = {controller}
}
@inproceedings{Jacob93,
author = {Jacob, C. and Rehder, J.},
title = {Evolution of Neural Net Architectures by a Hierarchical
Grammar-Based Genetic System},
booktitle = {Artificial Neural Nets and Genetic Algorithms},
year = {1993},
editor = {Albrecht, R.F. and Reeves, C.R. and Steele, N.C.},
publisher = {Springer-Verlag},
pages = {72--79},
topology = { },
network = { },
encoding = {indirect, grammar based},
evolves = {connectivity},
applications = { }
}
@article{Janson92,
key = {genetic algorithms connectionism neural networks cogann},
author = {D.J. Janson and J.F. Frenzel},
title = {Application of Genetic Algorithms to the Training of Higher Order Neural Networks},
journal = {Journal of Systems Engineering},
year = {1992},
volume = {2},
number = {4},
pages = {272--276},
abstract = {ABSTRACT
Product unit neural networks are a new form of feedforward
learning networks in which several summing units are replaced by units
capable of calculating a weighted product of inputs. While such networks
can be trained using traditional backpropagation, the solution
involves the manipulation of complex-valued expressions. As an
alternative, this paper investigates the training of product
networks using genetic algorithms. Results are presented on the
training of a neural network to calculate the optimum width of
transistors in a CMOS switch given desired operating parameters.
It is shown how local minima affect the performance of the
genetic algorithm, and one method of overcoming this is presented.},
topology = {feed-forward},
network = {product-unit networks},
encoding = { },
evolves = {parameters},
applications = { }
}
@techreport{Jefferson90,
author = {Jefferson, D. and Collins, R. and Cooper, C. and Dyer, M.
and Flowers, M. and Korf, R. and Taylor, C. and Wang, A.},
title = {Evolution as a Theme in Artificial Life: the Genesys/Tracker System},
institution = {Computer Science, UCLA},
year = {1990},
number = {UCLA-AI-90-09},
topology = { },
network = { },
encoding = { },
evolves = { },
applications = { }
}
@article{Jones93,
author = {Jones, A.J.},
title = {Genetic Algorithms and Their Applications to the Design of
Neural Networks},
journal = {Neural Computing and Applications},
year = {1993},
volume = {1},
pages = {32--45},
topology = { },
network = { },
encoding = { },
evolves = {connectivity},
applications = { }
}
@article{Jones93a,
author = {Jones, A.J. and MacFarlane, D.},
title = {Comparing Networks With Differing Neural-Node
Functions Using Transputer-Based Genetic Algorithms},
journal = {Neural Computing and Applications},
year = {1993},
volume = {1},
pages = {256--267},
topology = { },
network = { },
encoding = { },
evolves = { },
applications = { }
}
@book{Kadaba90a,
key = {vehicle routing, Connectionism, genetic algorithms, XROUTE, expert system, neural network, cogann
ref},
author = {Nagesh Kadaba and Kendall E. Nygard},
title = {Improving the Performance of Genetic Algorithms in
Automated Discovery of Parameters},
year = {1990},
month = {January 25},
publisher = {Dept. of SC and OR, North Dakota State University},
note = {Draft},
topology = { },
network = { },
encoding = { },
evolves = {parameters},
applications = { }
}
@inproceedings{Kargupta91,
key = {genetic algorithms selection crowding; relation AI machine
learning connectionist networks genetic algorithms, cogann ref},
author = {Hillol Kargupta and Robert E. Smith},
title = {System Identification with Evolving Polynomial Networks},
booktitle = {Proceedings of the Fourth International Conference on Genetic
Algorithms},
year = {1991},
pages = {370--376},
abstract = {Abstract:
The construction of models for prediction and control of
initially unknown, potentially nonlinear systems is a difficult,
fundamental problem in machine learning and engineering control.
In this paper, a {\em genetic algorithm} (GA) based technique is used
to iteratively form polynomial networks that model the behavior of
nonlinear systems. This approach is motivated by the {\em group
method of data handling} (GMDH) (Ivakhnenko, 1971), but attempts to
overcome the computational overhead and locality associated with the
original GMDH. The approach presented here uses a multi-modal GA
(Deb, 1989) to select nodes for a network based on an
information-theoretic fitness measure. Preliminary results show
that the GA is successful in modeling continuous-time and
discrete-time chaotic systems. Implications and extensions of
this work are discussed.},
topology = { },
network = { },
encoding = { },
evolves = { },
applications = { }
}
@article{Karim92,
key = {process control, connectionism, genetic},
author = {M.N. Karim and S.L. Rivera},
title = {Use of Recurrent Neural Networks for Bioprocess
Identification in On-Line Optimization by Micro-Genetic Algorithms},
journal = {Proceedings of the American Control Conference},
year = {1992},
volume = {3},
pages = {1931--1932},
publisher = {American Automatic Control Council},
address = {Green Valley, AZ},
abstract = {ABSTRACT
The use of recurrent neural networks in bioprocess identification
and optimization is investigated. A recurrent neural network
is trained on a set of fermentation data, and thereafter used as a
nonlinear process model to estimate nonmeasurable process states at
different conditions. With the bioprocess state variable information
available, an optimization technique can be used to generate optimum
controls settings to improve the process performance. This paper explores
the use of Micro-Genetic Algorithms as a technique for bioreactor
optimization. Simulation results will be discussed based in the
fermentative ethanol production by the anaerobic bacteria Zymomonas
mobilis.},
topology = {recurrent},
network = { },
encoding = { },
evolves = {parameters},
applications = {optimization}
}
@inproceedings{Karunanithi92,
author = {Karunanithi, N. and Das, R. and Whitley, D.},
title = {Genetic Cascade Learning for Neural Networks},
booktitle = {Proceedings of COGANN-92 International Workshop on
Combinations of Genetic Algorithms and Neural Networks},
year = {1992},
editor = {Whitley, L.D. and Schaffer, J.D.},
publisher = {IEEE Computer Society Press},
pages = {134--145},
topology = { },
network = { },
encoding = { },
evolves = {parameters},
applications = { }
}
@article{Kauffman86,
author = {Kauffman, S.A. and Smith, R.G.},
title = {Adaptive Automata Based on Darwinian Selection},
journal = {Physica D},
year = {1986},
volume = {22},
pages = {68--82},
institution = {Univ Pennsylvania},
topology = { },
network = { },
encoding = { },
evolves = { },
applications = { }
}
@inproceedings{Keesing91,
author = {Keesing, R. and Stork, D.G.},
title = {Evolution and Learning in Neural Networks,
the Number and Distribution of Learning Trials Affect the
Rate of Evolution},
booktitle = {Advances in Neural Information Processing Systems 3},
year = {1991},
editor = {Lippmann, R.P. and Moody, J.E. and Touretzky, D.S.},
publisher = {Morgan Kaufmann},
pages = {804--810},
topology = { },
network = { },
encoding = { },
evolves = {learning rule},
applications = { }
}
@book{Kerszberg,
key = {connectionism, cogann ref},
author = {Michel Kerszberg},
title = {Genetic and Epigenetic Factors in Neural Circuit
Wiring (preliminary)},
publisher = {Institut f{\"u}r Festk{\"o}rperforschung der KFA J{\"u}lich},
topology = { },
network = { },
encoding = { },
evolves = { },
applications = { }
}
@incollection{Kerszberg88,
key = {genetic algorithms, connectionism, cogann ref},
author = {Michel Kerszberg and Aviv Bergman},
title = {The Evolution of Data Processing Abilities in
Competing Automata},
booktitle = {Computer Simulation in Brain Science, Copenhagen, Denmark},
year = {1986},
month = {August},
editor = {Cotterill, R.M.J.},
publisher = {Cambridge University Press},
pages = {249--259},
topology = { },
network = { },
encoding = { },
evolves = { },
applications = { }
}
@phdthesis{Kirby88,
key = {genetic algorithms, connectionism, cogann ref},
author = {K.G. Kirby},
title = {Intraneuronal Dynamics and Evolutionary Learning},
year = {1988},
school = {Dept. of Computer Science,
Wayne State University},
topology = { },
network = { },
encoding = { },
evolves = { },
applications = { }
}
@article{Kirby86,
key = {genetic algorithms, connectionism, cogann ref},
author = {K.G. Kirby and Michael Conrad},
title = {Intraneuronal Dynamics as a Substrate for
Evolutionary Learning},
journal = {Physica D},
year = {1986},
volume = {22},
pages = {205--215},
topology = { },
network = { },
encoding = { },
evolves = { },
applications = { }
}
@conference{Kirby89,
key = {genetic algorithms, connectionism, cogann ref},
author = {K.G. Kirby and Michael Conrad and R.R. Kampfner},
title = {Evolutionary Learning in Reaction-Diffusion Neurons},
organization = {SUBMITTED TO Bull. Math. Biol.},
year = {1989},
topology = { },
network = { },
encoding = { },
evolves = { },
applications = { }
}
@inproceedings{Kitano90,
key = {connectionism, cogann ref},
author = {Hiroaki Kitano},
title = {Empirical Studies on the Speed of Convergence of
Neural Network Training Using Genetic Algorithms},
booktitle = {Proceedings of the 8th National Conference on Artificial
Intelligence (AAAI-90)},
organization = {PROC AAAI-90},
year = {1990},
publisher = {MIT Press, Cambridge},
pages = {789--795},
topology = { },
network = { },
encoding = { },
evolves = {parameters},
applications = { }
}
@article{Kitano90a,
key = {connectionism, cogann ref},
author = {Hiroaki Kitano},
title = {Designing Neural Network Using Genetic Algorithm with
Graph Generation System},
journal = {Complex Systems},
year = {1990},
volume = {4},
pages = {461--476},
topology = { },
network = { },
encoding = {graph grammar},
evolves = {connectivity},
applications = { }
}
@techreport{Kitano92,
key = {connectionism, cogann ref},
author = {Hiroaki Kitano},
title = {Neurogenetic Learning: An Integrated Method of
Designing and Training Neural Networks using Genetic Algorithms},
institution = {Carnegie Mellon University},
year = {1992},
month = {March},
type = {CMU-CMT-92-134},
topology = { },
network = { },
encoding = { },
evolves = {connectivity, parameters},
applications = { }
}
@article{Kitano93,
key = {genetic algorithms connectionism neural networks cogann},
author = {Hiroaki Kitano},
title = {Continuous Generation Genetic Algorithms},
journal = {Journal of the Society of Instrument and Control Engineers},
year = {1993},
month = {January},
volume = {32},
number = {1},
pages = {31--38},
abstract = {ABSTRACT
Presents a continuous generation genetic algorithm. Most genetic
algorithms use a discrete generation model in which all individuals
in a population synchronize mating period. The discrete generation model,
however, wastes processor time in parallel implementations when the
fitness of each individual (proportionally or reversely) correlates
with the computational cost of its evaluation. An example of such a
task is neural network design and training. In some cases, over
80\% of processor time has been wasted. The continuous generation model
mitigates this problem by introducing asynchronous mating; the
continuous generation model increases the number of reproductions per
unit time by over 500\% relative to the discrete model. CPU idle time
has been minimized to 1/25. Also, a
significant improvement in convergence speed has been estimated.},
topology = { },
network = { },
encoding = { },
evolves = { },
applications = { }
}
@article{Kouchi92,
key = {genetic algorithms connectionism neural networks cogann},
author = {M. Kouchi and H. Inayoshi and T. Hoshino},
title = {Optimization of Neural-Net Structure by Genetic
Algorithm with Diploidy and Geographical Isolation Model},
journal = {Journal of Japanese Society for Artificial Intelligence},
year = {1992},
month = {May},
volume = {7},
number = {3},
pages = {509--517},
abstract = {ABSTRACT
The structure of a simple neural network is optimized by the
use of a genetic-algorithm. The neural network is a perceptron, which has
three outputs; the logical AND, OR and XOR of two inputs. The evaluation
function for optimization is a linear combination of the correctness, the
network sizes, and an auxiliary term inducing the optimum solution. The
chromosome is a vector of the link weights of the network. The genetic
operators used are crossing-over and point-mutation on the parent
chromosomes. Two genetic rules were tested. In the haploidy rule, each
individual has single chromosome, and the offspring is generated by
crossing-over the parents' chromosomes at a randomly chosen locus and
taking one of those crossed-over chromosomes. In the diploidy rule, each
individual has a pair of chromosomes, and the offspring's chromosomes are
generated by combining the gamete produced through the meiosis of the
parents' chromosomes. The other model used in the genetic algorithm is
the geographical isolation model, where the entire population is
divided into four sub-populations, in which the local selection and
reproduction are carried out, though, in some time interval,
randomly sampled individuals are exchanged among sub-populations.
Comparison was made among four combinations of haploid or diploid,
and single-population or multiple sub-populations. Diploidy together
with the sub-population model was proved to be the best for this
optimization problem. Thus, the optimum structure of network was found.},
network = {perceptron},
encoding = { },
evolves = {connectivity},
applications = { }
}
@book{Koza92,
author = {John R. Koza},
title = {Genetic Programming: On the Programming of
Computers by Means of Natural Selection},
year = {1992},
publisher = {MIT Press},
address = {Cambridge, MA},
topology = { },
network = { },
encoding = { },
evolves = { },
applications = { }
}
@inproceedings{Koza91,
key = {genetic algorithms, connectionism, one-bit adder, cogann ref},
author = {John R. Koza and James P. Rice},
title = {Genetic Generation of Both the Weights and Architecture
for a Neural Network},
booktitle = {Proceedings of the International Joint Conference on
Neural Networks},
organization = {IJCNN-91},
year = {1991},
volume = {II},
pages = {397--404},
topology = { },
network = { },
encoding = {genetic programming?},
evolves = {connectivity, parameters},
applications = { }
}
@inproceedings{Krishnakumar92,
author = {Krishnakumar, K.},
title = {Immunized Neurocontrol - Concepts and Initial Results},
booktitle = {Proceedings of COGANN-92 International Workshop on
Combinations of Genetic Algorithms and Neural Networks},
year = {1992},
editor = {Whitley, L.D. and Schaffer, J.D.},
publisher = {IEEE Computer Society Press},
pages = {146--168},
topology = { },
network = { },
encoding = { },
evolves = { },
applications = { }
}
@inproceedings{Kwiatkowski93a,
author = {Kwiatkowski, L. and Stromboni, J.P.},
title = {Neuromimetic Algorithm processing: Tools for Design of
Dedicated Architectures},
booktitle = {Artificial Neural Nets and Genetic Algorithms},
year = {1993},
editor = {Albrecht, R.F. and Reeves, C.R. and Steele, N.C.},
publisher = {Springer-Verlag},
pages = {706--711},
topology = { },
network = { },
encoding = { },
evolves = {connectivity},
applications = { }
}
@inproceedings{Lai92,
key = {genetic algorithms, connectionism},
author = {W.K. Lai and G.G. Coghill},
title = {Genetic Breeding of Control Parameters for the {Hopfield/Tank}
Neural Net},
booktitle = {Proceedings of the International Joint Conference
on Neural Networks},
organization = {IJCNN-92},
year = {1992},
pages = {IV-618--IV-623},
abstract = {ABSTRACT
Artificial neural networks, especially the Hopfield/Tank neural net
have been used to solve the travelling salesman problem. These networks
usually require a set of parameters to be carefully selected and tuned
to produce sensible solutions. Genetic Algorithms are basically
adaptive systems that transform a population of individuals into new
populations, using relatively simple mechanisms. It has the ability to
efficiently explore the problem sub-space to produce approximate
solutions that are globally competitive. This paper will show how
Genetic Algorithms may be used in conjunction with the Hopfield/Tank
neural net by breeding an effective set of control parameters in the
parameter sub-space to be used by the artificial neural network.},
topology = {hopfield network},
network = { },
encoding = { },
evolves = {parameters},
applications = {travelling salesperson problem}
}
@mastersthesis{Lange93,
author = {Frank Lange},
title = {{\"U}ber den Zusammenhang zwischen Komplexit{\"a}t und
Generalisierungsf{\"a}higkeit Neuronaler Netze},
year = {1993},
school = {Universit{\"a}t Karlsruhe, Institut f{\"u}r Logik, Komplexit{\"a}t
und Deduktionssysteme},
note = {Beyond Soft-Weight-Sharing: Soft-Entropy-Minimization},
type = {Diplomarbeit},
topology = { },
network = { },
encoding = { },
evolves = { },
applications = { }
}
@inproceedings{Lehar87,
author = {Lehar, S. and Weaver, J.},
title = {A Developmental Approach to Neural Network Design},
booktitle = {Proceedings of the IEEE International Conference on Neural Networks},
year = {1987},
publisher = {IEEE Press},
pages = {97--104},
topology = { },
network = { },
encoding = { },
evolves = { },
applications = { }
}
@inproceedings{Lewis92a,
key = {genetic algorithms connectionism neural networks cogann
programming},
author = {M. Anthony Lewis and Andrew H. Fagg and Alan Solidum},
title = {Genetic Programming Approach to the Construction
of a Neural Network for Control of a Walking Robot},
booktitle = {Proceedings of IEEE International Conference on
Robotics and Automation},
year = {1992},
publisher = {IEEE},
address = {Piscataway, NJ, USA},
volume = {3},
pages = {2618--2623},
abstract = {ABSTRACT
The Authors describe the staged evolution of a complex motor
pattern generator (MPG) for the control of a walking robot.
The experiments were carried out on a six-legged, Brooks-style
insect robot. The MPG was composed of a network of neurons with
weights determined by genetic algorithm optimization. Staged evolution
was used to improve the convergence rate of the algorithm. First,
an oscillator for the individual leg movements was evolved. Then,
a network of these oscillators was evolved to coordinate the movements
of the different legs. By introducing a staged set of manageable
challenges, the algorithm's performance was improved.},
topology = { },
network = { },
encoding = { },
evolves = {parameters},
applications = {robot controller}
}
@inproceedings{Lindgren92a,
author = {Lindgren, K. and Nilsson, A. and Nordahl, M.G. and Rade, I.},
title = {Regular Language Inference Using Evolving Neural Networks},
booktitle = {Proceedings of COGANN-92 International Workshop on
Combinations of Genetic Algorithms and Neural Networks},
year = {1992},
editor = {Whitley, L.D. and Schaffer, J.D.},
publisher = {IEEE Computer Society Press},
pages = {75--86},
topology = { },
network = { },
encoding = { },
evolves = { },
applications = {regular language inference}
}
@inproceedings{Lindgren93a,
author = {Lindgren, K. and Nilsson, A. and Nordahl, M.G. and Rade, I.},
title = {Evolving Recurrent Neural Networks},
booktitle = {Artificial Neural Nets and Genetic Algorithms},
year = {1993},
editor = {Albrecht, R.F. and Reeves, C.R. and Steele, N.C.},
publisher = {Springer-Verlag},
pages = {55--62},
topology = {recurrent},
network = { },
encoding = { },
evolves = {connectivity},
applications = { }
}
@inproceedings{Littman91,
key = {genetic algorithms environment fitness functions dynamic;
biological modeling evolution and learning; hillclimbing, cogann ref
ERL, Evolutionary reinforcement,
non-stationary environment, dynamic , neural networks, connectionism},
author = {Michael L. Littman and David H. Ackley},
title = {Adaptation in Constant Utility Non-Stationary Environments},
booktitle = {Proceedings of the Fourth International Conference on
Genetic Algorithms},
year = {1991},
pages = {136--142},
abstract = {Abstract: Environments that vary over time present
special challenges to adaptive systems. Although in the worst case
there may be no hope of effective adaptation, not all forms of
environmental variability need be so disabling. We consider a broad
class of non-stationary environments, those which combine a variable
*result function* with an invariant *utility function*, and
demonstrate via simulation that an adaptive strategy employing both
evolution and learning can tolerate a much higher rate of
environmental variation than an evolution-only strategy. We suggest
that in many cases where stability has previously been assumed, the
constant utility non-stationary environment may in fact be a more
robust description.},
topology = { },
network = { },
encoding = { },
evolves = { },
applications = { }
}
@article{Muhlenbein90a,
author = {Heinz M{\"u}hlenbein},
title = {Limitations of Multi-Layer Perceptron Networks - Steps Towards
Genetic Neural Networks},
journal = {Parallel Computing},
year = {1990},
volume = {14},
pages = {249--260},
topology = {multi-layered},
network = {multi-layer perceptron},
encoding = { },
evolves = { },
applications = { }
}
@inproceedings{Muhlenbein92,
key = {genetic algorithms connectionism neural networks cogann},
author = {Heinz M{\"u}hlenbein},
title = {Parallel Genetic Algorithms and Neural Networks as Learning
Machines},
booktitle = {Parallel computing '91 Proceedings of the International
Conference},
year = {1992},
editor = {D. J. Evans and G. R. Joubert and H. Liddell},
publisher = {North-Holland Publishing Co.},
address = {Amsterdam},
pages = {91--103},
topology = { },
network = { },
encoding = { },
evolves = { },
applications = { }
}
@incollection{Muhlenbein89a,
key = {connectionism, cogann ref},
author = {Heinz M{\"u}hlenbein and J{\"o}rg Kindermann},
title = {The Dynamics of Evolution and Learning: Towards Genetic
Neural Networks},
booktitle = {Connectionism in Perspective},
year = {1989},
editor = {R. Pfeifer and Z. Schreter and F. Fogelman-Soulie and
L. Steels},
publisher = {Elsevier Science Publishers B.V. (North-Holland)},
pages = {173--197},
topology = { },
network = { },
encoding = { },
evolves = { },
applications = { }
}
@inproceedings{Machado92,
key = {genetic algorithms connectionism neural networks cogann},
author = {Ricardo Jose Machado and Armando Freitas da Rocha},
title = {Evolutive Fuzzy Neural Networks},
booktitle = {1992 IEEE International Conference on Fuzzy Systems
(FUZZ-IEEE)},
year = {1992},
publisher = {IEEE},
address = {Piscataway, NJ},
pages = {493--500},
abstract = {ABSTRACT
The Authors describe the combination of fuzzy neural networks
with genetic algorithms, producing a flexible and powerful learning
paradigm, called evolutive learning. Evolutive learning combines as
complementary tools both inductive learning through synaptic weight
adjustment and deductive learning through the modification of the network
topology to obtain the automatic adaptation of system knowledge to the
problem domain environment. Algorithms for the development of an
evolutive learning machine are presented. A fuzzy criterion based on
entropy is proposed to select the architecture for a fuzzy neural
network best suited to a specific problem domain.},
topology = { },
network = { },
encoding = { },
evolves = { },
applications = { }
}
@article{Maeda92,
key = {genetic algorithms connectionism neural networks cogann},
author = {Y. Maeda and Y. Kanata},
title = {A Genetic Algorithm for an Unsupervised Learning of Neural
Networks},
journal = {Engineering \& Technology},
year = {1992},
volume = {10},
number = {2},
pages = {1--7},
abstract = {ABSTRACT
The Authors deal with a genetic algorithm for an unsupervised
learning rule of neural networks. The genetic algorithm consists of four
operations: selection; reproduction; crossover; and mutation. They look
into the learning efficiency of two kinds of the crossover for the
unsupervised learning rule. Moreover, they investigate the learning rate
with respect to the mutation rate.},
topology = { },
network = { },
encoding = { },
evolves = { },
applications = { }
}
@incollection{Mandischer93a,
author = {Mandischer, M.},
title = {Representation and Evolution of Neural Networks},
booktitle = {Artificial Neural Nets and Genetic Algorithms
Proceedings of the International Conference
at Innsbruck, Austria},
year = {1993},
editor = {Albrecht, R.F. and Reeves, C.R. and Steele, N.C.},
publisher = {Springer-Verlag},
address = {Wien and New York},
pages = {643--649},
topology = {feed-forward},
network = { },
encoding = {indirect, developmental},
evolves = {connectivity},
applications = { }
}
@inproceedings{Maniezzo93,
author = {Maniezzo, V.},
title = {Searching Among Search Spaces: Hastening the Genetic Evolution
of Feedforward Neural Networks},
booktitle = {Artificial Neural Nets and Genetic Algorithms},
year = {1993},
editor = {Albrecht, R.F. and Reeves, C.R. and Steele, N.C.},
publisher = {Springer-Verlag},
pages = {635--643},
topology = {feed-forward},
network = { },
encoding = { },
evolves = { },
applications = { }
}
@article{Maniezzo94a,
author = {Maniezzo, V.},
title = {Genetic Evolution of the Topology and Weight Distribution of
Neural Networks},
journal = {IEEE Transactions on Neural Networks},
year = {1994},
volume = {5},
pages = {39--53},
topology = { },
network = { },
encoding = { },
evolves = {connectivity, parameters},
applications = { }
}
@inproceedings{Maricic90,
key = {connectionism},
author = {Borut Maricic and Zoran Nikolov},
title = {{GENNET} - System for Computer Aided Neural Network Design
Using Genetic Algorithms},
booktitle = {Proceedings of the International Joint Conference on Neural
Networks},
year = {1990},
month = jan,
address = {Washington, DC},
pages = {I-102--I-105},
topology = { },
network = { },
encoding = { },
evolves = {connectivity},
applications = { }
}
@article{Marin93,
author = {F.J. Marin and F. Sandoval},
title = {Genetic Synthesis of Discrete-Time Recurrent Neural Network},
journal = {New Trends in Neural Computation},
publisher = {Springer-Verlag},
year = {1993},
pages = {179--184},
topology = {recurrent},
network = { },
encoding = { },
evolves = {connectivity},
applications = { }
}
@inproceedings{Marti92,
key = {genetic algorithms, connectionism},
author = {Leonardo Mart{\'i}},
title = {Genetically Generated Neural Networks II:
Searching for an Optimal Representation},
booktitle = {Proceedings of the International Joint Conference on Neural
Networks},
year = {1992},
pages = {II-221--II-226},
abstract = {ABSTRACT
Genetic Algorithms (GAs) make use of an internal representation of
a given system in order to perform optimization functions. The actual
structural layout of this representation, called a genome, has a
crucial impact on the outcome of the optimization process. The
purpose of this paper is to study the effects of different internal
representations in a GA, which generates neural networks. A second GA
was used to optimize the genome structure. This structure produces an
optimized system within a shorter time interval.},
topology = { },
network = { },
encoding = { },
evolves = { },
applications = { }
}
@inproceedings{Marti92a,
key = {genetic algorithms, connectionism},
author = {Leonardo Mart{\'i}},
title = {Genetically Generated Neural Networks I:
Representational Effects},
booktitle = {Proceedings of the International Joint Conference
on Neural Networks},
organization = {IJCNN-92},
year = {1992},
pages = {IV-537--IV-542},
abstract = {ABSTRACT
This paper studies several applications of genetic algorithms (GAs)
within the neural networks field. After generating a robust GA engine,
the system was used to generate neural network circuit architectures.
This was accomplished by using the GA to determine the weights in a fully
interconnected network. The importance of the internal genetic
representation was shown by testing different approaches. The effects in
speed of optimization of varying the constraints imposed upon the desired
network were also studied. It was observed that relatively loose
constraints provided results comparable to a fully constrained system.
The type of neural network circuits generated were recurrent competitive
fields as described by Grossberg (1982).},
topology = {recurrent},
network = { },
encoding = { },
evolves = {parameters},
applications = { }
}
@mastersthesis{Mayer93,
key = {genetic algorithms connectionism neural networks cogann},
author = {Erik Mayer},
title = {Genetic Algorithm Approach to Neural Network Optimization},
year = {1993},
month = aug,
address = {Toledo, Ohio},
school = {University of Toledo},
type = {Master's Thesis},
topology = { },
network = { },
encoding = { },
evolves = {parameters},
applications = { }
}
@article{Maynard87,
author = {Maynard Smith, J.},
title = {When Learning Guides Evolution},
journal = {Nature},
year = {1987},
volume = {329},
pages = {761--762},
topology = { },
network = { },
encoding = { },
evolves = { },
applications = { }
}
@article{McDonnell92,
key = {genetic algorithms connectionism neural networks cogann
evolutionary programming},
author = {John R. McDonnell and Don E. Waagen},
title = {Evolving Neural Network Architecture},
journal = {Proceedings of SPIE - The International Society for
Optical Engineering},
year = {1992},
volume = {1766},
pages = {690--701},
publisher = {Society for Optical Engineering},
address = {Bellingham, WA USA},
abstract = {ABSTRACT
This work investigates the application of a stochastic search
technique, evolutionary programming, for developing self-organizing
neural networks. The chosen stochastic search method is capable of
simultaneously evolving both network architecture and weights.
The number of synapses and neurons are incorporated into an objective
function so that network parameter optimization is done with respect
to computational costs as well as mean pattern error. Experiments
are conducted using feedforward networks for simple binary mapping
problems.},
topology = {feed-forward},
network = { },
encoding = { },
evolves = {connectivity, parameters},
applications = {binary mapping}
}
@inproceedings{McDonnell93,
key = {connectionism, genetic algorithms, cogann},
author = {John R. McDonnell and Don E. Waagen},
title = {Determining Neural Network Hidden Layer Size Using
Evolutionary Programming},
booktitle = {Proceedings of the World Congress on Neural
Networks 93},
organization = {WCNN93},
year = {1993},
pages = {III564--III567},
abstract = {ABSTRACT
This work investigates the application of evolutionary programming, a
stochastic search technique, for simultaneously determining the
weights and the number of hidden units in a fully-connected,
multi-layer neural network. The simulated evolution search paradigm
provides a means for optimizing both network structure and weight
coefficients. Orthogonal learning is implemented by independently
modifying network structure and weight parameters. Different
structural level search strategies are investigated by comparing the
training processes for the 3-bit parity problem. The results indicate
that evolutionary programming provides a robust framework for evolving
neural networks.},
topology = {general},
network = { },
encoding = { },
evolves = {connectivity, parameters},
applications = {3-parity}
}
@inproceedings{McDonnell93a,
author = {McDonnell, John R. and Waagen, Don E.},
title = {Evolving Neural Network Connectivity},
booktitle = {Proceedings of the American Power Conference},
year = {1993},
publisher = {IEEE},
pages = {863--868},
topology = { },
network = { },
encoding = { },
evolves = {connectivity},
applications = { }
}
@article{McDonnell94,
author = {John R. McDonnell and Don E. Waagen},
title = {Evolving Recurrent Perceptrons for Time-Series Modeling},
journal = {IEEE Transactions on Neural Networks},
year = {1994},
volume = {5},
pages = {24--38},
topology = {recurrent perceptron},
network = { },
encoding = { },
evolves = { },
applications = {time-series}
}
@inproceedings{McGregor92,
author = {McGregor, D. R. and Odetayo, M. O. and Dasgupta, D.},
title = {Adaptive-Control of a Dynamic System Using Genetic-Based
Methods},
booktitle = {Proceedings of the 1992 IEEE International Symposium on
Intelligent Control},
year = {1992},
publisher = {IEEE},
pages = {521--525},
topology = { },
network = { },
encoding = { },
evolves = { },
applications = {control system}
}
@unpublished{McInerney,
key = {connectionism cogann xor encoder},
author = {Michael McInerney and Atam P. Dhawan},
title = {Use of Genetic Algorithms with Back Propagation in Training
of Feed-Forward Neural Networks},
note = {Preprint from the author; don't know about publication},
topology = {feed-forward},
network = { },
encoding = { },
evolves = {parameters},
applications = {2-parity, encoder-decoder}
}
@incollection{Menczer94,
author = {Menczer, F. and Belew, R.K.},
title = {Latent Energy Environments},
booktitle = {Plastic Individuals in Evolving Populations},
year = {1994},
editor = {Belew, R.K. and Mitchell, M.},
publisher = {Addison-Wesley},
address = {Reading, MA},
note = {(in press)},
series = {Santa Fe Institute Studies in the Sciences of Complexity},
topology = { },
network = { },
encoding = { },
evolves = { },
applications = { }
}
@inproceedings{Menczer94a,
  author       = {Menczer, F. and Belew, R.K.},
  title        = {Evolving Sensors in Environments of Controlled Complexity},
  booktitle    = {Artificial Life IV},
  year         = {1994},
  editor       = {Brooks, R. and Maes, P.},
  publisher    = {MIT Press},
  address      = {Cambridge, MA},
  topology     = { },
  network      = { },
  encoding     = { },
  evolves      = { },
  applications = { }
}
@techreport{Menczer90,
author = {Menczer, F. and Parisi, D.},
title = {`Sexual' Reproduction in Neural Networks},
institution = {C.N.R., Rome},
year = {1990},
number = {PCIA-90-06},
topology = { },
network = { },
encoding = { },
evolves = { },
applications = { }
}
@article{Menczer92,
key = {genetic algorithms connectionism neural networks cogann},
author = {F. Menczer and D. Parisi},
title = {Recombination and Unsupervised Learning: Effects of
Crossover in the Genetic Optimization of Neural Networks},
journal = {Network: Computation in Neural Systems},
year = {1992},
month = nov,
volume = {3},
number = {4},
pages = {423--442},
abstract = {ABSTRACT
Genetic algorithms have been successfully used for optimizing
complex functions over multidimensional domains, such as the space of the
connection weights in a neural network. A feed-forward layered network is
used to simulate the life cycle of a synthetic animal that moves in an
environment and captures food objects. The adaptation of the animal (i.e.
of the network's weight matrix) to the environment can be measured by the
amount of reached food objects in a given lifetime. The Authors consider
this amount as a fitness function to be optimized by a genetic algorithm
over the space of the connection weights. The network can learn the
weights that solve the survival task only by means of its genetic
evolution. The recombination genetic operator (crossover) can be seen
as a model of sexual recombination for the population, while mutation
models agamic reproduction. The central problem in trying to apply
crossover is the difficult mapping between the genetic code string
(genotype) and the network's weight matrix (phenotype). For this
reason crossover has been considered unsuitable for this kind of
problem in the past. The Authors propose a simple mapping and compare
the effects of sexual versus agamic reproduction in such a problem.
The results of several parametric simulations are outlined, showing
that crossover actually helps to speed up the genetic learning.},
topology = {feed-forward},
network = { },
encoding = { },
evolves = {parameters},
applications = {simulated world}
}
@inproceedings{Menczer92a,
key = {genetic algorithms connectionism neural networks cogann},
author = {F. Menczer and D. Parisi},
title = {A Model for the Emergence of Sex in Evolving
Networks: Adaptive Advantage or Random Drift?},
booktitle = {Toward a Practice of Autonomous Systems. Proceedings of the
First European Conference on Artificial Life},
year = {1992},
editor = {F.J. Varela and P. Bourgine},
publisher = {MIT Press},
address = {Cambridge, MA, USA},
pages = {337--345},
abstract = {ABSTRACT
The evolution of sex is an intriguing problem in evolutionary
biology: most high organisms use some form of sexual recombination of the
genetic material in the process of reproduction, thus there should be an
adaptive advantage in recombination if sex was selected in the course of
evolution. One might hope that the new tools offered by the simulation
methods of artificial life, genetic algorithms, (GA) and neural networks,
might help the investigation by allowing the study of simplified
models and of their detailed consequences. The Authors start from
some results on the effects of introducing crossover in a GA used for
evolving a population of artificial animals trained on a simple task.
Since there is a clear advantage in applying crossover versus simple
mutations alone, this advantage could be retained by the population
through selection: this hypothesis is tested in a model with local,
individual genetic operators' probabilities by studying the emergent
recombination frequencies. It is unexpectedly hard to analyze the
results of the simulations, as the operator probabilities do not enter
directly in the computation of fitness, while they have a well-known
indirect influence on the 'behaviour' of fitness. The Authors are
monitoring a trait that is not directly selected, thus being subject
to the strong action of random drift.},
topology = { },
network = { },
encoding = { },
evolves = { },
applications = { }
}
@article{Menczer92b,
key = {genetic algorithms connectionism neural networks cogann},
author = {F. Menczer and D. Parisi},
title = {Evidence of Hyperplanes in the Genetic Learning of Neural
Networks},
journal = {Biological Cybernetics},
year = {1992},
volume = {66},
number = {3},
pages = {283--289},
abstract = {ABSTRACT
Genetic algorithms (GA) have been successfully applied to the
learning process of neural networks simulating artificial life.
In previous research the Authors (1990) compared mutation and crossover
as genetic operators on neural networks directly encoded as real
vectors. With reference to crossover they were actually testing the
building blocks hypothesis, as the effectiveness of recombination
relies on the validity of such hypothesis. Even with the real
genotype used, it was found that the average fitness of the
population of neural networks is optimized much more quickly by
crossover than it is by mutation. This indicated that the intrinsic
parallelism of crossover is not reduced by the high cardinality.
In this paper the Authors first summarize such findings and then
propose an interpretation in terms of the spatial correlation of the
fitness function with respect to the metric defined by the average
steps of the genetic operators. Some numerical evidence of such
interpretation is given, showing that the fitness surface appears
smoother to crossover than it does to mutation. This confirms
indirectly that crossover moves along privileged directions, and at
the same time provides a geometric rationale for hyperplanes.},
topology = { },
network = { },
encoding = { },
evolves = { },
applications = { }
}
@article{Merelo93,
author = {Merelo, J.J. and Pat{\'o}n, M. and Ca{\~n}as, A. and
Prieto, A. and Mor{\'a}n, F.},
title = {Genetic Optimization of a Multilayer Neural Network for
Cluster Classification Tasks},
journal = {Neural Network World},
year = {1993},
pages = {175--186},
topology = { },
network = { },
encoding = { },
evolves = {parameters},
applications = {pattern classification}
}
@inproceedings{Miglino93a,
author = {Miglino, O. and Pedone, R. and Parisi, D.},
title = {A 'Noise Gene' for Econets},
booktitle = {Artificial Neural Nets and Genetic Algorithms},
year = {1993},
editor = {Albrecht, R.F. and Reeves, C.R. and Steele, N.C.},
publisher = {Springer-Verlag},
pages = {588--594},
topology = { },
network = { },
encoding = { },
evolves = { },
applications = { }
}
@inproceedings{Mikami93,
author = {Mikami, S. and Tano, H. and Kakazu, Y.},
title = {An Autonomous Legged Robot That Learns to Walk Through
Simulated Evolution},
booktitle = {Self-Organisation and Life, From Simple Rules to Global
Complexity, Proceedings of the Second European
Conference on Artificial Life},
year = {1993},
publisher = {MIT Press},
address = {Cambridge, MA},
pages = {758--767},
topology = { },
network = { },
encoding = { },
evolves = { },
applications = {robot controller}
}
@inproceedings{Miller89,
author = {Miller, G.F. and Todd, P.M. and Hegde, S.U.},
title = {Designing Neural Networks Using Genetic Algorithms},
booktitle = {Proceedings of the Third International Conference on
Genetic Algorithms},
year = {1989},
editor = {Schaffer, J.D.},
publisher = {Morgan Kaufmann},
pages = {379--384},
topology = {feed-forward},
network = { },
encoding = {direct},
evolves = {connectivity},
applications = {parity, four-quadrant, pattern copying}
}
@article{Mitchell93,
author = {Mitchell, M. and Forrest, S.},
title = {Genetic Algorithms and Artificial Life},
journal = {Artificial Life},
year = {1993},
note = {Santa Fe Institute Working Paper 93-11-072},
abstract = {Genetic algorithms are computational models of evolution that
play a central role in many artificial-life models. We review
the history and current scope of research on genetic
algorithms in artificial life, using illustrative examples in
which the genetic algorithm is used to study how learning and
evolution interact, and to model ecosystems, immune system,
cognitive systems, and social systems. We also outline a
number of open questions and future directions for genetic
algorithms in artificial-life research.},
topology = { },
network = { },
encoding = { },
evolves = { },
applications = { }
}
@inproceedings{Mitchell93a,
author = {Mitchell, R.J. and Bishop, J.M. and Low, W.},
title = {Using a Genetic Algorithm to Find the Rules of a Neural
Network},
booktitle = {Artificial Neural Nets and Genetic Algorithms},
year = {1993},
editor = {Albrecht, R.F. and Reeves, C.R. and Steele, N.C.},
publisher = {Springer-Verlag},
pages = {664--669},
topology = { },
network = { },
encoding = { },
evolves = { },
applications = { }
}
@inproceedings{Mjolsness86,
  key          = {connectionism, genetic algorithms, cogann ref},
  author       = {E. Mjolsness and D. H. Sharp},
  title        = {A Preliminary Analysis of Recursively Generated Networks},
  booktitle    = {Proceedings of the American Institute of Physics
                  (Special Issue on Neural Nets)},
  year         = {1986},
  topology     = { },
  network      = { },
  encoding     = { },
  evolves      = { },
  applications = { }
}
@techreport{Mjolsness88,
key = {connectionism, genetic algorithms, cogann ref},
author = {Eric Mjolsness and David H. Sharp and Bradley K. Alpert},
title = {Scaling, Machine Learning, and Genetic Neural Nets},
year = {1988},
month = mar,
institution = {Theoretical Division, Los Alamos National Laboratory},
number = {LA-UR-88-142},
topology = { },
network = { },
encoding = { },
evolves = { },
applications = { }
}
@inproceedings{Mjolsness88a,
key = {genetic connectionism, cogann ref},
author = {Eric Mjolsness and David H. Sharp and Bradley K. Alpert},
title = {Genetic Parsimony in Neural Nets},
year = {1988},
booktitle = {Snowbird 1988},
topology = { },
network = { },
encoding = { },
evolves = { },
applications = { }
}
@article{Mjolsness89,
  key          = {connectionism, cogann ref},
  author       = {Eric Mjolsness and David H. Sharp and Bradley K. Alpert},
  title        = {Scaling, Machine Learning, and Genetic Neural Nets},
  journal      = {Advances in Applied Mathematics},
  year         = {1989},
  volume       = {10},
  topology     = { },
  network      = { },
  encoding     = { },
  evolves      = { },
  applications = { }
}
@inproceedings{Montana89,
author = {Montana, D.J. and Davis, L.},
title = {Training Feedforward Neural Networks Using Genetic Algorithms},
booktitle = {Proceedings of the Eleventh International Joint
Conference on Artificial Intelligence},
year = {1989},
publisher = {Morgan Kaufmann},
address = {San Mateo, CA},
pages = {762--767},
institution = {BBN Systems},
topology = {feed-forward},
network = { },
encoding = { },
evolves = {parameters},
applications = { }
}
@techreport{Moriarty93,
author = {David E. Moriarty and Risto Miikkulainen},
title = {Evolving Complex {Othello} Strategies Using
Marker-based Genetic Encoding of Neural Networks},
institution = {Department of Computer Sciences, The University of
Texas at Austin},
year = {1993},
number = {AI93-206},
topology = { },
network = { },
encoding = { },
evolves = { },
applications = {game playing (othello)}
}
@techreport{Moriarty94,
author = {David E. Moriarty and Risto Miikkulainen},
title = {Using Evolutionary Neural Networks for Value Ordering
in Constraint Satisfaction Problems},
institution = {Department of Computer Sciences, The University of Texas at
Austin},
year = {1994},
number = {AI94-218},
topology = { },
network = { },
encoding = { },
evolves = { },
applications = { }
}
@inproceedings{Moriarty94a,
author = {David E. Moriarty and Risto Miikkulainen},
title = {Evolving Neural Networks to Focus Minimax Search},
booktitle = {Proceedings of the Twelfth National Conference on
Artificial Intelligence (AAAI-94)},
address = {Seattle, WA},
year = {1994},
topology = { },
network = { },
encoding = { },
evolves = { },
applications = { }
}
@inproceedings{Munro93,
author = {Munro, P.},
title = {Genetic Search for Optimal Representations in Neural Networks},
booktitle = {Artificial Neural Nets and Genetic Algorithms},
year = {1993},
editor = {Albrecht, R. F. and Reeves, C. R. and Steele, N. C.},
publisher = {Springer-Verlag},
pages = {628--634},
topology = { },
network = { },
encoding = { },
evolves = { },
applications = { }
}
@article{Nagao92,
key = {genetic algorithms connectionism neural networks cogann},
author = {T. Nagao and T. Agui and H. Nagahashi},
title = {Structural Evolution of Neural Networks by a Genetic Method},
journal = {Transactions of the Institute of Electronics, Information and
Communication Engineers D-II},
year = {1992},
month = sep,
volume = {J75D-II},
number = {9},
pages = {1634--1637},
abstract = {ABSTRACT
A method of neural networks construction by a genetic algorithm
is proposed. Each network has mutual connections and is assumed to be a
living thing whose genes denote the connections among its units. In order
to find out a network available to the current task, the simulation of
evolution processes of the networks is executed.},
topology = { },
network = { },
encoding = { },
evolves = {connectivity},
applications = { }
}
@article{Nagao93,
author = {Nagao, T. and Agui, T. and Nagahashi, H.},
title = {Structural Evolution of Neural Networks Having Arbitrary
Connection by a Genetic Method},
journal = {IEICE Transactions on Information and Systems},
year = {1993},
volume = {E76-D},
number = {6},
pages = {689--697},
topology = { },
network = { },
encoding = { },
evolves = { },
applications = { }
}
@inproceedings{Nagao93a,
author = {Nagao, T. and Agui, T. and Nagahashi, H.},
title = {A Genetic Method for Optimization of Asynchronous Random Neural
Networks and its Application to Action Control},
booktitle = {IJCNN'93-NAGOYA Proceedings of the 1993 International Joint
Conference on Neural Networks, Nagoya (Japan)},
year = {1993},
publisher = {IEEE},
pages = {2869--2872},
topology = { },
network = { },
encoding = { },
evolves = { },
applications = { }
}
@article{Narayanan93,
key = {genetic algorithms connectionism neural networks cogann},
author = {M. N. Narayanan and S. B. Lucas},
title = {A Genetic Algorithm to Improve a Neural Network to
Predict a Patient's Response to Warfarin},
journal = {Methods of Information in Medicine},
year = {1993},
volume = {32},
number = {1},
pages = {55--58},
abstract = {ABSTRACT
The ability of neural networks to predict the international
normalised ratio (INR) for patients treated with Warfarin was
investigated.
Neural networks were obtained by using all the predictor variables in the
neural network, or by using a genetic algorithm to select an optimal
subset of predictor variables in a neural network. The use of a
genetic algorithm gave a marked and significant improvement in the
prediction of the INR in two of the three cases investigated. The
mean error in these cases, typically, reduced from $1.02 \pm 0.29$ to
$0.28 \pm 0.25$ (paired $t$-test, $t = -4.71$, $p < 0.001$, $n = 30$). The use of a
genetic algorithm with Warfarin data offers a significant enhancement
of the predictive ability of a neural network with Warfarin data,
identifies significant predictor variables, reduces the size of the
neural network and thus the speed at which the reduced network can be
trained, and reduces the sensitivity of a network to over-training.},
topology = { },
network = { },
encoding = { },
evolves = { },
applications = { }
}
@techreport{Nolfi90,
author = {Nolfi, S. and Elman, J. L. and Parisi, D.},
title = {Learning and Evolution in Neural Networks},
institution = {UCSD},
year = {1990},
month = jul,
number = {CRL TR 9019},
topology = { },
network = { },
encoding = { },
evolves = { },
applications = { }
}
@techreport{Nolfi91,
author = {Nolfi, S. and Parisi, D.},
title = {Auto-Teaching Networks That Develop Their Own Teaching Input},
institution = {Dept. of Cognitive Processes and Artificial Intelligence},
number = {PCIA-91-03},
year = {1991},
topology = { },
network = { },
encoding = { },
evolves = { },
applications = { }
}
@techreport{Nolfi91a,
author = {Nolfi, S. and Parisi, D.},
title = {Growing Neural Networks},
institution = {Institute of Psychology, CNR, Rome},
number = {PCIA-91-15},
year = {1991},
note = {Also in Proceedings of ALIFE III, 1992},
topology = {feed-forward},
network = { },
encoding = {indirect, developmental},
evolves = {connectivity, parameters},
applications = {simulated world}
}
@inproceedings{Nolfi93,
author = {Nolfi, S. and Parisi, D.},
title = {Self-Selection of Input Stimuli for Improving Performance},
booktitle = {Neural Networks In Robotics},
year = {1993},
editor = {Bekey, G. A. and Goldberg, K. Y.},
publisher = {Kluwer Academic Publishers},
pages = {403--420},
topology = { },
network = { },
encoding = { },
evolves = { },
applications = { }
}
@article{Neill92,
key = {genetic algorithms connectionism neural networks cogann},
author = {A. W. O'Neill},
title = {Genetic Based Training of Two-Layer, Optoelectronic
Neural Network},
journal = {Electronics Letters},
year = {1992},
month = jan,
volume = {28},
number = {1},
pages = {47--48},
abstract = {ABSTRACT
For the first time, the supervised training of a high-speed,
two-layer, optoelectronic neural network using a genetic algorithm is
demonstrated, and results for the 3 bit exclusive-or function are
presented.},
topology = { },
network = { },
encoding = { },
evolves = {parameters},
applications = {3-parity}
}
@book{Ohsuga91,
editor = {Setsuo Ohsuga and Hannu Kangassalo and Hannu Jaakkola and
Koichi Hori and N. Yonezaki},
title = {Information Modeling and Knowledge Bases: Foundations,
Theory, and Applications},
year = {1991},
publisher = {{IOS} Press},
address = {Amsterdam},
topology = { },
network = { },
encoding = { },
evolves = { },
applications = { }
}
@article{Oliker92,
key = {genetic algorithms connectionism neural networks cogann},
author = {S. Oliker and M. Furst and O. Maimon},
title = {A Distributed Genetic Algorithm for Neural Network Design
and Training},
journal = {Complex Systems},
year = {1992},
month = oct,
volume = {6},
number = {5},
pages = {459--477},
abstract = {ABSTRACT
A new approach for designing and training neural networks is
developed using a distributed genetic algorithm. A search for the
optimal architecture and weights of a neural network comprising
binary, linear threshold units is performed. For each individual
unit, the Authors look for the optimal set of connections and
associated weights under the restriction of a feedforward network
structure. This is accomplished with the modified genetic algorithm,
using an objective function-fitness-that considers, primarily, the
overall network error; and, secondarily, using the unit's possible
connections and weights that are preferable for continuity of
the convergence process. Examples are given showing the potential
of the proposed approach.},
topology = {feed-forward},
network = { },
encoding = { },
evolves = {connectivity, parameters},
applications = { }
}
@inproceedings{Oosthuizen89,
author = {Oosthuizen, G. D.},
title = {Machine Learning: A Mathematical Framework for Neural Network,
Symbolic and Genetic-Based Learning},
booktitle = {Proceedings of the Third International Conference on Genetic
Algorithms},
year = {1989},
pages = {385--390},
topology = { },
network = { },
encoding = { },
evolves = { },
applications = { }
}
@inproceedings{Opitz94,
author = {D. W. Opitz and J. W. Shavlik},
title = {Genetically Refining Topologies of Knowledge-Based
Neural Networks},
booktitle = {International Symposium on Integrating Knowledge and
Neural Heuristics},
year = {1994},
month = may,
address = {Pensacola, FL},
pages = {57--66},
topology = { },
network = { },
encoding = { },
evolves = { },
applications = { }
}
@inproceedings{Opitz94a,
author = {D. W. Opitz and J. W. Shavlik},
title = {Using Genetic Search to Refine Knowledge-Based
Neural Networks},
booktitle = {Machine Learning: Proceedings of the Eleventh International
Conference},
year = {1994},
month = jul,
publisher = {Morgan Kaufmann},
address = {New Brunswick, NJ},
topology = { },
network = { },
encoding = { },
evolves = { },
applications = { }
}
@inproceedings{Paredis90,
key = {genetic algorithms, connectionism cogann},
author = {Jan Paredis},
title = {The Evolution of Behavior: Some Experiments},
booktitle = {Simulation of Adaptive Behavior},
year = {1990}
}
@article{Parisi90,
author = {Parisi, D. and Cecconi, F. and Nolfi, S.},
title = {Econets: Neural Networks That Learn in an Environment},
journal = {Network},
year = {1990},
volume = {1},
pages = {149--168},
topology = { },
network = { },
encoding = { },
evolves = { },
applications = { }
}
@techreport{Parisi91,
author = {Parisi, D. and Nolfi, S. and Cecconi, F.},
title = {Learning, Behaviour and Evolution},
institution = {C.N.R. Rome},
year = {1991},
number = {PCIA-91-14},
note = {Also in Proceedings of the First European Conference on
Artificial Life, ECAL 91, pp. 207--216, Varela, F. J. and Bourgine, P. (Eds.)},
topology = { },
network = { },
encoding = { },
evolves = { },
applications = { }
}
@techreport{Penfold93a,
author = {Penfold, H.B. and Kohlmorgen, U. and Schmeck, H.},
title = {Deriving Application-Specific Neural Nets Using a Massively
Parallel Genetic Algorithm},
institution = {The University of Newcastle, Australia},
number = {Parallel GA 004},
year = {1993},
topology = { },
network = { },
encoding = { },
evolves = { },
applications = { }
}
@inproceedings{Petridis93,
author = {V. Petridis and S. Kazarlis and A. Papaikonomou},
title = {A Genetic Algorithm for Training Recurrent Neural Networks},
booktitle = {Proceedings of IJCNN '93, Nagoya Japan},
year = {1993},
pages = {2706--2709},
topology = {recurrent},
network = { },
encoding = { },
evolves = {parameters},
applications = { }
}
@inproceedings{Petridis92,
author = {V. Petridis and S. Kazarlis and A. Papaikonomou and A. Filelis},
title = {A Hybrid Genetic Algorithm for Training Neural Networks},
booktitle = {Proceedings of ICANN '92, Brighton England},
year = {1992},
pages = {953--956},
topology = { },
network = { },
encoding = { },
evolves = {parameters},
applications = { }
}
@inproceedings{Philipsen93,
author = {Philipsen, W. J. M. and Cluitmans, L. J. M.},
title = {Using a Genetic Algorithm to Tune {Potts} Neural Networks},
booktitle = {Artificial Neural Nets and Genetic Algorithms},
year = {1993},
editor = {Albrecht, R. F. and Reeves, C. R. and Steele, N. C.},
publisher = {Springer-Verlag},
pages = {650--657},
topology = { },
network = { },
encoding = { },
evolves = { },
applications = { }
}
@inproceedings{Porto90,
key = {genetic algorithms, connectionism, cogann ref},
author = {Vincent W. Porto and David B. Fogel},
title = {Neural Network Techniques for Navigation of {AUVs}},
booktitle = {Proceedings of the IEEE Symposium on Autonomous Underwater
Vehicle Technology},
year = {1990},
month = jun,
note = {Held June 5--6, 1990},
address = {Washington, DC},
pages = {137--141}
}
@inproceedings{Potter92,
author = {Potter, M. A.},
title = {A Genetic Cascade-Correlation Learning Algorithm},
booktitle = {Proceedings of COGANN-92 International Workshop on
Combinations of Genetic Algorithms and Neural Networks},
year = {1992},
editor = {Whitley, L. D. and Schaffer, J. D.},
publisher = {IEEE Computer Society Press},
pages = {123--133},
topology = { },
network = { },
encoding = { },
evolves = { },
applications = { }
}
@article{Prados92,
key = {genetic algorithms connectionism},
author = {Donald L. Prados},
title = {New Learning Algorithm for Training Multilayered
Neural Networks that Uses Genetic-Algorithm Techniques},
journal = {Electronics Letters},
year = {1992},
month = jul,
volume = {28},
number = {16},
pages = {1560--1561},
topology = { },
network = { },
encoding = { },
evolves = {parameters},
applications = { }
}
@inproceedings{Prados92a,
key = {genetic algorithms connectionism neural networks cogann},
author = {Donald L. Prados},
title = {Training Multilayered Neural Networks by Replacing
the Least Fit Hidden Neurons},
booktitle = {Proceedings of IEEE SOUTHEASTCON},
year = {1992},
volume = {2},
pages = {634--637},
publisher = {IEEE},
address = {Piscataway, NJ},
abstract = {ABSTRACT
The Author discusses a supervised-learning algorithm, called
GenLearn, for training multilayered neural networks. GenLearn uses
techniques from the field of genetic algorithms to perform a global
search of weight space and, thereby, to avoid the common problem of
getting stuck in local minima. GenLearn is based on survival of the
fittest hidden neuron. In searching for the most fit hidden neurons,
GenLearn searches for a globally optimal internal representation of
the input data. A big advantage of the GenLearn procedure over the
generalized delta rule (GDR) in training three-layered neural nets
is that, during each iteration of GenLearn, each weight in the first
matrix is modified only once, whereas, in the GDR procedure, each
weight in the first matrix is modified once for each output-layer
neuron. What makes this such a big advantage is that, although GenLearn
often reaches the desired mean square error in about the same number of
iterations as the GDR, each iteration takes considerably less time.},
topology = { },
network = { },
encoding = { },
evolves = {parameters},
applications = { }
}
@phdthesis{Radcliffe90,
key = {genetic algorithms, connectionism, cogann ref},
author = {Nicholas J. Radcliffe},
title = {Genetic Neural Networks on {MIMD} Computers},
school = {Dept. of Theoretical Physics,
University of Edinburgh},
year = {1990},
address = {Edinburgh, Scotland},
topology = { },
network = { },
encoding = { },
evolves = { },
applications = { }
}
@inproceedings{Reeves92,
key = {genetic algorithms connectionism neural networks cogann},
author = {C. Reeves and N. Steele},
title = {Problem-Solving by Simulated Genetic Processes: A
Review and Application to Neural Networks},
booktitle = {Proceedings of the Tenth IASTED International
Conference. Applied Informatics},
year = {1992},
editor = {M. H. Hamza},
publisher = {Acta Press},
address = {Zurich, Switzerland},
pages = {269--272},
abstract = {ABSTRACT
In the past decade, researchers have become aware of the value
of simulating natural processes in order to solve large and difficult
problems. One example which is attracting increasing attention is the
idea of a genetic algorithm (GA). The first part of this paper provides
a review of the basic concepts underlying genetic algorithms. The
methodology is illustrated by a simple example, and some of the
issues involved in more advanced GAs are discussed. Finally, it
describes some of their applications. The second part describes in
some detail research carried out in applying genetic algorithms to the
field of neural networks, in particular to the multi-layer perceptron
(MLP). This work falls into two main areas. The first of these deals
with the question of the design of a neural network architecture,
and the choice of a training regime for a particular problem.
The second area of application is to the basic learning process
itself. Traditionally, the MLP has been trained by a process called
back-propagation. This paper reports on an alternative method based
on a GA, and it is argued that such an approach has many advantages over
back-propagation.},
topology = {multi-layered},
network = {multi-layer perceptron},
encoding = { },
evolves = {connectivity, parameters},
applications = { }
}
@article{Rehm92,
key = {genetic algorithms connectionism neural networks cogann},
author = {W. Rehm and V. Sterzing},
title = {An Optimization Method for Multilayer Perceptron
Based on Evolution-Theoretic Principles},
journal = {Informationstechnik - IT},
year = {1992},
month = oct,
volume = {34},
number = {5},
pages = {307--312},
abstract = {ABSTRACT
Pattern classification tasks can be addressed successfully
using multilayer perceptrons, i.e. a simple form of feed-forward neural
nets. The training of a neural net can be regarded to be a parametric
optimization problem, for which several possible algorithms are known.
These differ in efficiency, given a fixed complexity of structure and
classification task, and in the implementation constraints on parallel
hardware. The Authors introduce a new method based on
evolution-theoretic principles using operators from genetic algorithms,
that is well suited for a parallel implementation on MIMD architectures.
Finally they provide some results on parity problems.},
topology = {multi-layered},
network = {multi-layer perceptron},
encoding = { },
evolves = {parameters},
applications = { }
}
@inproceedings{Robbins93,
key = {genetic algorithms connectionism neural networks cogann},
author = {P. Robbins and A. Soper and K. Rennolls},
title = {Use of Genetic Algorithms for Optimal Topology
Determination in Back Propagation Neural Networks},
booktitle = {Proceedings of the International Conference on Artificial
Neural Networks and Genetic Algorithms},
year = {1993},
pages = {726--730},
topology = {feed-forward},
network = { },
encoding = { },
evolves = {connectivity},
applications = { }
}
@inproceedings{Romaniuk93,
key = {genetic algorithms connectionism neural networks cogann},
author = {Steve G. Romaniuk},
title = {Evolutionary Growth Perceptrons},
booktitle = {Proceedings of the Fifth International Conference on
Genetic Algorithms},
year = {1993},
topology = { },
network = {perceptron},
encoding = { },
evolves = { },
applications = { }
}
@techreport{Rudnick90,
key = {connectionism, genetic algorithms, cogann ref},
author = {Michael Rudnick},
title = {A Bibliography: The Intersection of Genetic Search and
Artificial Neural Networks},
institution = {Department of Computer Science and Engineering, Oregon
Graduate Institute},
year = {1990},
number = {CS/E 90-001}
}
@phdthesis{Rudnick92,
key = {connectionism, contiguity problem},
author = {Michael Rudnick},
title = {Genetic Algorithms and Fitness Variance with an Application
to the Automatic Design of Artificial Neural Networks},
school = {Oregon Graduate Institute of Science and Technology},
year = {1992},
address = {Portland, OR},
note = {Unpublished},
topology = { },
network = { },
encoding = { },
evolves = {connectivity},
applications = { }
}
@inproceedings{Rudnick93,
key = {connectionism, genetic algorithms, cogann},
author = {Michael Rudnick},
title = {Evolutionary Network Design and the Contiguity Problem},
booktitle = {Proceedings of the World Congress on Neural Networks 93},
organization = {WCNN93},
year = {1993},
pages = {IV135--IV138},
abstract = {ABSTRACT
Given a particular problem to solve using an artificial neural network,
we wish to find a superior network architecture; this is called the
network design problem. One approach is to use evolutionary methods,
or evolutionary network design (END). The contiguity problem consists
of counting the number of clumps of 1's in a binary input field.
It is a good test problem for END because, for back-propagation
networks, the space of network architectures has been characterized
with respect to network generalization ability. We present experience
gained using END to find superior network architectures for the
contiguity problem.},
topology = { },
network = { },
encoding = { },
evolves = {connectivity},
applications = {contiguity}
}
@inproceedings{Schaffer92,
key = {genetic algorithms, connectionism, neural networks},
author = {J. David Schaffer and Darrell Whitley and Larry J. Eshelman},
title = {Combinations of Genetic Algorithms and Neural
Networks: A Survey of the State of the Art},
booktitle = {Proceedings of the Conference on Combinations of Genetic
Algorithms and Neural Networks},
year = {1992},
pages = {1--37},
topology = { },
network = { },
encoding = { },
evolves = { },
applications = { }
}
@article{Scherf92,
key = {genetic algorithms connectionism neural networks cogann},
author = {A. V. Scherf and L. D. Voelz},
title = {Training Neural Networks with Genetic Algorithms for Target
Detection},
journal = {Proceedings of the SPIE - The International Society for
Optical Engineering},
year = {1992},
volume = {1710, pt.1},
pages = {II-734--II-741},
abstract = {ABSTRACT
Algorithms for training artificial neural networks, such as
backpropagation, often employ some form of gradient descent in their
search for an optimal weight set. The problem with such algorithms
is their tendency to converge to local minima, or not to converge at
all. Genetic algorithms simulate evolutionary operators in their
search for optimality. The techniques of genetic search are applied
to training a neural network for target detection in infrared imagery.
The algorithm design, parameters, and experimental results are
detailed. Testing verifies that genetic algorithms are a useful and
effective approach for neural network training.},
topology = {feed-forward},
network = { },
encoding = { },
evolves = {parameters},
applications = { }
}
@inproceedings{Schiffmann93,
key = {genetic algorithms connectionism neural networks cogann
application classification of thyroid tests},
author = {W. Schiffmann and M. Joost and R. Werner},
title = {Application of Genetic Algorithms to the Construction
of Topologies for Multilayer Perceptrons},
booktitle = {Proceedings of the International Conference on Artificial
Neural Networks and Genetic Algorithms},
year = {1993},
pages = {675--682},
topology = {multi-layered},
network = {multi-layer perceptron},
encoding = { },
evolves = {connectivity},
applications = { }
}
@incollection{Schiffmann90,
key = {connectionism},
author = {W. Schiffmann and K. Mecklenburg},
title = {Genetic Generation of Backpropagation Trained Neural Networks},
booktitle = {Parallel Processing in Neural Systems and Computers},
year = {1990},
editor = {R. Eckmiller and G. Hartmann and G. Hauske},
publisher = {Elsevier Science Publishers},
pages = {205--208},
topology = {feed-forward},
network = { },
encoding = { },
evolves = {connectivity},
applications = { }
}
@inproceedings{Schizas92,
key = {genetic algorithms, connectionism, neural networks},
author = {C. N. Schizas and C. S. Pattichis and L. T. Middleton},
title = {Neural Networks, Genetic Algorithms and {K-Means}
Algorithm: In Search of Data Classification},
booktitle = {Proceedings of the Conference on Combinations of Genetic
Algorithms and Neural Networks},
year = {1992},
pages = {201--222},
topology = { },
network = { },
encoding = { },
evolves = { },
applications = { }
}
@inproceedings{Sebald90,
author = {Sebald, A. V. and Fogel, D. B.},
title = {Design of {SLAYR} Neural Networks Using Evolutionary Programming},
booktitle = {Proceedings of the Twenty-Fourth Asilomar Conference on
Signals, Systems and Computers},
year = {1990},
editor = {Chen, R. R.},
publisher = {The Computer Society of IEEE/Maple Press},
volume = {2},
pages = {1020--1024},
topology = { },
network = { },
encoding = { },
evolves = { },
applications = { }
}
@inproceedings{Sebald91,
key = {genetic algorithms, connectionism, cogann ref},
author = {A. V. Sebald and D. B. Fogel},
title = {Using Evolutionary Neural Networks for Arterial Waveform
Discrimination},
booktitle = {Proceedings of the International Joint Conference on Neural
Networks},
year = {1991},
volume = {II},
pages = {A-955},
topology = { },
network = { },
encoding = { },
evolves = { },
applications = { }
}
@inproceedings{Sebald92,
key = {genetic algorithms connectionism neural networks cogann},
author = {A. V. Sebald and J. Schlenzig and D. B. Fogel},
title = {Minimax Design of {CMAC} Encoded Neural Network
Controllers Using Evolutionary Programming},
booktitle = {Asilomar Conference on Circuits, Systems \& Computers},
year = {1992},
volume = {1},
pages = {551--555},
publisher = {Maple Press, Inc},
address = {San Jose, CA, USA},
abstract = {ABSTRACT
The Authors describe the use of evolutionary programming for
computer-aided design and testing of cerebellar model arithmetic computer
(CMAC) encoded neural network regulators. The design and testing
problem is viewed as a game in that the controller parameters are to
be chosen with a minimax criterion, i.e. to minimize the loss
associated with their use on the worst possible plant parameters.
The technique permits analysis of neural strategies against a set of
plants. This gives both the best choice of control parameters and
identification of the plant configuration which is most difficult
for the best controller to handle.},
topology = { },
network = { },
encoding = { },
evolves = { },
applications = { }
}
@techreport{Sharp91,
key = {connectionism, cogann ref},
author = {David H. Sharp and John Reinitz and Eric Mjolsness},
title = {Genetic Algorithms for Genetic Neural Nets},
institution = {Department of Computer Science, Yale University},
year = {1991},
month = jan,
type = {Research Report},
number = {YALEU/DCS/TR-845},
topology = { },
network = { },
encoding = { },
evolves = { },
applications = { }
}
@inproceedings{Shibata93,
author = {Shibata, T. and Fukada, T. and Tanie, K.},
title = {Nonlinear Backlash Compensation Using Recurrent Neural Network
- Unsupervised Learning by Genetic Algorithm},
booktitle = {IJCNN'93-NAGOYA Proceedings of the 1993 International Joint
Conference on Neural Networks, Nagoya (Japan)},
year = {1993},
publisher = {IEEE},
pages = {742--745},
topology = {recurrent},
network = { },
encoding = { },
evolves = {parameters},
applications = { }
}
@inproceedings{Shibata93a,
author = {Shibata, T. and Fukada, T. and Tanie, K.},
title = {Fuzzy Critic for Robotic Motion Planning by Genetic Algorithm
in Hierarchical Intelligent Control},
booktitle = {IJCNN'93-NAGOYA Proceedings of the 1993 International Joint
Conference on Neural Networks, Nagoya (Japan)},
year = {1993},
publisher = {IEEE},
pages = {770--773},
topology = { },
network = { },
encoding = { },
evolves = { },
applications = {robot controller}
}
@inproceedings{Shibata93b,
author = {Shibata, T. and Fukada, T. and Tanie, K.},
title = {Synthesis of Fuzzy Artificial Intelligence, Neural Networks,
and Genetic Algorithms for Hierarchical Intelligent Control},
booktitle = {IJCNN'93-NAGOYA Proceedings of the 1993 International Joint
Conference on Neural Networks, Nagoya (Japan)},
year = {1993},
publisher = {IEEE},
pages = {2869--2872},
topology = { },
network = { },
encoding = { },
evolves = { },
applications = { }
}
@inproceedings{Shonkwiler92,
key = {genetic algorithms, connectionism, neural networks},
author = {R. Shonkwiler and Kenyon R. Miller},
title = {Genetic Algorithm/Neural Network Synergy For
Nonlinear Constrained Optimization Problems},
booktitle = {Proceedings of the Conference on Combinations of Genetic
Algorithms and Neural Networks},
year = {1992},
pages = {248--257},
topology = { },
network = { },
encoding = { },
evolves = { },
applications = {optimization}
}
@techreport{Smieja92,
key = {genetic algorithms connectionism neural networks cogann},
author = {F. J. Smieja},
title = {Evolution of Intelligent Systems in a Changing Environment},
institution = {Gesellschaft fuer Mathematik und Datenverarbeitung m.b.H.},
year = {1992},
address = {Bonn, Germany},
number = {GMD-623},
pages = {24},
abstract = {ABSTRACT
In the report a first version of a simulated robot is described, which
will embody both neural network and genetic algorithm optimization
procedures. The system is modularly structured, with neural networks
at the lower (recognition) level of the simple brain of the robot,
and at the higher level prescribed decision behaviors are followed.
It is the higher level parameters determining the nature of the
decisions made that are to be optimized via genetic algorithms.
Having sketched the structure and method of operation of the
prototype robot, a community of robots situation is introduced as
the next stage, for optimization within a robot-inhabited world.},
topology = { },
network = { },
encoding = { },
evolves = { },
applications = {simulated world}
}
@article{Smith94,
author = {Smith, R. E. and Cribbs, H. B.},
title = {Is a Learning Classifier System a Type of Neural Network?},
journal = {Evolutionary Computation},
year = {1994},
volume = {2},
number = {1},
pages = {19--36},
topology = { },
network = { },
encoding = { },
evolves = { },
applications = { }
}
@inproceedings{Smith93,
key = {cogann genetic algorithms, connectionism, neural networks,
classifier systems},
author = {Robert E. Smith},
title = {Genetic Learning in Rule-Based and Neural Systems},
booktitle = {Proceedings of the Third International
Workshop on Neural Networks and Fuzzy Logic},
year = {1993},
month = jan,
volume = {1},
pages = {183},
publisher = {NASA Johnson Space Center},
abstract = {ABSTRACT
The design of neural networks and fuzzy systems can involve complex,
nonlinear, and ill-conditioned optimization problems. Often, traditional
optimization schemes are inadequate or inapplicable for such tasks.
Genetic Algorithms (GA's) are a class of optimization procedures
whose mechanics are based on those of natural genetics. Mathematical
arguments show how GAs bring substantial computational leverage to
search problems, without requiring the mathematical characteristics
often necessary for traditional optimization schemes (e.g., modality,
continuity, availability of derivative information, etc.). GA's
have proven effective in a variety of search tasks that arise in
neural networks and fuzzy systems. This presentation begins by
introducing the mechanism and theoretical underpinnings of GA's.
GA's are then related to a class of rule-based machine learning
systems called learning classifier systems (LCS's). An LCS
implements a low-level production-system that uses a GA as its
primary rule discovery mechanism. This presentation illustrates how,
despite its rule-based framework, an LCS can be thought of as a
competitive neural network. Neural network simulator code for an
LCS is presented. In this context, the GA is doing more than
optimizing an objective function. It is searching for an ecology
of hidden nodes with limited connectivity. The GA attempts to
evolve this ecology such that effective neural network
performance results. The GA is particularly well adapted to this
task, given its naturally-inspired basis. The LCS/neural network
analogy extends itself to other, more traditional neural networks.
Conclusions to the presentation discuss the implications of using
GA's in ecological search problems that arise in neural and
fuzzy systems.},
topology = { },
network = { },
encoding = { },
evolves = { },
applications = { }
}
@inproceedings{Spears90,
key = {COGANN connectionism},
author = {Spears, W. M. and {De Jong}, K. A.},
title = {Using Neural Networks and Genetic Algorithms as
Heuristics for {NP-Complete} Problems},
booktitle = {Proceedings of the International Joint Conference on Neural
Networks},
year = {1990},
pages = {118--121},
topology = { },
network = { },
encoding = { },
evolves = { },
applications = { }
}
@inproceedings{Spiessens92,
key = {genetic algorithms connectionism neural networks cogann},
author = {P. Spiessens and J. Torreele},
title = {Massively Parallel Evolution of Recurrent Networks:
An Approach to Temporal Processing},
booktitle = {Toward a Practice of Autonomous Systems. Proceedings of the
First European Conference on Artificial Life},
year = {1992},
editor = {F.J. Varela and P. Bourgine},
publisher = {MIT Press},
address = {Cambridge, MA, USA},
pages = {70--77},
abstract = {ABSTRACT
The authors investigate an evolutionary approach to the problem
of time-dependent processing with recurrent networks. Both structure and
weights of these networks are evolved by a fine-grained parallel genetic
algorithm. The parallel nature of this algorithm, which enables the
co-evolution of clusters of networks, made it possible to successfully
solve three non-trivial temporal processing problems. One of
these problems consists of evolving a trail-following behaviour
for an artificial ant.},
topology = {recurrent},
network = { },
encoding = { },
evolves = {connectivity, parameters},
applications = {simulated world}
}
@inproceedings{Stacey91,
key = {genetic algorithms, connectionism, cogann ref},
author = {Deborah A. Stacey and Stefan Kremer},
title = {The Guelph Darwin Project: The Evolution of Neural Networks
by Genetic Algorithms},
booktitle = {Proceedings of the International Joint Conference on Neural
Networks},
year = {1991},
volume = {II},
pages = {A-957},
topology = { },
network = { },
encoding = { },
evolves = { },
applications = { }
}
@inproceedings{Stork90,
key = {genetic algorithms, connectionism},
author = {D.G. Stork and S. Walker and M. Burns and B. Jackson},
title = {Preadaptation in Neural Circuits},
booktitle = {Proceedings of the International Joint Conference on
Neural Networks},
year = {1990},
pages = {I-202--I-205},
topology = { },
network = { },
encoding = { },
evolves = { },
applications = { }
}
@inproceedings{Suzuki91,
key = {genetic algorithms applications pattern classification
categorization; relation AI machine learning connectionist networks
basin; associative memory model; analysis, neural, cogann ref},
author = {Keiji Suzuki and Yukinori Kakazu},
title = {An Approach to the Analysis of the Basins of the
Associative Memory Model Using Genetic Algorithms},
booktitle = {Proceedings of the Fourth International Conference on
Genetic Algorithms},
year = {1991},
pages = {539--546},
abstract = {Abstract:
In this paper, an approach to the analysis of the brain
of a correlational associative memory model using the Genetic
Algorithms and a new training algorithm for this model is described.
The recalling process of a model described by direction cosine
is insufficient for the better understanding of the dynamical
behavior of the model. In order to know the characteristics of
memorized states, the methodology of the Genetic Algorithms applied
to analyze the recalling process concerned with each memorized
state is proposed. Furthermore, before the analyzing, the
LU-algorithm is proposed to give the model the ability of
keeping a wide basin in both highly memorized rates and mutually
non-orthogonal states. Finally, results of experiments related
to the basin analysis are shown.},
topology = { },
network = { },
encoding = { },
evolves = { },
applications = { }
}
@inproceedings{Tackett91,
key = {connectionism, genetic algorithms, cogann},
author = {Walter Alden Tackett},
title = {Genetic Generation of Dendritic Trees for Image Classification},
booktitle = {Proceedings of the World Congress on Neural Networks},
year = {1993},
pages = {IV646--IV649},
abstract = {ABSTRACT
Genetic Programming (GP) is an adaptive method for generating executable
programs from labeled training data. It differs from the conventional
methods of Genetic Algorithms because it manipulates tree structures
of arbitrary size and shape rather than fixed length binary strings.
We apply GP to the development of a processing tree with a dendritic,
or neuron-like structure: measurements from a set of input nodes
are weighted and combined through linear and nonlinear operations
to form an output response. Unlike conventional neural methods,
no constraints are placed upon size, shape, or order of processing
within the network. This network is used to classify feature
vectors extracted from IR imagery into target/nontarget categories
using a database of 2000 training samples. Performance is tested
against a separate database of 7000 samples. For purposes of
comparison, the same training and test sets are used to train
two other adaptive classifier systems, the binary tree classifier
and the Backpropagation neural network. The GP network achieves
higher performance with reduced computational requirements.},
topology = { },
network = { },
encoding = {indirect, LISP program},
evolves = { },
applications = {image classification}
}
@inproceedings{Takagi93,
key = {connectionism, genetic algorithms, cogann},
author = {Hideyuki Takagi},
title = {Neural Network and Genetic Algorithm Techniques for
Fuzzy Systems},
booktitle = {Proceedings of the World Congress on Neural Networks},
year = {1993},
pages = {II631--II634},
abstract = {ABSTRACT
This paper introduces (1) how neural networks and genetic algorithms
have been used for auto-designing fuzzy systems, (2) how neural
networks are combined with fuzzy systems in commercial applications,
and (3) how fuzzy systems are used to improve the performance of
neural networks and genetic algorithms.},
topology = { },
network = { },
encoding = { },
evolves = { },
applications = { }
}
@article{Tamburino92,
key = {genetic algorithms connectionism neural networks cogann
cooperative application},
author = {Louis A. Tamburino and Mateen M. Rizki},
title = {Performance-Driven Autonomous Design of
Pattern-Recognition Systems},
journal = {Applied Artificial Intelligence},
year = {1992},
volume = {6},
number = {1},
pages = {59--77},
abstract = {ABSTRACT
The closed-loop design experiment described in this paper
demonstrates a three-phase automated design approach to pattern
recognition. The experiment generates morphological feature detectors and
then uses a novel application of genetic algorithms to select cooperative
sets of features to pass to a neural net classifier. The self-organizing
hybrid learning approach embodied in this closed-loop design
methodology is complementary to conventional artificial intelligence
(AI) expert systems that utilize rule-based approaches and a
specific set of design elements. This experiment is part of a
study directed to emulating the nondirected processes of biological
evolution. The approach we discuss is semiautomatic in that
initialization of computer programs requires human experience and
expertise to select representations, develop search strategies,
choose performance measures, and devise resource-allocation strategies.
The hope is that these tasks will become easier with experience
and will provide the means to exploit parallel processing without
the need to analyze or program an entire design solution.},
topology = { },
network = { },
encoding = { },
evolves = { },
applications = { }
}
@inproceedings{Thierens93,
key = {genetic algorithms connectionism neural networks cogann
pole inversion},
author = {Thierens, D. and Suykens, J. and Vandewalle, J. and De Moor, B.},
title = {Genetic Weight Optimization of a Feedforward Neural
Network Controller},
booktitle = {Artificial Neural Nets and Genetic Algorithms
Proceedings of the International Conference
at Innsbruck, Austria},
year = {1993},
editor = {R.F.~Albrecht and C.R.~Reeves and N.C.~Steele},
publisher = {Springer},
address = {Wien and New York},
pages = {658--663},
abstract = {ABSTRACT
The optimization of the weights of a feedforward neural network
with a genetic algorithm is discussed. The search by the
recombination operator is hampered by the existence of two functionally
equivalent symmetries in feedforward neural networks. To sidestep
these representation redundancies we reorder the hidden neurons
on the genotype before recombination according to a weight sign
matching criterion, and flip the weight signs of a hidden
neuron's connections whenever there are more inhibitory than
excitatory incoming and outgoing links. As an example we optimize
a feedforward neural network that implements a nonlinear
optimal control law. The neural controller has to swing up the
inverted pendulum from its lower equilibrium point to its
upper equilibrium point and stabilize it there. Finding weights of
the network represents a nonlinear optimization problem which is
solved by the genetic algorithm.},
topology = {feed-forward},
network = { },
encoding = { },
evolves = {parameters},
applications = {controller}
}
@unpublished{Todd88,
author = {Todd, P.},
title = {Evolutionary Methods for Connectionist Architectures},
year = {1988},
note = {unpublished internal report, Stanford},
institution = {Stanford},
topology = { },
network = { },
encoding = { },
evolves = { },
applications = { }
}
@inproceedings{Todd??,
key = {genetic algorithm, connectionism, Hebbian learning},
author = {Peter M. Todd and Geoffrey F. Miller},
title = {Exploring Adaptive Agency II: Simulating the Evolution
of Associative Learning},
booktitle = {Proceedings of the International Conference on Simulation of
Adaptive Behavior: From Animals to Animats},
editor = {S. Wilson and J.-A. Meyer},
publisher = {MIT Press},
pages = {306--315},
topology = { },
network = { },
encoding = { },
evolves = { },
applications = { }
}
@inproceedings{Torreele91,
author = {Torreele, J.},
title = {Temporal Processing With Recurrent Networks: An Evolutionary
Approach},
booktitle = {Proceedings of the Fourth International Conference on Genetic
Algorithms},
year = {1991},
editor = {Belew, R.K. and Booker, L.B.},
publisher = {Morgan Kaufmann},
pages = {555--561},
abstract = {Abstract:
In this paper we present an evolutionary approach to the problem
of temporal processing with recurrent networks. A genetic algorithm
is used to evolve both structure and weights, so as to alleviate
the design and learning problem recurrent networks suffer from.
The viability of this approach is demonstrated by successfully
solving two nontrivial temporal processing problems. The
important technique of teacher forcing is identified and its
influence on the performance of the algorithm is empirically
demonstrated.},
topology = {recurrent},
network = { },
encoding = { },
evolves = {connectivity, parameters},
applications = {temporal pattern recognition}
}
@inproceedings{Toth93,
key = {connectionism, genetic algorithms, cogann},
author = {Gabor J. Toth and Andras Lorincz},
title = {Genetic Algorithm With Migration on Topology Conserving Maps},
booktitle = {Proceedings of the World Congress on Neural Networks},
organization = {WCNN93},
year = {1993},
pages = {III168--III171},
abstract = {ABSTRACT
Optimization problems depending on external variables (parameters)
are treated with the help of a Kohonen network extended by a
genetic algorithm (GA). The optimal solution is assumed to have
continuous dependence on the external variables. The GA was
generalized to organize individuals into subpopulations, which
were allocated in the space of the external variables in an optimal
fashion by Kohonen digitization. Individuals were allowed to
breed within their own subpopulations and in neighboring ones
(migration). To illustrate the strength of the modified GA the
optimal control of a simulated robot-arm is treated: a falling
ping-pong ball has to be caught by a bat without bouncing. It is
shown that the simultaneous optimization problem (for different
values of the external parameter) can be solved successfully, and
the migration can considerably reduce computation time.},
topology = { },
network = {kohonen},
encoding = { },
evolves = { },
applications = { }
}
@incollection{Uhr63,
author = {Uhr, L. and Vossler, C.},
title = {A Pattern Recognition Program that Generates, Evaluates and Adjusts
its Own Operators},
booktitle = {Computers and Thought},
year = {1963},
editor = {Feigenbaum, E. and Feldman, J.},
publisher = {McGraw-Hill},
address = {New York},
topology = {feed-forward},
network = { },
encoding = { },
evolves = {feature detectors},
applications = {pattern classification}
}
@inproceedings{Uthmann93,
key = {connectionism cogann},
author = {T. Uthmann and D. Polani},
title = {Training Kohonen Feature Maps in Different
Topologies: An analysis using Genetic Algorithms},
booktitle = {Proceedings of the International Conference on Genetic
Algorithms},
year = {1993},
network = {kohonen},
encoding = { },
evolves = { },
applications = { }
}
@phdthesis{Vaario93,
author = {Jari Vaario},
title = {An Emergent Modeling Method for Artificial Neural Networks},
year = {1993},
school = {The University of Tokyo},
topology = { },
network = { },
encoding = { },
evolves = { },
applications = { }
}
@article{Vaario94,
author = {Jari Vaario},
title = {Artificial Life as Constructivist {AI}},
journal = {Journal of {SICE} ({J}apanese {S}ociety of {I}nstrument
and {C}ontrol {E}ngineers)},
year = {1994},
volume = {33},
number = {1},
pages = {65--71},
topology = { },
network = { },
encoding = { },
evolves = { },
applications = { }
}
@article{Vaario94a,
author = {Jari Vaario},
title = {From Evolutionary Computation to Computational Evolution},
journal = {Informatica},
year = {1994},
note = {(to appear)},
topology = { },
network = { },
encoding = { },
evolves = { },
applications = { }
}
@inproceedings{Vaario94b,
author = {Jari Vaario},
title = {Modeling Adaptative Self-Organization},
booktitle = {Proceedings of Artificial {L}ife {IV}},
year = {1994},
month = {July 6-8},
topology = { },
network = { },
encoding = { },
evolves = { },
applications = { }
}
@article{Vaario93a,
author = {Jari Vaario and Koichi Hori and Setsuo Ohsuga},
title = {Toward Evolutionary Design of Autonomous Systems},
journal = {The International Journal in Computer Simulation},
year = {1994},
note = {(to appear)},
topology = { },
network = { },
encoding = { },
evolves = { },
applications = { }
}
@incollection{Vaario91,
author = {Jari Vaario and Setsuo Ohsuga},
title = {Adaptive Neural Architectures through Growth Control},
booktitle = {Intelligent Engineering Systems through Artificial Neural
Networks},
year = {1991},
pages = {11--16},
topology = { },
network = { },
encoding = { },
evolves = { },
applications = { }
}
@article{Vaario92,
author = {Jari Vaario and Setsuo Ohsuga},
title = {An Emergent Construction of Adaptive Neural Architectures},
journal = {Heuristics - The Journal of Knowledge Engineering},
year = {1992},
volume = {5},
number = {2},
topology = { },
network = { },
encoding = { },
evolves = { },
applications = { }
}
@incollection{Vaario93b,
author = {Jari Vaario and Setsuo Ohsuga},
title = {On Growing Intelligence},
booktitle = {Neural Networks and a New {AI}},
year = {1994},
topology = { },
network = { },
encoding = { },
evolves = { },
applications = { }
}
@incollection{Vaario91a,
author = {Jari Vaario and Setsuo Ohsuga and Koichi Hori},
title = {Connectionist Modeling Using {Lindenmayer} Systems},
booktitle = {Information Modeling and Knowledge Bases: Foundations,
Theory, and Applications},
year = {1991},
pages = {496--510},
topology = { },
network = { },
encoding = {indirect, L-systems},
evolves = { },
applications = { }
}
@article{Vemuri92,
author = {V. Vemuri},
title = {Neural Networks Can be Used for Open-Loop, Dynamic Control},
journal = {International Journal of Neural Networks},
year = {1992},
volume = {2},
pages = {71--84},
topology = { },
network = { },
encoding = { },
evolves = { },
applications = {control systems}
}
@inproceedings{Vico91,
author = {Vico, F.J. and Sandoval, F.},
title = {Use of Genetic Algorithms in Neural Networks Definition},
booktitle = {Artificial Neural Networks, IWANN91, Granada},
year = {1991},
editor = {Prieto, A.},
series = {Lecture Notes in Computer Science},
volume = {540},
publisher = {Springer-Verlag},
pages = {196--203},
topology = { },
network = { },
encoding = { },
evolves = { },
applications = { }
}
@article{Vico92,
key = {genetic algorithms connectionism neural networks cogann},
author = {F.J. Vico and F. Sandoval},
title = {Neural Networks Definition Algorithm},
journal = {Microprocessing and Microprogramming},
year = {1992},
volume = {34},
number = {1-5},
pages = {251--254},
abstract = {ABSTRACT
There is not a general methodology for neural network
definition. The authors propose an algorithm highly inspired on
biological concepts for generating neural networks oriented to
solve particular problems given on terms of input and output.
With this algorithm they intend to specify formal tools of general
use for network definition, and to disclose underlying processing
structures of the living organisms. The concepts of genetic code,
embryogenesis and evolution are the main keys in the development of
the algorithm they propose.},
topology = { },
network = { },
encoding = { },
evolves = { },
applications = { }
}
@techreport{Voigt93,
author = {Voigt, H-M. and Born, J. and Santibanez-Koref, I.},
title = {Evolutionary Structuring of Artificial Neural Networks},
institution = {Bionics and Evolution Techniques Laboratory, Technical University Berlin},
year = {1993},
number = {TR-02-93},
topology = { },
network = { },
encoding = { },
evolves = { },
applications = { }
}
@techreport{Weiss90,
author = {Weiss, G.},
title = {Combining Neural and Evolutionary Learning: Aspects and
Approaches},
institution = {Technical University of Munich},
year = {1990},
month = may,
number = {FKI-132-90},
topology = { },
network = { },
encoding = { },
evolves = { },
applications = { }
}
@techreport{Werner90,
author = {Werner, G.M. and Dyer, M.G.},
title = {Evolution of Communication in Artificial Organisms},
institution = {AI Lab, UCLA},
year = {1990},
number = {UCLA-AI-90-06},
topology = { },
network = { },
encoding = { },
evolves = { },
applications = {simulated world}
}
@inproceedings{Werner91,
key = {genetic algorithm connectionism neural networks},
author = {G.M. Werner and M.G. Dyer},
title = {Evolution of Communication in Artificial Organisms},
booktitle = {Artificial Life II: Proceedings of the Workshop on Artificial
Life Held in 1990},
year = {1991},
pages = {659--687},
topology = { },
network = { },
encoding = { },
evolves = { },
applications = {simulated world}
}
@article{Whitaker93,
key = {genetic algorithms connectionism neural networks cogann},
author = {Kevin W. Whitaker and Ravi K. Prasanth and Robert E. Markin},
title = {Specifying Exhaust Nozzle Contours With a Neural Network},
journal = {AIAA Journal},
year = {1993},
month = feb,
volume = {31},
number = {2},
pages = {273--277},
abstract = {ABSTRACT
Thrust vectoring is continuing to become an important issue in
future military aircraft system designs. A recently developed concept of
vectoring aircraft thrust makes use of flexible exhaust nozzles. Subtle
modifications in the nozzle wall contours produce a nonuniform flowfield
containing a complex pattern of shock and expansion waves. The end
result, due to the asymmetric velocity and pressure distributions,
is vectored thrust. Specification of the nozzle contours required
for a desired thrust vector angle (an inverse design problem) has
been achieved with genetic algorithms. However, this approach is
computationally intensive, preventing nozzles from being designed
on demand, which is necessary for an operational aircraft system.
An investigation was conducted into using genetic algorithms to
train a neural network in an attempt to obtain, in real time,
two-dimensional nozzle contours. Results show that genetic-algorithm
trained neural networks provide a viable, time-efficient alternative
for designing thrust vectoring nozzle contours. Thrust vector
angles up to 20 deg were obtained within an average error of
0.0914 deg. The error surfaces encountered were highly degenerate
and thus the robustness of genetic algorithms was well suited
for minimizing global errors.},
topology = { },
network = { },
encoding = { },
evolves = {parameters},
applications = { }
}
@article{Whitehead94,
author = {Whitehead, B.A. and Choate, T.D.},
title = {Evolving Space-Filling Curves to Distribute Radial Basis
Functions Over an Input Space},
journal = {IEEE Transactions on Neural Networks},
year = {1994},
volume = {5},
pages = {15--23},
topology = { },
network = { },
encoding = { },
evolves = { },
applications = { }
}
@inproceedings{Whitley90a,
author = {Whitley, D. and Bogart, C.},
title = {The Evolution of Connectivity: Pruning Neural Networks Using
Genetic Algorithms},
booktitle = {Proceedings of the International Joint Conference on Neural
Networks},
year = {1990},
publisher = {IEEE Press},
pages = {134--137},
topology = { },
network = { },
encoding = { },
evolves = { },
applications = { }
}
@inproceedings{Whitley91,
author = {Whitley, D. and Dominic, S. and Das, R.},
title = {Genetic Reinforcement Learning with Multilayer Neural Networks},
booktitle = {Proceedings of the Fourth International Conference on
Genetic Algorithms},
year = {1991},
editor = {Belew, R. K. and Booker, L. B.},
publisher = {Morgan Kaufmann},
address = {San Mateo, CA},
pages = {562--569},
topology = { },
network = { },
encoding = { },
evolves = { },
applications = { }
}
@inproceedings{Whitley89,
author = {Whitley, D. and Hanson, T.},
title = {Optimizing Neural Networks Using Faster, More Accurate Genetic
Search},
booktitle = {Proceedings of the Third International Conference on
Genetic Algorithms},
year = {1989},
editor = {Schaffer, J.D.},
publisher = {Morgan Kaufmann},
pages = {391--396},
institution = {Computer Science Dept. Colorado Univ},
topology = { },
network = { },
encoding = { },
evolves = { },
applications = { }
}
@article{Whitley90,
author = {Whitley, D. and Starkweather, T. and Bogart, C.},
title = {Genetic Algorithms and Neural Networks: Optimizing Connections
and Connectivity},
journal = {Parallel Computing},
year = {1990},
volume = {14},
number = {3},
pages = {347--361},
institution = {Colorado State University},
topology = { },
network = { },
encoding = { },
evolves = {connectivity, parameters},
applications = { }
}
@inproceedings{Whitley88,
key = {connectionism, cogann ref},
author = {Darrell Whitley},
title = {Applying Genetic Algorithms to Neural Network
Problems: A Preliminary Report},
booktitle = {Proceedings of the International Neural Network Society
Conference},
organization = {PROC INNS88},
year = {1988},
topology = { },
network = { },
encoding = { },
evolves = { },
applications = { }
}
@techreport{Whitley88a,
key = {connectionism, cogann ref},
author = {Darrell Whitley},
title = {Applying Genetic Algorithms to Neural Network Learning},
institution = {Department of Computer Science, Colorado State University},
year = {1988},
number = {CS-88-128},
note = {Also appeared in: Proceedings of the 7th Conference for the Study of
Artificial Intelligence and Simulated Behavior, Sussex, England 1989.
Pitman Publishers.},
topology = { },
network = { },
encoding = { },
evolves = { },
applications = { }
}
@book{Whitley89a,
key = {cogann ref, connectionism},
author = {Darrell Whitley},
title = {Optimizing Neural Networks Using Genetic Algorithms},
year = {1989},
publisher = {Markt and Technik},
address = {Munich, Germany},
journal = {Special Neurocomputing Issue of Design and Electronik},
topology = { },
network = { },
encoding = { },
evolves = {parameters},
applications = { }
}
@inproceedings{Whitley89b,
key = {connectionism, cogann ref},
author = {Darrell Whitley},
title = {Genetic Algorithm Applications: Neural Nets,
Traveling Salesmen and Schedules},
booktitle = {Proceedings of the 1989 Rocky Mountain Conference on
Artificial Intelligence},
year = {1989},
address = {Denver, CO},
topology = { },
network = { },
encoding = { },
evolves = { },
applications = { }
}
@inproceedings{Whitley92,
key = {genetic algorithms, connectionism, hill-climbing,
mutation only, cogann ref},
author = {Darrell Whitley and S. Dominic and R. Das and C. Anderson},
title = {Genetic Reinforcement Learning for Neurocontrol Problems},
organization = {Machine Learning},
year = {1992},
abstract = {Abstract
Empirical tests indicate that the class of genetic algorithms which
have been shown to yield good performance for neural network
weight optimization are really genetic hill-climbers, with a
strong reliance on mutation rather than hyperplane sampling.
These results are consistent with the theoretical results of
Goldberg (1991) analyzing real-coded genetic algorithms. We argue that
neural network learning applications such as neurocontrol problems
are perhaps more appropriate for these genetic hill-climbers than
supervised learning applications because in reinforcement learning
applications gradient information is not directly available. On
an inverted pendulum control problem reinforcement learning
produces competitive results with AHC, another well-known
reinforcement learning paradigm for neural networks that employs
temporal difference methods. The genetic hill-climbing algorithm
appears to be robust over a wide range of learning conditions.
We also discuss several approaches for evaluating neural network
performance.},
topology = { },
network = { },
encoding = { },
evolves = { },
applications = { }
}
@inproceedings{Whitley,
key = {genetic algorithms relation AI machine learning
connectionist networks; reinforcement learning, connectionism,
training, real coding, pole balancing, cogann ref},
author = {Darrell Whitley and Stephen Dominic and Rajarshi Das},
title = {Genetic Reinforcement Learning with Multilayer Neural Networks},
booktitle = {Proceedings of the International Conference on Genetic
Algorithms},
year = {1991},
pages = {562--569},
abstract = {Abstract:
Empirical tests indicate that the genetic algorithms which have
produced good performance for neural network weight optimization are
really genetic hill-climbers, with a strong reliance on mutation
rather than hyperplane sampling. Initial results are presented
using genetic hill-climbers for reinforcement learning with
multilayer neural networks for the control of a simulated cart-centering
and pole-balancing dynamical system. "Genetic reinforcement learning"
produces competitive results with AHC, a well-known reinforcement
learning paradigm for neural networks that employs temporal
difference methods.},
topology = { },
network = { },
encoding = { },
evolves = { },
applications = { }
}
@inproceedings{Wieland90,
author = {Alexis P. Wieland},
title = {Evolving Controls for Unstable Systems},
booktitle = {Proceedings of the 1990 Connectionist Models Summer School},
year = {1990},
editor = {Touretzky, D.S. and Elman, J.L. and Sejnowski, T.J.
and Hinton, G.E.},
publisher = {Morgan Kaufmann},
pages = {91--102},
topology = { },
network = { },
encoding = { },
evolves = { },
applications = { }
}
@article{Wieland91,
key = {genetic algorithms, connectionism, pole balancing problems,
cogann ref},
author = {Alexis P. Wieland},
title = {Evolving Neural Network Controllers for Unstable Systems},
journal = {IJCNN-91},
year = {1991},
volume = {II},
pages = {667--673},
topology = { },
network = { },
encoding = { },
evolves = { },
applications = { }
}
@inproceedings{Wilke93,
key = {connectionism, genetic algorithms, cogann},
author = {Peter Wilke},
title = {Simulation of Neural Network and Genetic Algorithms in a
Distributed Computing Environment Using NeuroGraph},
booktitle = {Proceedings of the World Congress on Neural Networks},
year = {1993},
pages = {I269--I272},
abstract = {ABSTRACT
NeuroGraph is a simulation environment for design, construction
and execution of neural networks and genetic algorithms in a
distributed computing environment. The simulator parts either run
on single computers or as distributed applications on Unix/X-based
networks, consisting of personal computers, workstations, or
multi-processors. The parallelization component offers the
possibility to divide computational tasks into concurrently
executable modules, according to restrictions due to the neural
net topology and computer net capabilities, i.e. NeuroGraph tries
to select the best configuration out of the available distributed
hardware environment to fit performance requirements.},
topology = { },
network = { },
encoding = { },
evolves = { },
applications = { }
}
@inproceedings{Williams94,
author = {Williams, B. V. and Bostock, R. T. J. and Bounds, D. G.
and Harget, A. J.},
title = {Improving Classification Performance in the Bumptree
Network by Optimising Topology with a Genetic Algorithm},
booktitle = {IEEE Evolutionary Computation 1994},
year = {1994},
abstract = {ABSTRACT:
The Bumptree is a binary tree of Gaussians which partitions a
Euclidean space. The leaf layer consists of a set of local linear
classifiers, and the whole system can be trained in a supervised
manner to form a piecewise linear model. In this paper a
Genetic Algorithm (GA) is used to optimise the topology of the tree.
We discuss the properties of the genetic coding scheme, and argue
that the GA/bumptree does not suffer from the same scaling
problems as other GA/neural-net hybrids. Results on test problems,
including a non-trivial classification task, are encouraging,
with the GA able to discover topologies which give improved
performance over those generated by a constructive algorithm.},
topology = { },
network = { },
encoding = { },
evolves = { },
applications = { }
}
@inproceedings{Williams93,
author = {Williams, B.V. and Bounds, D.G.},
title = {Learning and Evolution in Populations of Backprop Networks},
booktitle = {Proceedings of ECAL93 -- European Conference on
Artificial Life},
year = {1993},
topology = { },
network = { },
encoding = { },
evolves = { },
applications = { }
}
@misc{Wilson??,
author = {C. L. Wilson and O.M. Omidvar},
title = {Optimization of Neural Network Topology and Information
Content Using Boltzmann Methods},
year = {????},
howpublished = {NIST ir4766, at the NIST archive},
note = {available via ftp from (the NIST archive)},
topology = { },
network = { },
encoding = { },
evolves = {connectivity},
applications = { }
}
@incollection{Wilson90,
key = {genetic algorithm, connectionism, cogann ref},
author = {Stewart W. Wilson},
title = {Perceptron Redux: Emergence of Structure},
booktitle = {Emergent Computation},
year = {1990},
editor = {Stephanie Forrest},
publisher = {North Holland},
address = {Amsterdam},
pages = {249--256},
topology = { },
network = { },
encoding = { },
evolves = {connectivity},
applications = { }
}
@inproceedings{Yaeger94,
author = {Larry Yaeger},
title = {Computational Genetics, Physiology, Metabolism, Neural Systems,
Learning, Vision and Behavior or PolyWorld: Life in a New Context},
booktitle = {Artificial Life III, Proceedings Volume XVII},
organization = {Santa Fe Institute Studies in the Sciences of Complexity},
year = {1994},
editor = {C. G. Langton},
publisher = {Addison-Wesley},
pages = {263--298},
topology = { },
network = { },
encoding = { },
evolves = { },
applications = {simulated world}
}
@inproceedings{Yamada93,
author = {Yamada, T. and Yabuta, T.},
title = {Remarks on Neural Network Controller Which Uses Genetic
Algorithm},
booktitle = {IJCNN'93-NAGOYA Proceedings of the 1993 International Joint
Conference on Neural Networks, Nagoya (Japan)},
year = {1993},
publisher = {IEEE},
pages = {2783--2786},
topology = { },
network = { },
encoding = { },
evolves = { },
applications = { }
}
@techreport{Yao92,
author = {Xin Yao},
title = {A Review of Evolutionary Artificial Neural Networks},
institution = {Commonwealth Scientific and Industrial Research
Organization, Division of Building, Construction and Engineering},
year = {1992},
address = {PO Box 56, Highett, Victoria 3190, Australia},
topology = { },
network = { },
encoding = { },
evolves = { },
applications = { }
}
@article{Yao93,
key = {genetic algorithms connectionism neural networks cogann},
author = {Xin Yao},
title = {A Review of Evolutionary Artificial Neural Networks},
journal = {International Journal of Intelligent Systems},
year = {1993},
month = apr,
volume = {8},
number = {4},
pages = {539--567},
abstract = {Research on potential interactions between connectionist
learning systems, i.e., artificial neural networks (ANNs), and
evolutionary search procedures, like genetic algorithms (GAs),
has attracted a lot of attention. Evolutionary ANNs (EANNs)
can be considered as the combination of ANNs and evolutionary
search procedures. This article first distinguishes among
three kinds of evolution in EANNs, i.e., the evolution of
connection weights, of architectures, and of learning rules.
Then it reviews each kind of evolution in detail and
analyzes critical issues related to different evolutions.
The review shows that although a lot of work has been done
on the evolution of connection weights and architectures,
few attempts have been made to understand the evolution of
learning rules. Interactions among different evolutions
are seldom mentioned in current research. However, the evolution
of learning rules and its interactions with other kinds of evolution,
play a vital role in EANNs. Finally, this article briefly
describes a general framework for EANNs, which not only includes
the aforementioned three kinds of evolution, but also considers
interactions among them.},
topology = { },
network = { },
encoding = { },
evolves = { },
applications = { }
}
@inproceedings{Zhang93,
key = {connectionism, cogann},
author = {Byoung-Tak Zhang and H. M{\"u}hlenbein},
title = {Genetic Programming of Minimal Neural Nets Using
{Occam's} Razor},
booktitle = {Proceedings of the International Conference on Genetic
Algorithms},
year = {1993},
topology = { },
network = { },
encoding = {indirect, LISP program},
evolves = { },
applications = { }
}
@inproceedings{Zhang91,
key = {genetic algorithms connectionism neural networks cogann},
author = {Byoung-Tak Zhang and Gerd Veenker},
title = {Neural Networks that Teach Themselves Through Genetic
Discovery of Novel Examples},
booktitle = {Proceedings of the International Joint Conference on
Neural Networks},
year = {1991},
pages = {690--695},
abstract = {The authors introduce an active learning paradigm for neural
networks. In contrast to the passive paradigm, the learning in
the active paradigm is initiated by the machine learner instead
of its environment or teacher. The authors present a learning
algorithm that uses a genetic algorithm for creating novel examples
to teach multilayer feedforward networks. The creative learning
networks, based on their own knowledge, discover new examples,
criticize and select useful ones, train themselves, and thereby
extend their existing knowledge. Experiments on function
extrapolation show that the self-teaching neural networks not
only reduce the teaching efforts of the human, but the genetically
created examples also contribute robustly to the improvement of
generalization performance and the interpretation of the
connectionist knowledge.},
topology = { },
network = { },
encoding = { },
evolves = { },
applications = { }
}