Gradient Descent Emerges via Natural Selection
Created by W.Langdon from gp-bibliography.bib Revision:1.8620
@InProceedings{shen:2025:CEC,
  author =       "Ailun Shen",
  title =        "Gradient Descent Emerges via Natural Selection",
  booktitle =    "2025 IEEE Congress on Evolutionary Computation (CEC)",
  year =         "2025",
  editor =       "Yaochu Jin and Thomas Baeck",
  address =      "Hangzhou, China",
  month =        "8-12 " # jun,
  publisher =    "IEEE",
  keywords =     "genetic algorithms, genetic programming, Heart,
                  Evolutionary computation, Transformers, Artificial
                  intelligence, Hebbian theory, Biological neural
                  networks, Synapses, Optimization, Gradient Descent,
                  Neural Networks, ANN, ExprTree",
  isbn13 =       "979-8-3315-3432-5",
  DOI =          "10.1109/CEC65147.2025.11043088",
abstract = "Backed by sophisticated neural networks (NNs) that
simulate the functions of the human brain, Artificial
Intelligence (AI) has positioned itself to address
complex problems in intelligence, as evidenced by
recent breakthroughs such as Convolutional Neural
Networks (CNNs) and Transformers [31]. At the heart of
AI learning lies gradient descent, a foundational
optimisation approach that underpins the well-known
backpropagation algorithm. Interestingly, little is
known about how the human brain's synapses adjust their
strength during learning. A recent article [1]
co-authored by Nobel laureate and AI pioneer Geoffrey
Hinton hypothesizes that the slow evolution of genes
may have driven the brain to approximate the
computation of gradients necessary for efficient
learning. Motivated by this hypothesis, a genetic
programming (GP), ExprTree, was developed to simulate
evolutionary processes and evolve gradient functions,
which were applied to train artificial neural networks,
replacing the gradients typically used in
backpropagation. Experimental results demonstrate that,
through the survival of the fittest, the genetic
algorithm evolves functions that closely approximate
gradient calculations, enabling the convergence of
learning tasks. Furthermore, this evolved-function
approach aligns with Hebbian learning principles [16]
and offers the possibility of low energy consumption.
This research provided the first quantitative evidence
that gradient descent can naturally emerge as a result
of evolutionary processes within neural networks'
learning framework.",
  notes =        "also known as \cite{11043088}",
}
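
The abstract describes evolving update expressions that take the place of backpropagation's analytic gradients. Below is a minimal illustrative sketch of that general idea, not the paper's ExprTree system: the expression language, the toy linear-regression task, and all names and parameters are assumptions made for this example. It evolves a per-synapse update rule built from the presynaptic input x and the error signal err, and selects rules by how well they train a single weight.

# Minimal sketch (assumed setup, not the paper's ExprTree): evolve a weight-update
# expression delta_w = f(x, err) and select it by how well it trains a single
# weight on the toy task y = 2x, where the analytic gradient rule is err * x.
import random
import math

random.seed(0)

# Expression trees over the quantities available at a synapse:
#   'x'   -> presynaptic activity (input)
#   'err' -> postsynaptic error signal (prediction - target)
TERMINALS = ['x', 'err', 'const']
OPS = {'+': lambda a, b: a + b,
       '-': lambda a, b: a - b,
       '*': lambda a, b: a * b}

def random_tree(depth=3):
    """Grow a random expression tree of bounded depth."""
    if depth <= 0 or random.random() < 0.3:
        t = random.choice(TERMINALS)
        return ('const', random.uniform(-1, 1)) if t == 'const' else (t,)
    op = random.choice(list(OPS))
    return (op, random_tree(depth - 1), random_tree(depth - 1))

def evaluate(tree, x, err):
    """Evaluate an expression tree for one synapse."""
    tag = tree[0]
    if tag == 'x':
        return x
    if tag == 'err':
        return err
    if tag == 'const':
        return tree[1]
    return OPS[tag](evaluate(tree[1], x, err), evaluate(tree[2], x, err))

def mutate(tree, depth=3):
    """Replace a random subtree with a freshly grown one."""
    if random.random() < 0.3 or len(tree) == 1 or tree[0] == 'const':
        return random_tree(depth)
    op, left, right = tree
    if random.random() < 0.5:
        return (op, mutate(left, depth - 1), right)
    return (op, left, mutate(right, depth - 1))

def fitness(tree):
    """Train w with the candidate update rule on y = 2x; return final loss."""
    w, lr = 0.0, 0.1
    data = [(x / 10.0, 2.0 * (x / 10.0)) for x in range(-10, 11)]
    for _ in range(20):
        for x, y in data:
            err = w * x - y                   # postsynaptic error signal
            w -= lr * evaluate(tree, x, err)  # evolved rule replaces err * x
            if not math.isfinite(w) or abs(w) > 1e6:
                return float('inf')           # diverged: worst possible fitness
    return sum((w * x - y) ** 2 for x, y in data) / len(data)

# Simple (mu + lambda)-style evolution: keep the best rules, refill by mutation.
population = [random_tree() for _ in range(60)]
for gen in range(30):
    population.sort(key=fitness)
    survivors = population[:15]
    population = survivors + [mutate(random.choice(survivors)) for _ in range(45)]

best = min(population, key=fitness)
print('best rule:', best, 'loss:', fitness(best))

For this toy task the update that minimises the final loss is err * x, which is exactly the analytic gradient of the squared error with respect to w, so a low-loss survivor here mirrors the abstract's claim that selection pressure alone can rediscover gradient-like updates.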
Genetic Programming entries for Ailun Shen