Neuro-Evolutionary Approach to Physics-Aware Symbolic Regression
Created by W.Langdon from
gp-bibliography.bib Revision:1.8464
@inproceedings{kubalik:2025:GECCO,
  author        = {Kubalik, Jiri and Babuska, Robert},
  title         = {Neuro-Evolutionary Approach to Physics-Aware Symbolic Regression},
  booktitle     = {Proceedings of the 2025 Genetic and Evolutionary Computation Conference},
  year          = {2025},
  editor        = {Xue, Bing and Wilson, Dennis},
  pages         = {1264--1272},
  address       = {Malaga, Spain},
  series        = {GECCO '25},
  month         = "14-18 " # jul,
  organization  = {SIGEVO},
  publisher     = {Association for Computing Machinery},
  publisher_address = {New York, NY, USA},
  keywords      = {genetic algorithms, genetic programming, Neuroevolution},
  isbn13        = {979-8-4007-1465-8},
  url           = {https://doi.org/10.1145/3712256.3726434},
  doi           = {10.1145/3712256.3726434},
  size          = {9 pages},
  abstract      = {Symbolic regression is a technique that can
                   automatically derive analytic models from data.
                   Traditionally, symbolic regression has been implemented
                   primarily through genetic programming that evolves
                   populations of candidate solutions sampled by genetic
                   operators, crossover and mutation. More recently,
                   neural networks have been employed to learn the entire
                   analytical model, i.e., its structure and coefficients,
                   using regularized gradient-based optimization. Although
                   this approach tunes the model's coefficients better, it
                   is prone to premature convergence to suboptimal model
                   structures. Here, we propose a neuro-evolutionary
                   symbolic regression method that combines the strengths
                   of evolutionary-based search for optimal neural network
                   (NN) topologies with gradient-based tuning of the
                   network's parameters. Due to the inherent high
                   computational demand of evolutionary algorithms, it is
                   not feasible to learn the parameters of every candidate
                   NN topology to the full convergence. Thus, our method
                   employs a memory-based strategy and population
                   perturbations to enhance exploitation and reduce the
                   risk of being trapped in suboptimal NNs. In this way,
                   each NN topology can be trained using only a short
                   sequence of back-propagation iterations. The proposed
                   method was experimentally evaluated on three real-world
                   test problems and has been shown to outperform other
                   NN-based approaches regarding the quality of the models
                   obtained.},
  notes         = {GECCO-2025 NE A Recombination of the 34th
                   International Conference on Genetic Algorithms (ICGA)
                   and the 30th Annual Genetic Programming Conference
                   (GP)},
}
Genetic Programming entries for
Jiri Kubalik
Robert Babuska
Citations