Symbolic regression, i.e. predicting a function from the observation of its
values, is well-known to be a challenging task. In this paper, we train
Transformers to infer the function or recurrence relation underlying sequences
of integers or floats, a typical task in human IQ tests which has hardly been
tackled in the machine learning literature. We evaluate our integer model on a
subset of OEIS sequences, and show that it outperforms built-in Mathematica
functions for recurrence prediction. We also demonstrate that our float model
is able to yield informative approximations of out-of-vocabulary functions and
constants, e.g. $\operatorname{bessel0}(x)\approx
\frac{\sin(x)+\cos(x)}{\sqrt{\pi x}}$ and $1.644934\approx \pi^2/6$. An
interactive demonstration of our models is provided at
https://symbolicregression.metademolab.com.
Beschreibung
[2201.04600] Deep Symbolic Regression for Recurrent Sequences
%0 Generic
%1 dascoli2022symbolic
%A d'Ascoli, Stéphane
%A Kamienny, Pierre-Alexandre
%A Lample, Guillaume
%A Charton, François
%D 2022
%K ak-symbolic-numeric deep-learning from:adulny maths recurrence-prediction sem_ws23 student-paper symbolic-regression transformer
%T Deep Symbolic Regression for Recurrent Sequences
%U http://arxiv.org/abs/2201.04600
%X Symbolic regression, i.e. predicting a function from the observation of its
values, is well-known to be a challenging task. In this paper, we train
Transformers to infer the function or recurrence relation underlying sequences
of integers or floats, a typical task in human IQ tests which has hardly been
tackled in the machine learning literature. We evaluate our integer model on a
subset of OEIS sequences, and show that it outperforms built-in Mathematica
functions for recurrence prediction. We also demonstrate that our float model
is able to yield informative approximations of out-of-vocabulary functions and
constants, e.g. $\operatorname{bessel0}(x)\approx
\frac{\sin(x)+\cos(x)}{\sqrt{\pi x}}$ and $1.644934\approx \pi^2/6$. An
interactive demonstration of our models is provided at
https://symbolicregression.metademolab.com.
@misc{dascoli2022symbolic,
  abstract      = {Symbolic regression, i.e. predicting a function from the observation of its
values, is well-known to be a challenging task. In this paper, we train
Transformers to infer the function or recurrence relation underlying sequences
of integers or floats, a typical task in human IQ tests which has hardly been
tackled in the machine learning literature. We evaluate our integer model on a
subset of OEIS sequences, and show that it outperforms built-in Mathematica
functions for recurrence prediction. We also demonstrate that our float model
is able to yield informative approximations of out-of-vocabulary functions and
constants, e.g. $\operatorname{bessel0}(x)\approx
\frac{\sin(x)+\cos(x)}{\sqrt{\pi x}}$ and $1.644934\approx \pi^2/6$. An
interactive demonstration of our models is provided at
https://symbolicregression.metademolab.com.},
  added-at      = {2023-10-23T11:50:54.000+0200},
  archiveprefix = {arXiv},
  author        = {d'Ascoli, Stéphane and Kamienny, Pierre-Alexandre and Lample, Guillaume and Charton, François},
  biburl        = {https://www.bibsonomy.org/bibtex/2097a699f27ce448a496b23296c19d9a4/adulny},
  description   = {[2201.04600] Deep Symbolic Regression for Recurrent Sequences},
  eprint        = {2201.04600},
  interhash     = {1ba41a8896b486dea83d15676afb5104},
  intrahash     = {097a699f27ce448a496b23296c19d9a4},
  keywords      = {ak-symbolic-numeric deep-learning from:adulny maths recurrence-prediction sem_ws23 student-paper symbolic-regression transformer},
  timestamp     = {2023-10-30T15:03:27.000+0100},
  title         = {Deep Symbolic Regression for Recurrent Sequences},
  url           = {http://arxiv.org/abs/2201.04600},
  year          = {2022},
}