Transformers are widely used in natural language processing due to their ability to model longer-term dependencies in text. Although these models achieve state-of-the-art performance for many language-related tasks, their applicability outside of the natural language processing field has been minimal. In this work, we propose the use of transformer models for the prediction of dynamical systems representative of physical phenomena. The use of Koopman-based embeddings provides a unique and powerful method for projecting any dynamical system into a vector representation which can then be predicted by a transformer. The proposed model is able to accurately predict various dynamical systems and outperform classical methods that are commonly used in the scientific machine learning literature. Code available at: https://github.com/zabaras/transformer-physx.
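The abstract describes a two-stage pipeline: a learned, Koopman-inspired embedding maps physical states into a fixed-size vector space, and a transformer then predicts the dynamics in that embedded space. Below is a minimal PyTorch sketch of that idea. All class names, layer sizes, and parameters here are illustrative assumptions, not the transformer-physx API; see the linked repository for the authors' implementation.

    import torch
    import torch.nn as nn


    class KoopmanEmbedding(nn.Module):
        """Encode a physical state into a latent vector and decode it back."""

        def __init__(self, state_dim: int, embed_dim: int):
            super().__init__()
            self.encoder = nn.Sequential(
                nn.Linear(state_dim, 64), nn.ReLU(), nn.Linear(64, embed_dim)
            )
            self.decoder = nn.Sequential(
                nn.Linear(embed_dim, 64), nn.ReLU(), nn.Linear(64, state_dim)
            )

        def forward(self, x: torch.Tensor) -> torch.Tensor:
            return self.encoder(x)

        def recover(self, g: torch.Tensor) -> torch.Tensor:
            return self.decoder(g)


    class EmbeddingTransformer(nn.Module):
        """Autoregressively predict the next embedded state from past steps."""

        def __init__(self, embed_dim: int, n_heads: int = 4, n_layers: int = 2):
            super().__init__()
            layer = nn.TransformerEncoderLayer(
                d_model=embed_dim, nhead=n_heads, batch_first=True
            )
            self.transformer = nn.TransformerEncoder(layer, num_layers=n_layers)
            self.head = nn.Linear(embed_dim, embed_dim)

        def forward(self, g_seq: torch.Tensor) -> torch.Tensor:
            # Causal mask so each step attends only to earlier time steps.
            t = g_seq.size(1)
            mask = torch.triu(torch.full((t, t), float("-inf")), diagonal=1)
            h = self.transformer(g_seq, mask=mask)
            return self.head(h)  # predicted embeddings, shifted by one step


    # Toy usage: embed a trajectory of 3-dimensional states, predict the
    # embedded dynamics, then decode the last prediction back to state space.
    embed = KoopmanEmbedding(state_dim=3, embed_dim=32)
    model = EmbeddingTransformer(embed_dim=32)
    states = torch.randn(8, 16, 3)          # (batch, time, state_dim)
    g = embed(states)                        # (batch, time, embed_dim)
    g_pred = model(g)                        # next-step embedding predictions
    x_pred = embed.recover(g_pred[:, -1])    # decoded next state, (batch, 3)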
%0 Journal Article
%1 geneva_transformers_2022
%A Geneva, Nicholas
%A Zabaras, Nicholas
%D 2022
%J Neural Networks
%K Deep learning, Koopman, Physics, Self-attention, Surrogate modeling, Transformers
%P 272--289
%R 10.1016/j.neunet.2021.11.022
%T Transformers for modeling physical systems
%U https://www.sciencedirect.com/science/article/pii/S0893608021004500
%V 146
%X Transformers are widely used in natural language processing due to their ability to model longer-term dependencies in text. Although these models achieve state-of-the-art performance for many language-related tasks, their applicability outside of the natural language processing field has been minimal. In this work, we propose the use of transformer models for the prediction of dynamical systems representative of physical phenomena. The use of Koopman-based embeddings provides a unique and powerful method for projecting any dynamical system into a vector representation which can then be predicted by a transformer. The proposed model is able to accurately predict various dynamical systems and outperform classical methods that are commonly used in the scientific machine learning literature. Code available at: https://github.com/zabaras/transformer-physx.
@article{geneva_transformers_2022,
abstract = {Transformers are widely used in natural language processing due to their ability to model longer-term dependencies in text. Although these models achieve state-of-the-art performance for many language-related tasks, their applicability outside of the natural language processing field has been minimal. In this work, we propose the use of transformer models for the prediction of dynamical systems representative of physical phenomena. The use of Koopman-based embeddings provides a unique and powerful method for projecting any dynamical system into a vector representation which can then be predicted by a transformer. The proposed model is able to accurately predict various dynamical systems and outperform classical methods that are commonly used in the scientific machine learning literature. Code available at: https://github.com/zabaras/transformer-physx.},
author = {Geneva, Nicholas and Zabaras, Nicholas},
doi = {10.1016/j.neunet.2021.11.022},
issn = {0893-6080},
journal = {Neural Networks},
keywords = {Deep learning, Koopman, Physics, Self-attention, Surrogate modeling, Transformers},
language = {en},
month = feb,
pages = {272--289},
title = {Transformers for modeling physical systems},
url = {https://www.sciencedirect.com/science/article/pii/S0893608021004500},
urldate = {2023-07-10},
volume = 146,
year = 2022
}