J. Wallbridge. (2020). arXiv:2003.00130. Comment: 16 pages, 6 figures.
Abstract
We introduce a new deep learning architecture for predicting price movements
from limit order books. This architecture uses a causal convolutional network
for feature extraction in combination with masked self-attention to update
features based on relevant contextual information. This architecture is shown
to significantly outperform existing architectures such as those using
convolutional networks (CNN) and Long-Short Term Memory (LSTM) establishing a
new state-of-the-art benchmark for the FI-2010 dataset.
%0 Generic
%1 wallbridge2020transformers
%A Wallbridge, James
%D 2020
%K LOB finance time_series
%T Transformers for Limit Order Books
%U http://arxiv.org/abs/2003.00130
%X We introduce a new deep learning architecture for predicting price movements
from limit order books. This architecture uses a causal convolutional network
for feature extraction in combination with masked self-attention to update
features based on relevant contextual information. This architecture is shown
to significantly outperform existing architectures such as those using
convolutional networks (CNN) and Long-Short Term Memory (LSTM) establishing a
new state-of-the-art benchmark for the FI-2010 dataset.
@misc{wallbridge2020transformers,
  abstract      = {We introduce a new deep learning architecture for predicting price movements
from limit order books. This architecture uses a causal convolutional network
for feature extraction in combination with masked self-attention to update
features based on relevant contextual information. This architecture is shown
to significantly outperform existing architectures such as those using
convolutional networks (CNN) and Long-Short Term Memory (LSTM) establishing a
new state-of-the-art benchmark for the FI-2010 dataset.},
  added-at      = {2023-04-13T17:04:06.000+0200},
  archiveprefix = {arXiv},
  author        = {Wallbridge, James},
  biburl        = {https://www.bibsonomy.org/bibtex/2f16cbe945246f472d2f619cb9adc65d1/qilinw},
  description   = {Transformers for Limit Order Books},
  eprint        = {2003.00130},
  interhash     = {83d81003f8f2d32e1434547c0644e777},
  intrahash     = {f16cbe945246f472d2f619cb9adc65d1},
  keywords      = {LOB finance time_series},
  note          = {16 pages, 6 figures},
  timestamp     = {2023-04-13T17:04:06.000+0200},
  title         = {Transformers for Limit Order Books},
  url           = {http://arxiv.org/abs/2003.00130},
  year          = {2020},
}