Multistep prediction is the prediction of states based on initial states and a series of control inputs. This paper focuses on developing transformer models for multistep prediction of vehicle states and testing different modifications of the transformer architecture using the example of the prediction of a ship simulation. Research in NLP promises advantages with regard to training time and prediction accuracy for the transformer architecture compared to a state-of-the-art LSTM model. The author also investigates whether positional encodings are useful in this scenario and if a transformer model can learn the order of the inputs without positional encodings.
%0 Generic
%1 https://doi.org/10.18419/opus-11901
%A Bolz, Stefan
%D 2021
%K thesis
%R 10.18419/OPUS-11901
%T Multistep prediction of vehicle states using transformers
%U http://elib.uni-stuttgart.de/handle/11682/11918
%X Multistep prediction is the prediction of states based on initial states and a series of control inputs. This paper focuses on developing transformer models for multistep prediction of vehicle states and testing different modifications of the transformer architecture using the example of the prediction of a ship simulation. Research in NLP promises advantages with regard to training time and prediction accuracy for the transformer architecture compared to a state-of-the-art LSTM model. The author also investigates whether positional encodings are useful in this scenario and if a transformer model can learn the order of the inputs without positional encodings.
@comment{Bachelor's thesis, University of Stuttgart (OPUS 10.18419/opus-11901).
  Entry type is @mastersthesis (closest classic BibTeX thesis type); the `type`
  field overrides the printed label to "Bachelor's Thesis" in standard styles
  and in biblatex. Citation key kept as exported so existing \cite calls
  elsewhere keep working. Fields: DOI lowercased to match the key and DOI
  display convention; school uses the {\"a} BibTeX special character so
  sorting/labels stay correct under classic 8-bit BibTeX; title stored in
  Title Case so styles may downcase it without losing capitals.}
@mastersthesis{https://doi.org/10.18419/opus-11901,
  abstract    = {Multistep prediction is the prediction of states based on initial states and a series of control inputs. This paper focuses on developing transformer models for multistep prediction of vehicle states and testing different modifications of the transformer architecture using the example of the prediction of a ship simulation. Research in NLP promises advantages with regard to training time and prediction accuracy for the transformer architecture compared to a state-of-the-art LSTM model. The author also investigates whether positional encodings are useful in this scenario and if a transformer model can learn the order of the inputs without positional encodings.},
  added-at    = {2024-03-20T22:28:51.000+0100},
  author      = {Bolz, Stefan},
  biburl      = {https://puma.ub.uni-stuttgart.de/bibtex/2dee25f714d96e4513199bd08b7118c22/joy},
  copyright   = {info:eu-repo/semantics/openAccess},
  doi         = {10.18419/opus-11901},
  institution = {Department of Analytical Computing, Institute for Parallel and Distributed Systems},
  interhash   = {6b564fb875aa899ba32f9da407418916},
  intrahash   = {dee25f714d96e4513199bd08b7118c22},
  keywords    = {thesis},
  language    = {en},
  school      = {Universit{\"a}t Stuttgart},
  timestamp   = {2024-03-22T01:28:40.000+0100},
  title       = {Multistep Prediction of Vehicle States Using Transformers},
  type        = {Bachelor's Thesis},
  url         = {http://elib.uni-stuttgart.de/handle/11682/11918},
  year        = {2021},
}