Artificial neural networks (NNs) are one of the most frequently used machine learning approaches to construct interatomic potentials and enable efficient large-scale atomistic simulations with almost ab initio accuracy. However, the simultaneous training of NNs on energies and forces, which are a prerequisite for, e.g., molecular dynamics simulations, can be demanding. In this work, we present an improved NN architecture based on the previous GM-NN model [Zaverkin V.; Kästner, J. J. Chem. Theory Comput. 2020, 16, 5410−5421], which shows an improved prediction accuracy and considerably reduced training times. Moreover, we extend the applicability of Gaussian moment-based interatomic potentials to periodic systems and demonstrate the overall excellent transferability and robustness of the respective models. The fast training by the improved methodology is a prerequisite for training-heavy workflows such as active learning or learning-on-the-fly.
%0 Journal Article
%1 doi:10.1021/acs.jctc.1c00527
%A Zaverkin, Viktor
%A Holzmüller, David
%A Steinwart, Ingo
%A Kästner, Johannes
%D 2021
%J Journal of Chemical Theory and Computation
%K EXC2075 pn6
%N 10
%P 6658-6670
%R 10.1021/acs.jctc.1c00527
%T Fast and Sample-Efficient Interatomic Neural Network Potentials for Molecules and Materials Based on Gaussian Moments
%U https://pubs.acs.org/doi/10.1021/acs.jctc.1c00527
%V 17
%X Artificial neural networks (NNs) are one of the most frequently used machine learning approaches to construct interatomic potentials and enable efficient large-scale atomistic simulations with almost ab initio accuracy. However, the simultaneous training of NNs on energies and forces, which are a prerequisite for, e.g., molecular dynamics simulations, can be demanding. In this work, we present an improved NN architecture based on the previous GM-NN model [Zaverkin V.; Kästner, J. J. Chem. Theory Comput. 2020, 16, 5410−5421], which shows an improved prediction accuracy and considerably reduced training times. Moreover, we extend the applicability of Gaussian moment-based interatomic potentials to periodic systems and demonstrate the overall excellent transferability and robustness of the respective models. The fast training by the improved methodology is a prerequisite for training-heavy workflows such as active learning or learning-on-the-fly.
@article{doi:10.1021/acs.jctc.1c00527,
  abstract  = {Artificial neural networks (NNs) are one of the most frequently used machine learning approaches to construct interatomic potentials and enable efficient large-scale atomistic simulations with almost ab initio accuracy. However, the simultaneous training of NNs on energies and forces, which are a prerequisite for, e.g., molecular dynamics simulations, can be demanding. In this work, we present an improved NN architecture based on the previous GM-NN model [Zaverkin V.; Kästner, J. J. Chem. Theory Comput. 2020, 16, 5410−5421], which shows an improved prediction accuracy and considerably reduced training times. Moreover, we extend the applicability of Gaussian moment-based interatomic potentials to periodic systems and demonstrate the overall excellent transferability and robustness of the respective models. The fast training by the improved methodology is a prerequisite for training-heavy workflows such as active learning or learning-on-the-fly.},
  added-at  = {2021-10-18T10:32:40.000+0200},
  author    = {Zaverkin, Viktor and Holzmüller, David and Steinwart, Ingo and Kästner, Johannes},
  biburl    = {https://puma.ub.uni-stuttgart.de/bibtex/216972cc8e03472cd6246198ad7b3c1a6/simtechpuma},
  doi       = {10.1021/acs.jctc.1c00527},
  interhash = {ded689e5dca5f542b34bdd372cb59103},
  intrahash = {16972cc8e03472cd6246198ad7b3c1a6},
  journal   = {Journal of Chemical Theory and Computation},
  keywords  = {EXC2075 pn6},
  note      = {PMID: 34585927},
  number    = {10},
  pages     = {6658--6670},
  timestamp = {2021-10-18T08:32:40.000+0200},
  title     = {Fast and Sample-Efficient Interatomic Neural Network Potentials for Molecules and Materials Based on {Gaussian} Moments},
  url       = {https://pubs.acs.org/doi/10.1021/acs.jctc.1c00527},
  volume    = {17},
  year      = {2021},
}