Neural Operators are a recent class of data-driven models for learning solutions to Partial Differential Equations (PDEs). Traditionally, these models are trained in an autoregressive fashion using data collected at discrete time points in the evolution of the PDE. This setup gives rise to two problems: (i) poor temporal generalization due to error accumulation and (ii) poor zero-shot super-resolution capabilities. To address these issues, we propose Vectorized Conditional Neural Fields (VCNeF), a general framework that utilizes transformers and implicit neural representations to efficiently solve time-dependent PDEs of varying coefficients. A comprehensive evaluation of VCNeF on the challenging 1D and 2D PDEs from PDEBench demonstrates the superiority of our model over four state-of-the-art baselines. Furthermore, our proposed model achieves faster inference and generalizes better to unseen PDE parameters than the compared models.
%0 Conference Paper
%1 hagnberger2024vectorized
%A Hagnberger, Jan
%A Kalimuthu, Marimuthu
%A Musekamp, Daniel
%A Niepert, Mathias
%B Proceedings of the AI4DifferentialEquations in Science workshop at ICLR 2024, May 7-11, 2024, Austria
%D 2024
%I ICLR
%K mls workshop
%T Vectorized Conditional Neural Fields: A Framework for Solving Time-dependent PDEs
%X Neural Operators are a recent class of data-driven models for learning solutions to Partial Differential Equations (PDEs). Traditionally, these models are trained in an autoregressive fashion using data collected at discrete time points in the evolution of the PDE. This setup gives rise to two problems: (i) poor temporal generalization due to error accumulation and (ii) poor zero-shot super-resolution capabilities. To address these issues, we propose Vectorized Conditional Neural Fields (VCNeF), a general framework that utilizes transformers and implicit neural representations to efficiently solve time-dependent PDEs of varying coefficients. A comprehensive evaluation of VCNeF on the challenging 1D and 2D PDEs from PDEBench demonstrates the superiority of our model over four state-of-the-art baselines. Furthermore, our proposed model achieves faster inference and generalizes better to unseen PDE parameters than the compared models.
@inproceedings{hagnberger2024vectorized,
  abstract  = {Neural Operators are a recent class of data-driven models for learning solutions to Partial Differential Equations (PDEs). Traditionally, these models are trained in an autoregressive fashion using data collected at discrete time points in the evolution of the PDE. This setup gives rise to two problems: (i) poor temporal generalization due to error accumulation and (ii) poor zero-shot super-resolution capabilities. To address these issues, we propose Vectorized Conditional Neural Fields (VCNeF), a general framework that utilizes transformers and implicit neural representations to efficiently solve time-dependent PDEs of varying coefficients. A comprehensive evaluation of VCNeF on the challenging 1D and 2D PDEs from PDEBench demonstrates the superiority of our model over four state-of-the-art baselines. Furthermore, our proposed model achieves faster inference and generalizes better to unseen PDE parameters than the compared models.},
  added-at  = {2024-03-22T23:33:27.000+0100},
  author    = {Hagnberger, Jan and Kalimuthu, Marimuthu and Musekamp, Daniel and Niepert, Mathias},
  biburl    = {https://puma.ub.uni-stuttgart.de/bibtex/2db3d84dc046bcd476b6dd79b779292d2/joy},
  booktitle = {Proceedings of the {AI4DifferentialEquations} in Science workshop at {ICLR} 2024, May 7--11, 2024, Austria},
  interhash = {04f53638b4762526b06eba0ca1d57091},
  intrahash = {db3d84dc046bcd476b6dd79b779292d2},
  keywords  = {mls workshop},
  month     = may,
  publisher = {ICLR},
  timestamp = {2024-03-22T23:43:42.000+0100},
  title     = {{Vectorized Conditional Neural Fields}: A Framework for Solving Time-dependent {PDEs}},
  year      = {2024},
}