In this paper we consider the problem of approximating vector-valued
functions over a domain $\Omega$. For this purpose, we use matrix-valued
reproducing kernels, which can be related to reproducing kernel Hilbert
spaces of vectorial functions and which can be viewed as an extension
of the scalar-valued case. These spaces seem promising, when modelling
correlations between the target function components, as the components
are not learned independently of each other. We focus on the interpolation
with such matrix-valued kernels. We derive error bounds for the interpolation
error in terms of a generalized power-function and we introduce a
subclass of matrix-valued kernels whose power-functions can be traced
back to the power-function of scalar-valued reproducing kernels.
Finally, we apply these kinds of kernels to some artificial data to
illustrate the benefit of interpolation with matrix-valued kernels
in comparison to a componentwise approach.
%0 Journal Article
%1 Wittwar2018
%A Wittwar, Dominik
%A Santin, Gabriele
%A Haasdonk, Bernard
%D 2018
%J Dolomites Res. Notes Approx.
%K anm ians imported
%P 23--29
%R 10.14658/pupj-drna-2018-3-4
%T Interpolation with uncoupled separable matrix-valued kernels
%U https://drna.padovauniversitypress.it/2018/3/4
%V 11
%X In this paper we consider the problem of approximating vector-valued
functions over a domain $\Omega$. For this purpose, we use matrix-valued
reproducing kernels, which can be related to reproducing kernel Hilbert
spaces of vectorial functions and which can be viewed as an extension
of the scalar-valued case. These spaces seem promising, when modelling
correlations between the target function components, as the components
are not learned independently of each other. We focus on the interpolation
with such matrix-valued kernels. We derive error bounds for the interpolation
error in terms of a generalized power-function and we introduce a
subclass of matrix-valued kernels whose power-functions can be traced
back to the power-function of scalar-valued reproducing kernels.
Finally, we apply these kinds of kernels to some artificial data to
illustrate the benefit of interpolation with matrix-valued kernels
in comparison to a componentwise approach.
@article{Wittwar2018,
  abstract  = {In this paper we consider the problem of approximating vector-valued
functions over a domain $\Omega$. For this purpose, we use matrix-valued
reproducing kernels, which can be related to Reproducing kernel Hilbert
spaces of vectorial functions and which can be viewed as an extension
of the scalar-valued case. These spaces seem promising, when modelling
correlations between the target function components, as the components
are not learned independently of each other. We focus on the interpolation
with such matrix-valued kernels. We derive error bounds for the interpolation
error in terms of a generalized power-function and we introduce a
subclass of matrix-valued kernels whose power-functions can be traced
back to the power-function of scalar-valued reproducing kernels.
Finally, we apply these kind of kernels to some artificial data to
illustrate the benefit of interpolation with matrix-valued kernels
in comparison to a componentwise approach.},
  added-at  = {2021-09-29T14:33:27.000+0200},
  author    = {Wittwar, Dominik and Santin, Gabriele and Haasdonk, Bernard},
  biburl    = {https://puma.ub.uni-stuttgart.de/bibtex/247292f23ad37e4ac569ba498fb880ed2/britsteiner},
  doi       = {10.14658/pupj-drna-2018-3-4},
  file      = {:PDF/Wittwar2018_uncoupled_journal.pdf:PDF},
  fjournal  = {Dolomites Research Notes on Approximation},
  interhash = {06bcd2546a8b611b0d83f6f25e5ddd66},
  intrahash = {47292f23ad37e4ac569ba498fb880ed2},
  journal   = {Dolomites Res. Notes Approx.},
  keywords  = {anm ians imported},
  owner     = {santinge},
  pages     = {23--29},
  sjournal  = {Dolomites Res.\ Notes Approx.},
  timestamp = {2021-09-29T12:35:04.000+0200},
  title     = {Interpolation with uncoupled separable matrix-valued kernels},
  url       = {https://drna.padovauniversitypress.it/2018/3/4},
  volume    = {11},
  year      = {2018},
}