This paper considers the problem of designing accelerated gradient-based algorithms for optimization and saddle-point problems. The class of objective functions is defined by a generalized sector condition. This class of functions contains strongly convex functions with Lipschitz gradients but also non-convex functions, which allows not only to address optimization problems but also saddle-point problems. The proposed design procedure relies on a suitable class of Lyapunov functions and on convex semi-definite programming. The proposed synthesis allows the design of algorithms that reach the performance of state-of-the-art accelerated gradient methods and beyond.
%0 Journal Article
%1 GraEbe21
%A Gramlich, Dennis
%A Ebenbauer, Christian
%A Scherer, Carsten W.
%D 2022
%J Systems & Control Letters
%K EXC2075 PN4 PN4-3 selected
%R 10.1016/j.sysconle.2022.105271
%T Synthesis of Accelerated Gradient Algorithms for Optimization and Saddle Point Problems using Lyapunov functions
%U https://doi.org/10.1016/j.sysconle.2022.105271
%V 165
%X This paper considers the problem of designing accelerated gradient-based algorithms for optimization and saddle-point problems. The class of objective functions is defined by a generalized sector condition. This class of functions contains strongly convex functions with Lipschitz gradients but also non-convex functions, which allows not only to address optimization problems but also saddle-point problems. The proposed design procedure relies on a suitable class of Lyapunov functions and on convex semi-definite programming. The proposed synthesis allows the design of algorithms that reach the performance of state-of-the-art accelerated gradient methods and beyond.
@article{GraEbe21,
  abstract     = {This paper considers the problem of designing accelerated gradient-based algorithms for optimization and saddle-point problems. The class of objective functions is defined by a generalized sector condition. This class of functions contains strongly convex functions with Lipschitz gradients but also non-convex functions, which allows not only to address optimization problems but also saddle-point problems. The proposed design procedure relies on a suitable class of Lyapunov functions and on convex semi-definite programming. The proposed synthesis allows the design of algorithms that reach the performance of state-of-the-art accelerated gradient methods and beyond.},
  added-at     = {2024-03-26T11:56:41.000+0100},
  author       = {Gramlich, Dennis and Ebenbauer, Christian and Scherer, Carsten W.},
  biburl       = {https://puma.ub.uni-stuttgart.de/bibtex/2175f2fc9bd1e64078b90b253a6d6596d/exc2075},
  doi          = {10.1016/j.sysconle.2022.105271},
  eprint       = {2006.09946},
  eprintclass  = {math.OC},
  eprinttype   = {arXiv},
  file         = {:GraEbe20 - Convex Synthesis of Accelerated Gradient Algorithms for Optimization and Saddle Point Problems Using Lyapunov Functions.pdf:PDF},
  interhash    = {5b4472847c14de83890cec5450738518},
  intrahash    = {175f2fc9bd1e64078b90b253a6d6596d},
  journal      = {Systems \& Control Letters},
  keywords     = {EXC2075 PN4 PN4-3 selected},
  timestamp    = {2024-03-27T16:19:07.000+0100},
  title        = {Synthesis of Accelerated Gradient Algorithms for Optimization and Saddle Point Problems Using {Lyapunov} Functions},
  volume       = {165},
  year         = {2022},
}