From 8e8e43d06adb2648e842eac4a26410ee334b57cd Mon Sep 17 00:00:00 2001
From: Brandon Amos
Date: Fri, 19 Apr 2024 13:05:21 -0400
Subject: [PATCH] add abstract

---
 publications/all.bib | 3 +++
 1 file changed, 3 insertions(+)

diff --git a/publications/all.bib b/publications/all.bib
index 9f3dab9..92d5a8d 100644
--- a/publications/all.bib
+++ b/publications/all.bib
@@ -4,6 +4,9 @@ @misc{sambharya2023learning
   year={2024},
   url={https://arxiv.org/abs/2309.07835},
   _venue={JMLR},
+  abstract={
+We introduce a machine-learning framework to warm-start fixed-point optimization algorithms. Our architecture consists of a neural network mapping problem parameters to warm starts, followed by a predefined number of fixed-point iterations. We propose two loss functions designed to either minimize the fixed-point residual or the distance to a ground truth solution. In this way, the neural network predicts warm starts with the end-to-end goal of minimizing the downstream loss. An important feature of our architecture is its flexibility, in that it can predict a warm start for fixed-point algorithms run for any number of steps, without being limited to the number of steps it has been trained on. We provide PAC-Bayes generalization bounds on unseen data for common classes of fixed-point operators: contractive, linearly convergent, and averaged. Applying this framework to well-known applications in control, statistics, and signal processing, we observe a significant reduction in the number of iterations and solution time required to solve these problems, through learned warm starts.
+  }
 }
 
 @misc{amos2023tutorial,
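The abstract above describes a concrete pipeline: a network maps problem parameters to a warm start, a fixed number of fixed-point iterations are unrolled on top of it, and training minimizes either the fixed-point residual or the distance to a ground-truth solution. The JAX sketch below is a minimal, hypothetical illustration of that idea, not the paper's implementation: the operator `fixed_point_op`, the network sizes, and all names are assumptions made for the example.

```python
# Hypothetical sketch of learned warm starts for fixed-point iterations.
# The toy contractive operator below stands in for a real solver step
# (e.g. an ADMM or splitting iteration); it is an illustrative assumption.
import jax
import jax.numpy as jnp

DIM = 10  # dimension of problem parameters and iterates (illustrative)


def init_params(key):
    """Initialize a one-hidden-layer MLP that predicts a warm start."""
    k1, k2 = jax.random.split(key)
    return {
        "W1": jax.random.normal(k1, (64, DIM)) / jnp.sqrt(DIM),
        "b1": jnp.zeros(64),
        "W2": jax.random.normal(k2, (DIM, 64)) / jnp.sqrt(64.0),
        "b2": jnp.zeros(DIM),
    }


def warm_start(params, theta):
    """Neural network mapping problem parameters theta to a warm start z0."""
    h = jnp.tanh(params["W1"] @ theta + params["b1"])
    return params["W2"] @ h + params["b2"]


def fixed_point_op(z, theta):
    """Toy contractive operator T(z; theta) used only for this sketch."""
    return 0.5 * z + 0.1 * jnp.tanh(theta - z)


def unroll(params, theta, num_steps):
    """Warm start followed by a predefined number of fixed-point iterations."""
    z = warm_start(params, theta)
    for _ in range(num_steps):
        z = fixed_point_op(z, theta)
    return z


def residual_loss(params, theta, num_steps=5):
    """Loss 1: fixed-point residual ||T(z_k) - z_k|| after k unrolled steps."""
    z = unroll(params, theta, num_steps)
    return jnp.linalg.norm(fixed_point_op(z, theta) - z)


def regression_loss(params, theta, z_star, num_steps=5):
    """Loss 2: distance to a ground-truth solution z_star after k steps."""
    z = unroll(params, theta, num_steps)
    return jnp.linalg.norm(z - z_star)


# One gradient step on the residual loss for a sampled problem instance;
# gradients flow end-to-end through the unrolled iterations into the network.
key = jax.random.PRNGKey(0)
params = init_params(key)
theta = jax.random.normal(key, (DIM,))
grads = jax.grad(residual_loss)(params, theta)
params = jax.tree_util.tree_map(lambda p, g: p - 1e-2 * g, params, grads)
```

Because the trained network only supplies the initial point, the warm start can be fed to the solver for any number of downstream iterations, matching the flexibility the abstract highlights.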