#pragma once

#include <memory>
#include <optional>

#include "Solver.h"
#include "Functional.h"
#include "DataContainer.h"
#include "LineSearchMethod.h"

namespace elsa
{
    /**
     * @brief Class representing the Optimized Gradient Method.
     *
     * This class implements the Optimized Gradient Method as introduced by Kim and Fessler in 2016.
     * OGM is a first-order method for efficiently minimizing convex functions with
     * Lipschitz-continuous gradients. It can be seen as an improvement on Nesterov's Fast Gradient
     * Method.
     *
     * @details
     * # Optimized Gradient Method algorithm overview #
     * The algorithm repeats the following update steps for \f$i = 0, \dots, N-1\f$
     * \f{align*}{
     * y_{i+1} &= x_i - \frac{1}{L} f'(x_i) \\
     * \Theta_{i+1} &=
     * \begin{cases}
     *     \frac{1 + \sqrt{1 + 4 \Theta_i^2}}{2} & i \leq N - 2 \\
     *     \frac{1 + \sqrt{1 + 8 \Theta_i^2}}{2} & i = N - 1
     * \end{cases} \\
     * x_{i+1} &= y_{i+1} + \frac{\Theta_i - 1}{\Theta_{i+1}}(y_{i+1} - y_i) +
     *     \frac{\Theta_i}{\Theta_{i+1}}(y_{i+1} - x_i)
     * \f}
     * The inputs are \f$f \in C_{L}^{1, 1}(\mathbb{R}^d)\f$, \f$x_0 \in \mathbb{R}^d\f$,
     * \f$y_0 = x_0\f$, \f$\Theta_0 = 1\f$.
     *
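     * A rough sketch of one such update with a fixed step size \f$1/L\f$ (illustrative only,
     * not the implementation of this class; `gradientOf`, `L`, `x`, `y`, `yOld` and `theta`
     * are placeholders):
     * @code{.cpp}
     * // gradient step producing the new intermediate iterate y_{i+1}
     * auto yNew = x - gradientOf(x) / L;
     * // momentum parameter update for i <= N - 2
     * auto thetaNew = (1 + std::sqrt(1 + 4 * theta * theta)) / 2;
     * // combine Nesterov-style momentum with the additional OGM momentum term
     * x = yNew + ((theta - 1) / thetaNew) * (yNew - yOld) + (theta / thetaNew) * (yNew - x);
     * yOld = yNew;
     * theta = thetaNew;
     * @endcode
     *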
     * ## Comparison to Nesterov's Fast Gradient Method ##
     * The presented algorithm accelerates FGM by introducing an additional momentum term, which
     * adds only negligible computational cost.
     *
     * ## OGM References ##
     * - Kim, D., Fessler, J.A. _Optimized first-order methods for smooth convex minimization_
     *   (2016) https://doi.org/10.1007/s10107-015-0949-3
     *
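     * ## Usage example ##
     * A minimal usage sketch; the `LeastSquares` functional, the operator `A`, the data `b` and
     * the iteration count are illustrative assumptions and not part of this header:
     * @code{.cpp}
     * // assumed problem setup: some LinearOperator A and measured data b
     * LeastSquares<real_t> problem(A, b);
     *
     * OGM<real_t> solver(problem);
     *
     * // run 100 iterations via the solve() interface inherited from Solver<real_t>
     * auto reconstruction = solver.solve(100);
     * @endcode
     *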
     * @tparam data_t data type for the domain and range of the problem, defaulting to real_t
     *
     * @author
     * - Michael Loipführer - initial code
     * - David Frank - detailed documentation
     */
    template <typename data_t = real_t>
    class OGM : public Solver<data_t>
    {
    public:
        /// Scalar alias
        using Scalar = typename Solver<data_t>::Scalar;

        /**
         * @brief Constructor for OGM, accepting an optimization problem and, optionally, a value
         * for epsilon
         *
         * @param[in] problem the problem that is supposed to be solved
         * @param[in] epsilon affects the stopping condition
         */
        OGM(const Functional<data_t>& problem,
            data_t epsilon = std::numeric_limits<data_t>::epsilon());

        /**
         * @brief Constructor for OGM, accepting an optimization problem, the inverse of a
         * preconditioner and, optionally, a value for epsilon
         *
         * @param[in] problem the problem that is supposed to be solved
         * @param[in] preconditionerInverse the inverse of the preconditioner
         * @param[in] epsilon affects the stopping condition
         */
        OGM(const Functional<data_t>& problem, const LinearOperator<data_t>& preconditionerInverse,
            data_t epsilon = std::numeric_limits<data_t>::epsilon());

        /**
         * @brief Constructor for OGM, accepting an optimization problem, a line search method
         * and, optionally, a value for epsilon
         *
         * @param[in] problem the problem that is supposed to be solved
         * @param[in] lineSearchMethod the line search method to find the step size at
         * each iteration
         * @param[in] epsilon affects the stopping condition
         */
        OGM(const Functional<data_t>& problem, const LineSearchMethod<data_t>& lineSearchMethod,
            data_t epsilon = std::numeric_limits<data_t>::epsilon());

        /**
         * @brief Constructor for OGM, accepting an optimization problem, the inverse of a
         * preconditioner, a line search method and, optionally, a value for epsilon
         *
         * @param[in] problem the problem that is supposed to be solved
         * @param[in] preconditionerInverse the inverse of the preconditioner
         * @param[in] lineSearchMethod the line search method to find the step size at
         * each iteration
         * @param[in] epsilon affects the stopping condition
         */
        OGM(const Functional<data_t>& problem, const LinearOperator<data_t>& preconditionerInverse,
            const LineSearchMethod<data_t>& lineSearchMethod,
            data_t epsilon = std::numeric_limits<data_t>::epsilon());

        /// make copy constructor deletion explicit
        OGM(const OGM<data_t>&) = delete;

        /// default destructor
        ~OGM() override = default;

        DataContainer<data_t> setup(std::optional<DataContainer<data_t>> x0) override;

        DataContainer<data_t> step(DataContainer<data_t> x) override;

        bool shouldStop() const override;

        std::string formatHeader() const override;

        std::string formatStep(const DataContainer<data_t>& x) const override;

    private:
        /// the differentiable optimization problem
        std::unique_ptr<Functional<data_t>> _problem;

        /// variable affecting the stopping condition
        data_t _epsilon;

        /// the inverse of the preconditioner (if supplied)
        std::unique_ptr<LinearOperator<data_t>> _preconditionerInverse{};

        /// the line search method
        std::unique_ptr<LineSearchMethod<data_t>> _lineSearchMethod;

        /// the previous intermediate iterate \f$y_i\f$
        DataContainer<data_t> yOld;
        /// the gradient of the problem at the current iterate
        DataContainer<data_t> gradient;
        /// the previous momentum parameter \f$\Theta_i\f$
        data_t thetaOld;
        /// initial quantity used in the stopping condition
        data_t deltaZero;

        /// implement the polymorphic clone operation
        OGM<data_t>* cloneImpl() const override;

        /// implement the polymorphic comparison operation
        bool isEqual(const Solver<data_t>& other) const override;
    };
} // namespace elsa
