#pragma once

#include "Functional.h"
#include "LineSearchMethod.h"

namespace elsa
{
    /**
     * @brief Newton-Raphson method for finding a root of the directional derivative
     * $\frac{\partial}{\partial \alpha}f(x + \alpha d)$, with $x$ the current guess, $d$ the
     * search direction and $f$ the function to be optimized.
     *
     * The Newton-Raphson method computes the step size by iteratively applying
     * $$
     * \alpha \leftarrow \alpha - \frac{\langle f^\prime(x + \alpha d), \, d\rangle}{\langle d, \, f^{\prime \prime}(x + \alpha d) \, d\rangle}
     * $$
     * where $f^{\prime \prime}$ is the Hessian of $f$. Hence, $f$ needs to be
     * twice continuously differentiable. This is exactly Newton's method applied to
     * $g(\alpha) = f(x + \alpha d)$, whose derivatives are
     * $g^\prime(\alpha) = \langle f^\prime(x + \alpha d), \, d\rangle$ and
     * $g^{\prime \prime}(\alpha) = \langle d, \, f^{\prime \prime}(x + \alpha d) \, d\rangle$.
     *
     * Reference:
     * - An Introduction to the Conjugate Gradient Method Without the Agonizing Pain, by Jonathan
     *   Richard Shewchuk https://www.cs.cmu.edu/~quake-papers/painless-conjugate-gradient.pdf
     */
    template <class data_t>
    class NewtonRaphson : public LineSearchMethod<data_t>
    {
    public:
        explicit NewtonRaphson(const Functional<data_t>& f, index_t iterations = 5);

        ~NewtonRaphson() override = default;

        /// compute the step size for position xi along the search direction di
        data_t solve(DataContainer<data_t> xi, DataContainer<data_t> di) override;

        /// implement the polymorphic comparison operation
        bool isEqual(const LineSearchMethod<data_t>& other) const override;

    private:
        /// implement the polymorphic clone operation
        NewtonRaphson<data_t>* cloneImpl() const override;

        /// number of Newton iterations to perform
        index_t iters_;
    };
} // namespace elsa
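// Usage sketch (illustrative only, not part of this header): one way a caller
// might use NewtonRaphson to pick a step size inside a descent loop. The names
// `f`, `x` and `d` are assumptions here (a twice differentiable Functional, the
// current estimate and a search direction), and the update assumes elsa's usual
// DataContainer operator overloads (scalar multiply and +=).
//
//   #include "NewtonRaphson.h"
//
//   void takeStep(const elsa::Functional<float>& f,
//                 elsa::DataContainer<float>& x,
//                 const elsa::DataContainer<float>& d)
//   {
//       // run 5 Newton iterations on the directional derivative
//       elsa::NewtonRaphson<float> lineSearch(f, 5);
//
//       // step size that (approximately) zeroes d/dalpha f(x + alpha * d)
//       float alpha = lineSearch.solve(x, d);
//
//       x += alpha * d; // move to the new estimate
//   }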