# -*- coding: utf-8 -*-

import abc

import numpy as np

from keepers import Loggable

from .line_searching import LineSearchStrongWolfe


class QuasiNewtonMinimizer(Loggable, object):
    __metaclass__ = abc.ABCMeta

    def __init__(self, line_searcher=LineSearchStrongWolfe(), callback=None,
                 convergence_tolerance=1E-4, convergence_level=3,
                 iteration_limit=None):
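        """
            Parameters
            ----------
            line_searcher : callable
                Line search object used to determine the step length along
                the descend direction (default: LineSearchStrongWolfe()).
            callback : callable, *optional*
                Function called as `callback(energy, iteration_number)` at
                the start of every iteration; raising StopIteration inside
                it aborts the minimization (default: None).
            convergence_tolerance : scalar, *optional*
                Tolerance specifying convergence (default: 1E-4).
            convergence_level : integer, *optional*
                Number of times the tolerance must be undershot before the
                minimization counts as converged (default: 3).
            iteration_limit : integer, *optional*
                Maximum number of iterations performed; None means no limit
                (default: None).
        """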

        self.convergence_tolerance = float(convergence_tolerance)
        self.convergence_level = int(convergence_level)

        if iteration_limit is not None:
            iteration_limit = int(iteration_limit)
        self.iteration_limit = iteration_limit

        self.line_searcher = line_searcher
        self.callback = callback

    def __call__(self, energy):
        """
            Runs the steepest descent minimization.

            Parameters
            ----------
            x0 : field
                Starting guess for the minimization.
            alpha : scalar, *optional*
                Starting step width to be multiplied with normalized gradient
                (default: 1).
            tol : scalar, *optional*
                Tolerance specifying convergence; measured by maximal change in
                `x` (default: 1E-4).
            clevel : integer, *optional*
                Number of times the tolerance should be undershot before
                exiting (default: 8).
            self.iteration_limit : integer, *optional*
                Maximum number of iterations performed (default: 100,000).

            Returns
            -------
            x : field
                Latest `x` of the minimization.
            convergence : integer
                Latest convergence level indicating whether the minimization
                has converged or not.

        """

        convergence = 0
        f_k_minus_1 = None
        step_length = 0
        iteration_number = 1

        while True:
            if self.callback is not None:
                try:
                    self.callback(energy, iteration_number)
                except StopIteration:
                    self.logger.info("Minimization was stopped by callback "
                                     "function.")
                    break

            # compute the gradient at the current position
            gradient = energy.gradient
            # squared norm of the gradient; used for the flatness check and
            # as normalization of the convergence measure below
            gradient_norm = gradient.dot(gradient)

            # check if position is at a flat point
            if gradient_norm == 0:
                self.logger.info("Reached perfectly flat point. Stopping.")
                convergence = self.convergence_level + 2
                break

            current_position = energy.position
            descend_direction = self._get_descend_direction(current_position,
                                                            gradient)

            # compute the step length which minimizes energy.value along the
            # search direction
            step_length, f_k = self.line_searcher.perform_line_search(
                                               energy=energy,
                                               pk=descend_direction,
                                               f_k_minus_1=f_k_minus_1)
            new_position = current_position + step_length * descend_direction
            new_energy = energy.at(new_position)

            # remember the old energy value for the next line search, then
            # accept the new position
            f_k_minus_1 = energy.value
            energy = new_energy

            # check convergence: delta scales the largest gradient component
            # by the step length, normalized by the squared gradient norm
            delta = abs(gradient).max() * (step_length/gradient_norm)
            self.logger.debug("Iteration : %08u   step_length = %3.1E   "
                              "delta = %3.1E" %
                              (iteration_number, step_length, delta))
            if delta == 0:
                convergence = self.convergence_level + 2
                self.logger.info("Found minimum according to line-search. "
                                 "Stopping.")
                break
            elif delta < self.convergence_tolerance:
                convergence += 1
                self.logger.info("Updated convergence level to: %u" %
                                 convergence)
                if convergence == self.convergence_level:
                    self.logger.info("Reached target convergence level.")
                    break
            else:
                convergence = max(0, convergence-1)

            if self.iteration_limit is not None:
                if iteration_number == self.iteration_limit:
                    self.logger.warning("Reached iteration limit. Stopping.")
                    break

            iteration_number += 1

        return energy, convergence

    @abc.abstractmethod
    def _get_descend_direction(self, position, gradient):
        """ Computes the descend direction at `position` for the given
        `gradient`; must be implemented by concrete subclasses. """
        raise NotImplementedError
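

# ---------------------------------------------------------------------------
# Illustrative sketch, not part of the original module: a minimal concrete
# subclass showing how `_get_descend_direction` can be implemented.  Plain
# steepest descent simply follows the negative gradient; the name
# `SteepestDescent` and the assumption that field objects support unary
# negation are illustrative only.


class SteepestDescent(QuasiNewtonMinimizer):
    def _get_descend_direction(self, position, gradient):
        # move against the gradient; the line search fixes the step length
        return -gradient


# Hypothetical usage, assuming an `energy` object that provides `value`,
# `gradient`, `position` and an `at(new_position)` factory method:
#
#     minimizer = SteepestDescent(convergence_tolerance=1E-4,
#                                 iteration_limit=1000)
#     final_energy, convergence = minimizer(energy)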