quasi_newton_minimizer.py 7 KB
Newer Older
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
# NIFTy
# Copyright (C) 2017  Theo Steininger
#
# Author: Theo Steininger
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program.  If not, see <http://www.gnu.org/licenses/>.
18
19
20
21
22
23
24
25
26
27

import abc

import numpy as np

from keepers import Loggable

from .line_searching import LineSearchStrongWolfe


theos's avatar
theos committed
28
class QuasiNewtonMinimizer(Loggable, object):
    """A class used by other minimization methods to find a local minimum.

    Descent minimization methods are used to find a local minimum of a
    scalar function by following a descent direction. This class implements
    the generic minimization procedure; the descent direction has to be
    implemented separately by subclasses via `_get_descend_direction`.

    Parameters
    ----------
    line_searcher : callable, *optional*
        Function which finds the step size in descent direction. If None,
        a fresh LineSearchStrongWolfe instance is created for this
        minimizer. (default: None)
    callback : function, *optional*
        Function f(energy, iteration_number) specified by the user to print
        iteration number and energy value at every iteration step. It accepts
        an Energy object(energy) and integer(iteration_number). May raise
        StopIteration to abort the minimization. (default: None)
    convergence_tolerance : scalar
        Tolerance specifying convergence. (default: 1E-4)
    convergence_level : integer
        Number of times the tolerance should be undershot before
        exiting. (default: 3)
    iteration_limit : integer, *optional*
        Maximum number of iterations performed. (default: None)

    Attributes
    ----------
    convergence_tolerance : float
        Tolerance specifying convergence.
    convergence_level : integer
        Number of times the tolerance should be undershot before
        exiting.
    iteration_limit : integer
        Maximum number of iterations performed.
    line_searcher : callable
        Function which finds the step size into the descent direction.
    callback : function
        Function f(energy, iteration_number) specified by the user to print
        iteration number and energy value at every iteration step. It accepts
        an Energy object(energy) and integer(iteration_number).

    Raises
    ------
    StopIteration
        Raised if
            *callback function does not match the specified form.
    """

    # Python-2 style abstract-base-class declaration (keeps the original
    # interface; has no effect under Python 3 but is harmless there).
    __metaclass__ = abc.ABCMeta

    def __init__(self, line_searcher=None, callback=None,
                 convergence_tolerance=1E-4, convergence_level=3,
                 iteration_limit=None):

        # `np.float` was deprecated in NumPy 1.20 and removed in 1.24;
        # the builtin is the documented replacement.
        self.convergence_tolerance = float(convergence_tolerance)
        # The level is an iteration count compared against an int counter,
        # so store it as int (the original cast it to float).
        self.convergence_level = int(convergence_level)

        if iteration_limit is not None:
            iteration_limit = int(iteration_limit)
        self.iteration_limit = iteration_limit

        # Avoid a shared, potentially stateful default instance created at
        # class-definition time: build a fresh line searcher per minimizer.
        if line_searcher is None:
            line_searcher = LineSearchStrongWolfe()
        self.line_searcher = line_searcher
        self.callback = callback

    def __call__(self, energy):
        """Runs the minimization on the provided Energy class.

        Accepts the NIFTY Energy class which describes our system and runs
        the minimization to find the minimum of the system.

        Parameters
        ----------
        energy : Energy object
           Energy object provided by the user from which we can calculate
           the energy, gradient and curvature at a specific point.

        Returns
        -------
        energy : Energy object
            Latest `energy` of the minimization.
        convergence : integer
            Latest convergence level indicating whether the minimization
            has converged or not.

        Note
        ----
        It stops the minimization if:
            *callback function raises StopIteration.
            *a perfectly flat point is reached.
            *according to line-search the minimum is found.
            *target convergence level is reached.
            *iteration limit is reached.
        """

        convergence = 0
        f_k_minus_1 = None
        iteration_number = 1

        while True:
            # Give the user a chance to inspect/abort each iteration.
            if self.callback is not None:
                try:
                    self.callback(energy, iteration_number)
                except StopIteration:
                    self.logger.info("Minimization was stopped by callback "
                                     "function.")
                    break

            # compute the gradient for the current location
            gradient = energy.gradient
            # NOTE(review): g.dot(g) is the *squared* Euclidean norm, not
            # ||g||; the convergence measure below inherits this scaling.
            gradient_norm = gradient.dot(gradient)

            # check if position is at a flat point
            if gradient_norm == 0:
                self.logger.info("Reached perfectly flat point. Stopping.")
                convergence = self.convergence_level+2
                break

            # current position is encoded in energy object
            descend_direction = self._get_descend_direction(energy)

            # compute the step length, which minimizes energy.value along
            # the search direction
            step_length, f_k, new_energy = \
                self.line_searcher.perform_line_search(
                                               energy=energy,
                                               pk=descend_direction,
                                               f_k_minus_1=f_k_minus_1)
            f_k_minus_1 = energy.value
            energy = new_energy

            # check convergence: step length scaled by the (squared)
            # gradient norm and the largest gradient component
            delta = abs(gradient).max() * (step_length/gradient_norm)
            self.logger.debug("Iteration : %08u   step_length = %3.1E   "
                              "delta = %3.1E" %
                              (iteration_number, step_length, delta))
            if delta == 0:
                convergence = self.convergence_level + 2
                self.logger.info("Found minimum according to line-search. "
                                 "Stopping.")
                break
            elif delta < self.convergence_tolerance:
                convergence += 1
                self.logger.info("Updated convergence level to: %u" %
                                 convergence)
                if convergence == self.convergence_level:
                    self.logger.info("Reached target convergence level.")
                    break
            else:
                # A non-converging step lowers the level again (floored at 0).
                convergence = max(0, convergence-1)

            if self.iteration_limit is not None:
                if iteration_number == self.iteration_limit:
                    self.logger.warn("Reached iteration limit. Stopping.")
                    break

            iteration_number += 1

        return energy, convergence

    @abc.abstractmethod
    def _get_descend_direction(self, energy):
        """Return the descent direction at the position encoded in `energy`.

        Must be implemented by subclasses.
        """
        raise NotImplementedError