# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program.  If not, see <http://www.gnu.org/licenses/>.
#
# Copyright(C) 2013-2018 Max-Planck-Society
#
# NIFTy is being developed at the Max-Planck-Institut fuer Astrophysik
# and financially supported by the Studienstiftung des deutschen Volkes.

from .iteration_controller import IterationController
from .. import dobj


class GradientNormController(IterationController):
    """An iteration controller checking (mainly) the L2 gradient norm.

    Parameters
    ----------
    tol_abs_gradnorm : float, optional
        If the L2 norm of the energy gradient is below this value, the
        convergence counter will be increased in this iteration.
    tol_rel_gradnorm : float, optional
        If the L2 norm of the energy gradient divided by its initial L2 norm
        is below this value, the convergence counter will be increased in this
        iteration.
    convergence_level : int, default=1
        The number that the convergence counter must reach before the
        iteration is considered to be converged.
    iteration_limit : int, optional
        The maximum number of iterations that will be carried out.
    name : str, optional
        If supplied, this string and some diagnostic information will be
        printed after every iteration.
    """

    def __init__(self, tol_abs_gradnorm=None, tol_rel_gradnorm=None,
                 convergence_level=1, iteration_limit=None, name=None):
        super(GradientNormController, self).__init__()
        self._tol_abs_gradnorm = tol_abs_gradnorm
        self._tol_rel_gradnorm = tol_rel_gradnorm
        self._convergence_level = convergence_level
        self._iteration_limit = iteration_limit
        self._name = name

    def start(self, energy):
        """ Start a new iteration.

        The iteration and convergence counters are set to 0.

        Parameters
        ----------
        energy : Energy
            The energy functional to be minimized.

        Returns
        -------
        int
            Iteration status; can be CONVERGED or CONTINUE.
        """
        self._itcount = -1
        self._ccount = 0
        if self._tol_rel_gradnorm is not None:
            self._tol_rel_gradnorm_now = self._tol_rel_gradnorm \
                                       * energy.gradient_norm
        return self.check(energy)

    def check(self, energy):
        """ Check for convergence.

        - Increase the iteration counter by 1.
        - If any of the convergence criteria are fulfilled, increase the
          convergence counter by 1; else decrease it by 1 (but not below 0).
        - If the convergence counter reaches the convergence level, return
          CONVERGED.
        - If the iteration counter reaches the iteration limit, return
          CONVERGED.
        - Otherwise return CONTINUE.

        Parameters
        ----------
        energy : Energy
            The current solution estimate.

        Returns
        -------
        int
            Iteration status; can be CONVERGED or CONTINUE.
        """
        self._itcount += 1

        inclvl = False
        if self._tol_abs_gradnorm is not None:
            if energy.gradient_norm <= self._tol_abs_gradnorm:
                inclvl = True
        if self._tol_rel_gradnorm is not None:
            if energy.gradient_norm <= self._tol_rel_gradnorm_now:
                inclvl = True
        if inclvl:
            self._ccount += 1
        else:
            self._ccount = max(0, self._ccount-1)

        # report
        if self._name is not None:
            msg = self._name+":"
            msg += " Iteration #" + str(self._itcount)
            msg += " energy={:.6E}".format(energy.value)
            msg += " gradnorm={:.2E}".format(energy.gradient_norm)
            msg += " clvl=" + str(self._ccount)
            dobj.mprint(msg)
            # self.logger.info(msg)

        # Are we done?
        if self._iteration_limit is not None:
            if self._itcount >= self._iteration_limit:
                dobj.mprint(
                    "Warning: Iteration limit reached. Assuming convergence")
                return self.CONVERGED
        if self._ccount >= self._convergence_level:
            return self.CONVERGED

        return self.CONTINUE
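

# The block below is a minimal usage sketch, not part of the NIFTy API: it
# shows how a minimizer would typically drive this controller (one call to
# start(), then check() once per iteration).  _DemoEnergy is a hypothetical
# stand-in providing only the two attributes the controller actually reads
# (value and gradient_norm); the decaying numbers are made up for
# illustration.  Because of the relative imports above, run it as a module
# from inside the package rather than as a standalone script.
if __name__ == "__main__":
    class _DemoEnergy(object):
        """Fake energy exposing just `value` and `gradient_norm`."""
        def __init__(self, value, gradient_norm):
            self.value = value
            self.gradient_norm = gradient_norm

    controller = GradientNormController(
        tol_abs_gradnorm=1e-3, convergence_level=2, iteration_limit=50)

    energy = _DemoEnergy(value=1., gradient_norm=1.)
    status = controller.start(energy)
    while status == controller.CONTINUE:
        # A real minimizer would compute a new position here; we simply
        # shrink the fake gradient norm to mimic convergence.
        energy = _DemoEnergy(value=0.5*energy.value,
                             gradient_norm=0.1*energy.gradient_norm)
        status = controller.check(energy)
    print("demo converged: " + str(status == controller.CONVERGED))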