iteration_controllers.py
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program.  If not, see <http://www.gnu.org/licenses/>.
#
# Copyright(C) 2013-2020 Max-Planck-Society
#
# NIFTy is being developed at the Max-Planck-Institut fuer Astrophysik.

import functools
from time import time

import numpy as np

from ..logger import logger
from ..utilities import NiftyMeta


class IterationController(metaclass=NiftyMeta):
    """The abstract base class for all iteration controllers.
    An iteration controller is an object that monitors the progress of a
    minimization iteration. At the begin of the minimization, its start()
    method is called with the energy object at the initial position.
    Afterwards, its check() method is called during every iteration step with
    the energy object describing the current position.
    Based on that information, the iteration controller has to decide whether
    iteration needs to progress further (in this case it returns CONTINUE), or
    if sufficient convergence has been reached (in this case it returns
    CONVERGED), or if some error has been detected (then it returns ERROR).

    The concrete convergence criteria can be chosen by inheriting from this
    class; the implementer has full flexibility to use whichever criteria are
    appropriate for a particular problem - as long as they can be computed from
    the information passed to the controller during the iteration process.

    For analyzing minimization procedures, IterationControllers can log energy
    values together with the respective time stamps. To activate this feature,
    `activate_and_reset_logging()` needs to be called.
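
    Examples
    --------
    A minimal sketch of the logging feature; the concrete controller class and
    the minimizer run are placeholders chosen for illustration only::

        ic = GradientNormController(tol_abs_gradnorm=1e-5, iteration_limit=100)
        ic.activate_and_reset_logging()
        # ... run a minimizer that calls ic.start() and ic.check() ...
        history = ic.pop_history()  # list of (unix timestamp, energy) tuples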
    """

    CONVERGED, CONTINUE, ERROR = list(range(3))

    def __init__(self):
        self._history = None

    def start(self, energy):
        """Starts the iteration.

        Parameters
        ----------
        energy : Energy object
           Energy object at the start of the iteration

        Returns
        -------
        status : integer status, can be CONVERGED, CONTINUE or ERROR
        """
        raise NotImplementedError

    def check(self, energy):
        """Checks the state of the iteration. Called after every step.

        Parameters
        ----------
        energy : Energy object
           Energy object at the current position

        Returns
        -------
        status : integer status, can be CONVERGED, CONTINUE or ERROR
        """
        raise NotImplementedError

    def pop_history(self):
        """Returns the collected history of energy values and resets the
        history afterwards.

        Returns
        -------
        list : List of (unix timestamp, energy value) tuples
        """
        if self._history is None:
            raise RuntimeError('No history was taken')
        res = self._history
        self._history = []
        return res

    def activate_and_reset_logging(self):
        """Activates the logging functionality. If the log has been populated
        before, it is reset."""
        self._history = []

    def activate_logging(self):
        """Activates the logging functionality. If the log has been populated
        before, it stays as it is."""
        if self._history is None:
            self._history = []


def append_history(func):
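    """Decorator for the ``start``/``check`` methods of iteration controllers.

    If logging has been activated on the controller (``args[0]``), a
    ``(unix timestamp, energy value)`` tuple is appended to its history before
    the wrapped method is executed.
    """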
    @functools.wraps(func)
    def wrapper(*args, **kwargs):
        if args[0]._history is not None:
            energy_val = args[1].value
            args[0]._history.append((time(), energy_val))
        return func(*args, **kwargs)
    return wrapper


class GradientNormController(IterationController):
    """An iteration controller checking (mainly) the L2 gradient norm.

    Parameters
    ----------
    tol_abs_gradnorm : float, optional
        If the L2 norm of the energy gradient is below this value, the
        convergence counter will be increased in this iteration.
    tol_rel_gradnorm : float, optional
        If the L2 norm of the energy gradient divided by its initial L2 norm
        is below this value, the convergence counter will be increased in this
        iteration.
    convergence_level : int, default=1
        The number that the convergence counter must reach before the
        iteration is considered to be converged.
    iteration_limit : int, optional
        The maximum number of iterations that will be carried out.
    name : str, optional
        If supplied, this string and some diagnostic information will be
        printed after every iteration.
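
    Examples
    --------
    A minimal usage sketch; ``ift`` stands for the NIFTy package namespace,
    and ``ift.ConjugateGradient`` as well as the ``energy`` object are
    assumptions made for illustration, not part of this module::

        ic = ift.GradientNormController(name="CG", tol_abs_gradnorm=1e-5,
                                        iteration_limit=500)
        minimizer = ift.ConjugateGradient(ic)
        energy, convergence = minimizer(energy)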
    """

    def __init__(self, tol_abs_gradnorm=None, tol_rel_gradnorm=None,
                 convergence_level=1, iteration_limit=None, name=None):
        super(GradientNormController, self).__init__()
        self._tol_abs_gradnorm = tol_abs_gradnorm
        self._tol_rel_gradnorm = tol_rel_gradnorm
        self._convergence_level = convergence_level
        self._iteration_limit = iteration_limit
        self._name = name

    @append_history
    def start(self, energy):
        self._itcount = -1
        self._ccount = 0
        if self._tol_rel_gradnorm is not None:
            self._tol_rel_gradnorm_now = self._tol_rel_gradnorm \
                                       * energy.gradient_norm
        return self.check(energy)

    @append_history
    def check(self, energy):
        self._itcount += 1

        inclvl = False
        if self._tol_abs_gradnorm is not None:
            if energy.gradient_norm <= self._tol_abs_gradnorm:
                inclvl = True
        if self._tol_rel_gradnorm is not None:
            if energy.gradient_norm <= self._tol_rel_gradnorm_now:
                inclvl = True
        if inclvl:
            self._ccount += 1
        else:
            self._ccount = max(0, self._ccount-1)

        # report
        if self._name is not None:
            logger.info(
                "{}: Iteration #{} energy={:.6E} gradnorm={:.2E} clvl={}"
                .format(self._name, self._itcount, energy.value,
                        energy.gradient_norm, self._ccount))

        # Are we done?
        if self._iteration_limit is not None:
            if self._itcount >= self._iteration_limit:
                logger.warning(
                    "{}Iteration limit reached. Assuming convergence"
                    .format("" if self._name is None else self._name+": "))
                return self.CONVERGED
        if self._ccount >= self._convergence_level:
            return self.CONVERGED

        return self.CONTINUE


class GradInfNormController(IterationController):
    """An iteration controller checking (mainly) the L_infinity gradient norm.

    Parameters
    ----------
    tol : float
        If the L_infinity norm of the energy gradient, divided by the absolute
        value of the energy, is below this value, the convergence counter will
        be increased in this iteration.
    convergence_level : int, default=1
        The number that the convergence counter must reach before the
        iteration is considered to be converged.
    iteration_limit : int, optional
        The maximum number of iterations that will be carried out.
    name : str, optional
        If supplied, this string and some diagnostic information will be
        printed after every iteration.
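
    Notes
    -----
    The quantity that is compared against `tol` after every step is (cf.
    `check`)::

        crit = energy.gradient.norm(np.inf) / abs(energy.value)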
    """

    def __init__(self, tol, convergence_level=1, iteration_limit=None,
                 name=None):
        super(GradInfNormController, self).__init__()
        self._tol = tol
        self._convergence_level = convergence_level
        self._iteration_limit = iteration_limit
        self._name = name

    @append_history
    def start(self, energy):
        self._itcount = -1
        self._ccount = 0
        return self.check(energy)

    @append_history
    def check(self, energy):
        self._itcount += 1

        crit = energy.gradient.norm(np.inf) / abs(energy.value)
        if self._tol is not None and crit <= self._tol:
            self._ccount += 1
        else:
            self._ccount = max(0, self._ccount-1)

        # report
        if self._name is not None:
            logger.info(
                "{}: Iteration #{} energy={:.6E} crit={:.2E} clvl={}"
                .format(self._name, self._itcount, energy.value,
                        crit, self._ccount))

        # Are we done?
        if self._iteration_limit is not None:
            if self._itcount >= self._iteration_limit:
                logger.warning(
                    "{} Iteration limit reached. Assuming convergence"
                    .format("" if self._name is None else self._name+": "))
                return self.CONVERGED
        if self._ccount >= self._convergence_level:
            return self.CONVERGED

        return self.CONTINUE


class DeltaEnergyController(IterationController):
    """An iteration controller checking (mainly) the energy change from one
    iteration to the next.

    Parameters
    ----------
    tol_rel_deltaE : float
        If the absolute difference between the last and current energy values,
        divided by the maximum of their absolute values, is below this value,
        the convergence counter will be increased in this iteration.
    convergence_level : int, default=1
        The number that the convergence counter must reach before the
        iteration is considered to be converged.
    iteration_limit : int, optional
        The maximum number of iterations that will be carried out.
    name : str, optional
        If supplied, this string and some diagnostic information will be
        printed after every iteration.
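
    Notes
    -----
    With ``Eold`` and ``Eval`` denoting the previous and the current energy
    value, the relative change compared against `tol_rel_deltaE` after every
    step is (cf. `check`)::

        rel = abs(Eold - Eval) / max(abs(Eold), abs(Eval))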
    """

    def __init__(self, tol_rel_deltaE, convergence_level=1,
                 iteration_limit=None, name=None):
        super(DeltaEnergyController, self).__init__()
        self._tol_rel_deltaE = tol_rel_deltaE
        self._convergence_level = convergence_level
        self._iteration_limit = iteration_limit
        self._name = name

    @append_history
    def start(self, energy):
        self._itcount = -1
        self._ccount = 0
        self._Eold = 0.
        return self.check(energy)

    @append_history
    def check(self, energy):
        self._itcount += 1

        inclvl = False
        Eval = energy.value
        rel = abs(self._Eold-Eval)/max(abs(self._Eold), abs(Eval))
        if self._itcount > 0:
            if rel < self._tol_rel_deltaE:
                inclvl = True
        self._Eold = Eval
        if inclvl:
            self._ccount += 1
        else:
            self._ccount = max(0, self._ccount-1)

        # report
        if self._name is not None:
            logger.info(
                "{}: Iteration #{} energy={:.6E} reldiff={:.6E} clvl={}"
                .format(self._name, self._itcount, Eval, rel, self._ccount))

        # Are we done?
        if self._iteration_limit is not None:
            if self._itcount >= self._iteration_limit:
                logger.warning(
                    "{} Iteration limit reached. Assuming convergence"
                    .format("" if self._name is None else self._name+": "))
                return self.CONVERGED
        if self._ccount >= self._convergence_level:
            return self.CONVERGED

        return self.CONTINUE


class AbsDeltaEnergyController(IterationController):
    """An iteration controller checking (mainly) the energy change from one
    iteration to the next.

    Parameters
    ----------
    deltaE : float
        If the absolute difference between the last and current energy values
        is below this value, the convergence counter will be increased in this
        iteration.
    convergence_level : int, default=1
        The number that the convergence counter must reach before the
        iteration is considered to be converged.
    iteration_limit : int, optional
        The maximum number of iterations that will be carried out.
    name : str, optional
        If supplied, this string and some diagnostic information will be
        printed after every iteration.
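
    Examples
    --------
    A minimal usage sketch; ``ift`` stands for the NIFTy package namespace,
    and ``ift.NewtonCG`` as well as the ``energy`` object are assumptions made
    for illustration, not part of this module::

        ic = ift.AbsDeltaEnergyController(deltaE=0.5, convergence_level=2,
                                          iteration_limit=35, name="Newton")
        minimizer = ift.NewtonCG(ic)
        energy, convergence = minimizer(energy)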
    """

    def __init__(self, deltaE, convergence_level=1, iteration_limit=None,
                 name=None):
        super(AbsDeltaEnergyController, self).__init__()
        self._deltaE = deltaE
        self._convergence_level = convergence_level
        self._iteration_limit = iteration_limit
        self._name = name

    @append_history
    def start(self, energy):
        self._itcount = -1
        self._ccount = 0
        self._Eold = 0.
        return self.check(energy)

    @append_history
    def check(self, energy):
        self._itcount += 1

        inclvl = False
        Eval = energy.value
        diff = abs(self._Eold-Eval)
        if self._itcount > 0:
            if diff < self._deltaE:
                inclvl = True
        self._Eold = Eval
        if inclvl:
            self._ccount += 1
        else:
            self._ccount = max(0, self._ccount-1)

        # report
        if self._name is not None:
            logger.info(
                "{}: Iteration #{} energy={:.6E} diff={:.6E} crit={:.1E} clvl={}"
                .format(self._name, self._itcount, Eval, diff, self._deltaE,
                        self._ccount))

        # Are we done?
        if self._iteration_limit is not None:
            if self._itcount >= self._iteration_limit:
                logger.warning(
                    "{} Iteration limit reached. Assuming convergence"
                    .format("" if self._name is None else self._name+": "))
                return self.CONVERGED
        if self._ccount >= self._convergence_level:
            return self.CONVERGED

        return self.CONTINUE