Commit fd42d3d8 authored by Theo Steininger's avatar Theo Steininger
Browse files

Refactoring

parent 0b6d6abd
......@@ -19,52 +19,54 @@
from __future__ import print_function
from .iteration_controller import IterationController
class DefaultIterationController(IterationController):
    """Gradient-norm based iteration controller.

    On every ``check()`` a convergence counter is incremented once for
    each satisfied tolerance (absolute and/or relative gradient norm).
    The controller reports ``CONVERGED`` when the counter reaches
    ``convergence_level`` or when ``iteration_limit`` is exceeded, and
    ``CONTINUE`` otherwise.

    Parameters
    ----------
    tol_abs_gradnorm : float or None
        Absolute gradient-norm threshold; ``None`` disables this check.
    tol_rel_gradnorm : float or None
        Threshold relative to the gradient norm of the energy passed to
        ``start()``; ``None`` disables this check.
    convergence_level : int
        Value the convergence counter must reach to report CONVERGED.
    iteration_limit : int or None
        Maximum number of iterations; ``None`` means unlimited.
    name : str
        Prefix used in verbose progress messages.
    verbose : bool or None
        If truthy, a progress message is logged on every ``check()``.
    """

    def __init__(self, tol_abs_gradnorm=None, tol_rel_gradnorm=None,
                 convergence_level=1, iteration_limit=None, name="",
                 verbose=None):
        super(DefaultIterationController, self).__init__()
        self._tol_abs_gradnorm = tol_abs_gradnorm
        self._tol_rel_gradnorm = tol_rel_gradnorm
        # Absolute value of the relative tolerance; computed in start()
        # from the initial energy's gradient norm.
        self._tol_rel_gradnorm_now = None
        self._convergence_level = convergence_level
        self._iteration_limit = iteration_limit
        self._name = name
        self._verbose = verbose

    def start(self, energy):
        """Reset all counters and perform the first check on `energy`."""
        self._iteration_count = -1
        self._convergence_count = 0
        if self._tol_rel_gradnorm is not None:
            # Freeze the relative tolerance against the starting gradient
            # norm so later checks compare against a fixed threshold.
            self._tol_rel_gradnorm_now = \
                self._tol_rel_gradnorm * energy.gradient_norm
        return self.check(energy)

    def check(self, energy):
        """Inspect `energy` and return ``CONVERGED`` or ``CONTINUE``."""
        self._iteration_count += 1

        if self._tol_abs_gradnorm is not None:
            if energy.gradient_norm <= self._tol_abs_gradnorm:
                self._convergence_count += 1
        if self._tol_rel_gradnorm is not None:
            if energy.gradient_norm <= self._tol_rel_gradnorm_now:
                self._convergence_count += 1

        # report progress
        if self._verbose:
            msg = ""
            if self._name:  # skip the prefix for an empty or None name
                msg += self._name + ":"
            msg += " Iteration #" + str(self._iteration_count)
            msg += " gradnorm=" + str(energy.gradient_norm)
            msg += " convergence level=" + str(self._convergence_count)
            self.logger.info(msg)

        # Are we done?
        # NOTE(review): hitting the iteration limit also reports
        # CONVERGED rather than a distinct status — confirm intended.
        if self._iteration_limit is not None:
            if self._iteration_count >= self._iteration_limit:
                return self.CONVERGED
        if self._convergence_count >= self._convergence_level:
            return self.CONVERGED
        return self.CONTINUE
......@@ -18,11 +18,9 @@
from __future__ import division
import abc
import numpy as np
from .minimizer import Minimizer
from .line_searching import LineSearchStrongWolfe
from future.utils import with_metaclass
class DescentMinimizer(Minimizer):
......@@ -112,7 +110,6 @@ class DescentMinimizer(Minimizer):
if status != controller.CONTINUE:
return energy, status
@abc.abstractmethod
def get_descent_direction(self, energy):
    """Return the descent direction to step along for *energy*.

    Abstract hook: concrete minimizers must override this; calling the
    base implementation raises NotImplementedError.
    """
    raise NotImplementedError
......@@ -20,13 +20,12 @@ from builtins import range
import abc
from ..nifty_meta import NiftyMeta
import numpy as np
from keepers import Loggable
from future.utils import with_metaclass
class IterationController(with_metaclass(NiftyMeta, type('NewBase',
(Loggable, object), {}))):
class IterationController(
with_metaclass(NiftyMeta, type('NewBase', (Loggable, object), {}))):
"""The abstract base class for all iteration controllers.
An iteration controller is an object that monitors the progress of a
minimization iteration. At the begin of the minimization, its start()
......@@ -46,6 +45,18 @@ class IterationController(with_metaclass(NiftyMeta, type('NewBase',
CONVERGED, CONTINUE, ERROR = list(range(3))
def __init__(self):
    # Shared progress counters. -1 marks "not started yet"; controller
    # implementations re-initialize these in start() before a new run.
    self._iteration_count = -1
    self._convergence_count = 0
@property
def iteration_count(self):
    """Read-only view of the controller's iteration counter."""
    return self._iteration_count
@property
def convergence_count(self):
    """Read-only view of the controller's convergence counter."""
    return self._convergence_count
@abc.abstractmethod
def start(self, energy):
"""
......
......@@ -19,12 +19,12 @@
import abc
from ..nifty_meta import NiftyMeta
import numpy as np
from keepers import Loggable
from future.utils import with_metaclass
class Minimizer(with_metaclass(NiftyMeta, type('NewBase', (Loggable, object), {}))):
class Minimizer(
with_metaclass(NiftyMeta, type('NewBase', (Loggable, object), {}))):
""" A base class used by all minimizers.
"""
......
Supports Markdown
0% or .
You are about to add 0 people to the discussion. Proceed with caution.
Finish editing this message first!
Please register or sign in to comment