Commit 3610da8e authored by Martin Reinecke

... and more PEP8

parent a8302a53
@@ -17,8 +17,6 @@
 # and financially supported by the Studienstiftung des deutschen Volkes.
 from __future__ import division
-import numpy as np
-
 from .minimizer import Minimizer
@@ -82,7 +80,7 @@ class ConjugateGradient(Minimizer):
         while True:
             q = energy.curvature(d)
             ddotq = d.vdot(q).real
-            if ddotq==0.:
+            if ddotq == 0.:
                 return energy, controller.ERROR
             alpha = previous_gamma/ddotq
@@ -90,7 +88,7 @@ class ConjugateGradient(Minimizer):
                 return energy, controller.ERROR
             r -= q * alpha
-            energy = energy.at_with_grad(energy.position+d*alpha,-r)
+            energy = energy.at_with_grad(energy.position+d*alpha, -r)
             status = self._controller.check(energy)
             if status != controller.CONTINUE:
...
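For context: the guarded quantity in the hunk above is the CG step size alpha = gamma_prev / (d^T Q d), which is undefined when the curvature product vanishes, hence the `ddotq == 0.` error path. A minimal NumPy sketch of that inner step, with plain arrays standing in for NIFTy's Field and Energy objects (all names here are illustrative, not the library API):

```python
import numpy as np

def cg_step(x, r, d, Q, previous_gamma):
    """One conjugate-gradient step for the SPD system Q x = b (sketch)."""
    q = Q.dot(d)                   # curvature applied to the direction
    ddotq = d.dot(q)               # d^T Q d
    if ddotq == 0.:                # the guard from the hunk above
        raise RuntimeError("vanishing curvature product")
    alpha = previous_gamma / ddotq
    x = x + d * alpha              # mirrors `energy.position + d*alpha`
    r = r - q * alpha              # mirrors `r -= q * alpha`
    gamma = r.dot(r)
    d = r + d * (gamma / previous_gamma)   # next conjugate direction
    return x, r, d, gamma

# Tiny usage example on a 2x2 system; exact CG finishes in <= 2 steps here.
Q = np.array([[4., 1.], [1., 3.]])
b = np.array([1., 2.])
x = np.zeros(2)
r = b - Q.dot(x)
d, gamma = r.copy(), r.dot(r)
for _ in range(2):
    x, r, d, gamma = cg_step(x, r, d, Q, gamma)
```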
@@ -19,10 +19,11 @@
 from __future__ import print_function
 from .iteration_controller import IterationController

+
 class DefaultIterationController(IterationController):
-    def __init__ (self, tol_abs_gradnorm=None, tol_rel_gradnorm=None,
-                  convergence_level=1, iteration_limit=None, name=None,
-                  verbose=None):
+    def __init__(self, tol_abs_gradnorm=None, tol_rel_gradnorm=None,
+                 convergence_level=1, iteration_limit=None, name=None,
+                 verbose=None):
         super(DefaultIterationController, self).__init__()
         self._tol_abs_gradnorm = tol_abs_gradnorm
         self._tol_rel_gradnorm = tol_rel_gradnorm
@@ -57,8 +58,8 @@ class DefaultIterationController(IterationController):
             msg += " Iteration #" + str(self._itcount)
             msg += " gradnorm=" + str(energy.gradient_norm)
             msg += " clvl=" + str(self._ccount)
-            print (msg)
-            #self.logger.info(msg)
+            print(msg)
+            # self.logger.info(msg)

         # Are we done?
         if self._iteration_limit is not None:
...
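The reformatted constructor signature lists the controller's tuning knobs in one place. A hypothetical usage sketch (the import path, the constructor keyword, and the pairing with ConjugateGradient are assumptions for illustration, not taken from this diff):

```python
# Import path and constructor signature are assumptions; adjust to the
# actual package layout.
from nifty.minimization import ConjugateGradient, DefaultIterationController

controller = DefaultIterationController(
    tol_abs_gradnorm=1e-5,   # declare convergence once |grad| drops below this
    convergence_level=3,     # ...and the criterion holds 3 checks in a row
    iteration_limit=500,     # hard cap regardless of convergence
    name="CG",               # prefix for the printed status line
    verbose=True)            # print "Iteration #... gradnorm=... clvl=..." lines

minimizer = ConjugateGradient(controller)
# energy, status = minimizer(initial_energy)
```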
@@ -18,11 +18,9 @@
 from __future__ import division
 import abc
-import numpy as np
 from .minimizer import Minimizer
 from .line_searching import LineSearchStrongWolfe
-from future.utils import with_metaclass


 class DescentMinimizer(Minimizer):
@@ -107,7 +105,6 @@ class DescentMinimizer(Minimizer):
             if status != controller.CONTINUE:
                 return energy, status

-
     @abc.abstractmethod
     def get_descent_direction(self, energy):
         raise NotImplementedError
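`get_descent_direction` is the abstract hook that concrete minimizers must fill in. A minimal sketch of a subclass, assuming a hypothetical module path and the `energy.gradient` attribute that appears elsewhere in this commit:

```python
# Module path below is a guess for illustration only.
from nifty.minimization.descent_minimizer import DescentMinimizer

class SteepestDescent(DescentMinimizer):
    """Simplest possible descent rule: step against the gradient."""
    def get_descent_direction(self, energy):
        # `energy.gradient` matches the attribute used in the NonlinearCG hunk
        return -energy.gradient
```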
@@ -19,11 +19,9 @@
 from builtins import range
 import abc
 from ..nifty_meta import NiftyMeta
-import numpy as np
-
 from future.utils import with_metaclass


 class IterationController(with_metaclass(NiftyMeta, type('NewBase',
                                                          (object,), {}))):
     """The abstract base class for all iteration controllers.
...
@@ -17,12 +17,13 @@
 # and financially supported by the Studienstiftung des deutschen Volkes.

 import abc
-from ...energies import LineEnergy
-
 from future.utils import with_metaclass


-class LineSearch(with_metaclass(abc.ABCMeta, with_metaclass(abc.ABCMeta, type('NewBase', (object,), {})))):
+class LineSearch(with_metaclass(abc.ABCMeta,
+                                with_metaclass(abc.ABCMeta,
+                                               type('NewBase',
+                                                    (object,), {})))):
     """Class for determining the optimal step size along some descent direction.

     Initialize the line search procedure which can be used by a specific line
...
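The wrapped class statement uses `future.utils.with_metaclass`, which builds a temporary base class so a metaclass can be declared with the same syntax on Python 2 and 3. A self-contained sketch of the pattern with `abc.ABCMeta`:

```python
import abc
from future.utils import with_metaclass  # Py2/Py3-portable metaclass helper

class Base(with_metaclass(abc.ABCMeta, object)):
    """On Python 3 alone this would be `class Base(metaclass=abc.ABCMeta)`."""
    @abc.abstractmethod
    def run(self):
        raise NotImplementedError

class Impl(Base):
    def run(self):
        return 42

# Base() raises TypeError (abstract method not implemented); Impl().run() == 42.
```

The doubled `with_metaclass(abc.ABCMeta, with_metaclass(abc.ABCMeta, ...))` nesting in the hunk appears to apply the same metaclass twice, which looks redundant but harmless.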
@@ -109,7 +109,7 @@ class LineSearchStrongWolfe(LineSearch):
         phi_0 = le_0.value
         phiprime_0 = le_0.directional_derivative
         if phiprime_0 >= 0:
-            raise RuntimeError ("search direction must be a descent direction")
+            raise RuntimeError("search direction must be a descent direction")

         # set alphas
         alpha0 = 0.
@@ -262,7 +262,7 @@ class LineSearchStrongWolfe(LineSearch):
                                                  phiprime_alphaj)
             else:
-                #self.logger.error("The line search algorithm (zoom) did not "
-                #                  "converge.")
+                # self.logger.error("The line search algorithm (zoom) did not "
+                #                   "converge.")
                 return le_alphaj
...
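The zoom phase referenced above hunts for a step length satisfying the strong Wolfe conditions: sufficient decrease plus a strong bound on the directional derivative. A sketch of the acceptance test, using conventional textbook constants for c1 and c2 (these defaults are not read from this diff):

```python
def satisfies_strong_wolfe(phi_0, phiprime_0, phi_a, phiprime_a, alpha,
                           c1=1e-4, c2=0.9):
    """Check a trial step alpha, where phi(a) = f(x + a*d).

    phi_0, phiprime_0: value and slope at alpha = 0;
    phi_a, phiprime_a: value and slope at the trial alpha.
    """
    sufficient_decrease = phi_a <= phi_0 + c1 * alpha * phiprime_0
    strong_curvature = abs(phiprime_a) <= c2 * abs(phiprime_0)
    return sufficient_decrease and strong_curvature
```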
@@ -18,11 +18,9 @@
 import abc
 from ..nifty_meta import NiftyMeta
-import numpy as np
-
 from future.utils import with_metaclass


 class Minimizer(with_metaclass(NiftyMeta, type('NewBase', (object,), {}))):
     """ A base class used by all minimizers.
     """
...
@@ -17,8 +17,6 @@
 # and financially supported by the Studienstiftung des deutschen Volkes.
 from __future__ import division
-import numpy as np
-
 from .minimizer import Minimizer
 from .line_searching import LineSearchStrongWolfe
@@ -71,7 +69,6 @@ class NonlinearCG(Minimizer):
         while True:
             grad_old = energy.gradient
-            gnold = energy.gradient_norm
             f_k = energy.value
             energy = self._line_searcher.perform_line_search(energy, p,
                                                              f_k_minus_1)
...
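After dropping the unused `gnold`, the loop still stores `grad_old`, which is what a nonlinear CG needs to build the next direction coefficient beta. A sketch of the common Polak-Ribiere choice (whether this class uses this exact formula is not visible in the hunk):

```python
import numpy as np

def beta_polak_ribiere(grad_new, grad_old):
    """Coefficient for d_new = -grad_new + beta * d_old.

    Clipped at zero (the "PR+" variant) so the new direction
    remains a descent direction. Gradients are NumPy arrays here.
    """
    num = grad_new.dot(grad_new - grad_old)
    den = grad_old.dot(grad_old)
    return max(0.0, num / den)
```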