Commit 3610da8e authored by Martin Reinecke's avatar Martin Reinecke

... and more PEP8

parent a8302a53
......@@ -17,8 +17,6 @@
# and financially supported by the Studienstiftung des deutschen Volkes.
from __future__ import division
import numpy as np
from .minimizer import Minimizer
......@@ -82,7 +80,7 @@ class ConjugateGradient(Minimizer):
while True:
q = energy.curvature(d)
ddotq = d.vdot(q).real
if ddotq==0.:
if ddotq == 0.:
return energy, controller.ERROR
alpha = previous_gamma/ddotq
......@@ -90,7 +88,7 @@ class ConjugateGradient(Minimizer):
return energy, controller.ERROR
r -= q * alpha
energy = energy.at_with_grad(energy.position+d*alpha,-r)
energy = energy.at_with_grad(energy.position+d*alpha, -r)
status = self._controller.check(energy)
if status != controller.CONTINUE:
......
......@@ -19,10 +19,11 @@
from __future__ import print_function
from .iteration_controller import IterationController
class DefaultIterationController(IterationController):
def __init__ (self, tol_abs_gradnorm=None, tol_rel_gradnorm=None,
convergence_level=1, iteration_limit=None, name=None,
verbose=None):
def __init__(self, tol_abs_gradnorm=None, tol_rel_gradnorm=None,
convergence_level=1, iteration_limit=None, name=None,
verbose=None):
super(DefaultIterationController, self).__init__()
self._tol_abs_gradnorm = tol_abs_gradnorm
self._tol_rel_gradnorm = tol_rel_gradnorm
......@@ -57,8 +58,8 @@ class DefaultIterationController(IterationController):
msg += " Iteration #" + str(self._itcount)
msg += " gradnorm=" + str(energy.gradient_norm)
msg += " clvl=" + str(self._ccount)
print (msg)
#self.logger.info(msg)
print(msg)
# self.logger.info(msg)
# Are we done?
if self._iteration_limit is not None:
......
......@@ -18,11 +18,9 @@
from __future__ import division
import abc
import numpy as np
from .minimizer import Minimizer
from .line_searching import LineSearchStrongWolfe
from future.utils import with_metaclass
class DescentMinimizer(Minimizer):
......@@ -107,7 +105,6 @@ class DescentMinimizer(Minimizer):
if status != controller.CONTINUE:
return energy, status
@abc.abstractmethod
def get_descent_direction(self, energy):
raise NotImplementedError
......@@ -19,11 +19,9 @@
from builtins import range
import abc
from ..nifty_meta import NiftyMeta
import numpy as np
from future.utils import with_metaclass
class IterationController(with_metaclass(NiftyMeta, type('NewBase',
(object,), {}))):
"""The abstract base class for all iteration controllers.
......
......@@ -17,12 +17,13 @@
# and financially supported by the Studienstiftung des deutschen Volkes.
import abc
from ...energies import LineEnergy
from future.utils import with_metaclass
class LineSearch(with_metaclass(abc.ABCMeta, with_metaclass(abc.ABCMeta, type('NewBase', (object,), {})))):
class LineSearch(with_metaclass(abc.ABCMeta,
with_metaclass(abc.ABCMeta,
type('NewBase',
(object,), {})))):
"""Class for determining the optimal step size along some descent direction.
Initialize the line search procedure which can be used by a specific line
......
......@@ -109,7 +109,7 @@ class LineSearchStrongWolfe(LineSearch):
phi_0 = le_0.value
phiprime_0 = le_0.directional_derivative
if phiprime_0 >= 0:
raise RuntimeError ("search direction must be a descent direction")
raise RuntimeError("search direction must be a descent direction")
# set alphas
alpha0 = 0.
......@@ -262,7 +262,7 @@ class LineSearchStrongWolfe(LineSearch):
phiprime_alphaj)
else:
#self.logger.error("The line search algorithm (zoom) did not "
# self.logger.error("The line search algorithm (zoom) did not "
# "converge.")
return le_alphaj
......
......@@ -18,11 +18,9 @@
import abc
from ..nifty_meta import NiftyMeta
import numpy as np
from future.utils import with_metaclass
class Minimizer(with_metaclass(NiftyMeta, type('NewBase', (object,), {}))):
""" A base class used by all minimizers.
"""
......
......@@ -17,8 +17,6 @@
# and financially supported by the Studienstiftung des deutschen Volkes.
from __future__ import division
import numpy as np
from .minimizer import Minimizer
from .line_searching import LineSearchStrongWolfe
......@@ -71,7 +69,6 @@ class NonlinearCG(Minimizer):
while True:
grad_old = energy.gradient
gnold = energy.gradient_norm
f_k = energy.value
energy = self._line_searcher.perform_line_search(energy, p,
f_k_minus_1)
......
Markdown is supported
0% or
You are about to add 0 people to the discussion. Proceed with caution.
Finish editing this message first!
Please register or sign in to comment