Commit ef14fbcd authored by Theo Steininger's avatar Theo Steininger
Browse files

Fixed NonlinearConjugateGradient

parent ca2c5d10
......@@ -20,7 +20,7 @@ from .line_searching import *
from .iteration_controlling import *
from .minimizer import Minimizer
from .conjugate_gradient import ConjugateGradient
from .nonlinear_cg import NonlinearCG
from .nonlinear_conjugate_gradient import NonlinearConjugateGradient
from .descent_minimizer import DescentMinimizer
from .steepest_descent import SteepestDescent
from .vl_bfgs import VL_BFGS
......
......@@ -70,7 +70,7 @@ class ConjugateGradient(Minimizer):
"""
controller = self._controller
status = controller.reset(energy)
controller.reset(energy)
r = -energy.gradient
previous_gamma = np.inf
......@@ -93,7 +93,7 @@ class ConjugateGradient(Minimizer):
d = s + d * max(0, gamma/previous_gamma)
previous_gamma = gamma
status = self._controller.check(energy)
status = controller.check(energy)
if status != controller.CONTINUE:
return energy, status
......
......@@ -76,7 +76,7 @@ class DescentMinimizer(Minimizer):
f_k_minus_1 = None
controller = self._controller
self._controller.reset(energy)
controller.reset(energy)
while True:
status = controller.check(energy)
......
......@@ -109,7 +109,8 @@ class LineSearchStrongWolfe(LineSearch):
phi_0 = le_0.value
phiprime_0 = le_0.directional_derivative
if phiprime_0 >= 0:
self.logger.error("Input direction must be a descent direction")
self.logger.error("Input direction must be a descent direction. "
"Gradient: %f" % phiprime_0)
raise RuntimeError
# set alphas
......
......@@ -18,11 +18,13 @@
from __future__ import division
import numpy as np
from .minimizer import Minimizer
from .line_searching import LineSearchStrongWolfe
class NonlinearCG(Minimizer):
class NonlinearConjugateGradient(Minimizer):
""" Implementation of the nonlinear Conjugate Gradient scheme according to
Polak-Ribiere.
......@@ -61,23 +63,28 @@ class NonlinearCG(Minimizer):
"""
controller = self._controller
status = controller.start(energy)
if status != controller.CONTINUE:
return energy, status
f_k_minus_1 = None
controller.reset(energy)
p = -energy.gradient
f_k_minus_1 = None
grad_old = np.inf
# this is a dummy initialization which doesn't hurt since
# energy.gradient must be computed anyway and beta will safely evaluate
# to 0 in the first iteration
p = abs(energy.gradient)
while True:
grad_old = energy.gradient
f_k = energy.value
energy = self._line_searcher.perform_line_search(energy, p,
f_k_minus_1)
f_k_minus_1 = f_k
status = self._controller.check(energy)
status = controller.check(energy)
if status != controller.CONTINUE:
return energy, status
grad_new = energy.gradient
gnnew = energy.gradient_norm
beta = gnnew*gnnew/(grad_new-grad_old).vdot(p).real
p = beta*p - grad_new
grad_old = energy.gradient
f_k = energy.value
energy = self._line_searcher.perform_line_search(energy, p,
f_k_minus_1)
f_k_minus_1 = f_k
......@@ -3,6 +3,8 @@ import unittest
import numpy as np
from numpy.testing import assert_allclose
import d2o
import nifty as ift
from itertools import product
......@@ -10,21 +12,21 @@ from test.common import expand
spaces = [ift.RGSpace([1024], distances=0.123), ift.HPSpace(32)]
minimizers = [ift.SteepestDescent, ift.RelaxedNewton, ift.VL_BFGS,
ift.ConjugateGradient, ift.NonlinearCG]
ift.ConjugateGradient, ift.NonlinearConjugateGradient]
class Test_Minimizers(unittest.TestCase):
@expand(product(minimizers, spaces))
def test_quadratic_minimization(self, minimizer_class, space):
np.random.seed(42)
d2o.random.seed(42)
starting_point = ift.Field.from_random('normal', domain=space)*10
covariance_diagonal = ift.Field.from_random(
'uniform', domain=space) + 0.5
covariance = ift.DiagonalOperator(space, diagonal=covariance_diagonal)
required_result = ift.Field(space, val=1.)
IC = ift.DefaultIterationController(tol_abs_gradnorm=1e-5)
IC = ift.GradientNormController(tol_abs_gradnorm=1e-5)
minimizer = minimizer_class(controller=IC)
energy = ift.QuadraticEnergy(A=covariance, b=required_result,
position=starting_point)
......
Supports Markdown
0% or .
You are about to add 0 people to the discussion. Proceed with caution.
Finish editing this message first!
Please register or sign in to comment