diff --git a/nifty/minimization/conjugate_gradient.py b/nifty/minimization/conjugate_gradient.py
index 536589128f3cc2263eaab186727cabcedb7275dc..035c4c92229a542c2faa18a310f6f7c19d5ae194 100644
--- a/nifty/minimization/conjugate_gradient.py
+++ b/nifty/minimization/conjugate_gradient.py
@@ -75,7 +75,7 @@ class ConjugateGradient(Loggable, object):
 
     """
 
-    def __init__(self, convergence_tolerance=1E-4, convergence_level=3,
+    def __init__(self, convergence_tolerance=1E-8, convergence_level=3,
                  iteration_limit=None, reset_count=None,
                  preconditioner=None, callback=None):
 
@@ -121,13 +121,13 @@ class ConjugateGradient(Loggable, object):
 
         r = b - A(x0)
         d = self.preconditioner(r)
-        previous_gamma = r.vdot(d)
+        previous_gamma = (r.vdot(d)).real
         if previous_gamma == 0:
             self.logger.info("The starting guess is already perfect solution "
                              "for the inverse problem.")
             return x0, self.convergence_level+1
-        norm_b = np.sqrt(b.vdot(b))
-        x = x0
+        norm_b = np.sqrt((b.vdot(b)).real)
+        x = x0.copy()
         convergence = 0
         iteration_number = 1
         self.logger.info("Starting conjugate gradient.")
@@ -137,7 +137,7 @@ class ConjugateGradient(Loggable, object):
                 self.callback(x, iteration_number)
 
             q = A(d)
-            alpha = previous_gamma/d.vdot(q)
+            alpha = previous_gamma/d.vdot(q).real
 
             if not np.isfinite(alpha):
                 self.logger.error("Alpha became infinite! Stopping.")
@@ -146,7 +146,7 @@ class ConjugateGradient(Loggable, object):
             x += d * alpha
 
             reset = False
-            if alpha.real < 0:
+            if alpha < 0:
                 self.logger.warn("Positive definiteness of A violated!")
                 reset = True
             if self.reset_count is not None:
@@ -156,24 +156,21 @@ class ConjugateGradient(Loggable, object):
                 r = b - A(x)
             else:
                 r -= q * alpha
 
             s = self.preconditioner(r)
-            gamma = r.vdot(s)
+            gamma = r.vdot(s).real
 
-            if gamma.real < 0:
+            if gamma < 0:
                 self.logger.warn("Positive definitness of preconditioner "
                                  "violated!")
 
             beta = max(0, gamma/previous_gamma)
 
             delta = np.sqrt(gamma)/norm_b
 
             self.logger.debug("Iteration : %08u alpha = %3.1E "
                               "beta = %3.1E delta = %3.1E" %
-                              (iteration_number,
-                               np.real(alpha),
-                               np.real(beta),
-                               np.real(delta)))
+                              (iteration_number, alpha, beta, delta))
 
             if gamma == 0:
                 convergence = self.convergence_level+1
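
Note for reviewers: the substantive fix in this patch is taking .real of every vdot(). For a self-adjoint operator A and a self-adjoint, positive definite preconditioner, the inner products r.vdot(d), d.vdot(q) and r.vdot(s) are real in exact arithmetic; on complex fields their imaginary parts are pure floating-point noise, and without the .real they leak complex values into alpha, beta and the convergence measure delta. The switch from x = x0 to x = x0.copy() matters because x += d * alpha updates x in place and would otherwise overwrite the caller's starting guess. The sketch below restates the loop with plain NumPy arrays to make the roles of alpha, beta, gamma and delta explicit; preconditioned_cg, its signature and the Jacobi example are illustrative assumptions, not NIFTY's Field/operator API.

    import numpy as np


    def preconditioned_cg(A, b, x0, precondition=lambda r: r,
                          tol=1e-8, maxiter=1000):
        # Minimal sketch of the patched loop, assuming A is a callable that
        # applies a Hermitian, positive definite operator and `precondition`
        # approximates its inverse (identity by default). Hypothetical names.
        x = x0.copy()                     # mirror x = x0.copy(): keep the caller's guess intact
        r = b - A(x)                      # initial residual
        d = precondition(r)               # initial search direction
        gamma = np.vdot(r, d).real        # real up to rounding for Hermitian A and M
        if gamma == 0:                    # x0 already solves the system
            return x
        norm_b = np.sqrt(np.vdot(b, b).real)
        for _ in range(maxiter):
            q = A(d)
            alpha = gamma / np.vdot(d, q).real   # step length; < 0 signals indefinite A
            x += alpha * d
            r -= alpha * q                       # cheap residual update
            s = precondition(r)
            new_gamma = np.vdot(r, s).real
            if np.sqrt(new_gamma) / norm_b < tol:  # "delta" in the patched code
                break
            beta = max(0.0, new_gamma / gamma)   # clamped at 0, as in the patch
            d = s + beta * d                     # next conjugate direction
            gamma = new_gamma
        return x

For example, solving a small symmetric positive definite system with a Jacobi (diagonal) preconditioner:

    M = np.array([[4.0, 1.0], [1.0, 3.0]])           # SPD test matrix
    solution = preconditioned_cg(lambda v: M.dot(v),
                                 np.array([1.0, 2.0]),
                                 np.zeros(2),
                                 precondition=lambda r: r / np.diag(M))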