Commit b92b75fb authored by Martin Reinecke

CG improvements

parent 0c8a2610
Pipeline #14125 canceled with stage in 3 minutes and 17 seconds
@@ -20,6 +20,7 @@ from __future__ import division
 import numpy as np
 from keepers import Loggable
+from nifty import Field

 class ConjugateGradient(Loggable, object):
@@ -75,7 +76,7 @@ class ConjugateGradient(Loggable, object):
     """

-    def __init__(self, convergence_tolerance=1E-4, convergence_level=3,
+    def __init__(self, convergence_tolerance=1E-8, convergence_level=3,
                  iteration_limit=None, reset_count=None,
                  preconditioner=None, callback=None):
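The default convergence_tolerance drops from 1E-4 to 1E-8, so the solver now iterates until the relative residual is four orders of magnitude smaller, at the price of extra iterations. A minimal sketch of that trade-off with a plain NumPy CG (the run_cg helper and the test matrix are illustrative, not NIFTY API):

```python
import numpy as np

def run_cg(A, b, tol):
    # Plain (unpreconditioned) CG; counts iterations until the
    # relative residual ||r|| / ||b|| drops below tol.
    x = np.zeros_like(b)
    r = b - A @ x
    d = r.copy()
    gamma = r @ r
    norm_b = np.sqrt(b @ b)
    n = 0
    while np.sqrt(gamma) / norm_b > tol:
        q = A @ d
        alpha = gamma / (d @ q)
        x += alpha * d
        r -= alpha * q
        new_gamma = r @ r
        d = r + (new_gamma / gamma) * d
        gamma = new_gamma
        n += 1
    return x, n

rng = np.random.RandomState(0)
M = rng.randn(200, 200)
A = M.T @ M + 200 * np.eye(200)   # well-conditioned SPD test matrix
b = rng.randn(200)

for tol in (1e-4, 1e-8):
    _, n = run_cg(A, b, tol)
    print("tol=%g -> %d iterations" % (tol, n))
```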
@@ -121,13 +122,13 @@ class ConjugateGradient(Loggable, object):
         r = b - A(x0)
         d = self.preconditioner(r)
-        previous_gamma = r.vdot(d)
+        previous_gamma = (r.vdot(d)).real
         if previous_gamma == 0:
             self.logger.info("The starting guess is already perfect solution "
                              "for the inverse problem.")
             return x0, self.convergence_level+1
-        norm_b = np.sqrt(b.vdot(b))
-        x = x0
+        norm_b = np.sqrt((b.vdot(b)).real)
+        x = x0.copy()
         convergence = 0
         iteration_number = 1
         self.logger.info("Starting conjugate gradient.")
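Two things change here. The inner products r.vdot(d) and b.vdot(b) are real in exact arithmetic whenever A and the preconditioner are Hermitian, so the new .real casts only drop floating-point noise and keep previous_gamma and norm_b as real scalars. And x = x0.copy() matters because the later x += d * alpha updates x in place, which would otherwise overwrite the caller's starting guess. A small NumPy sketch of both effects (illustrative, not NIFTY code):

```python
import numpy as np

rng = np.random.RandomState(1)
n = 100
B = rng.randn(n, n) + 1j * rng.randn(n, n)
A = B.conj().T @ B          # Hermitian positive definite
r = rng.randn(n) + 1j * rng.randn(n)

# <r, A r> is real in exact arithmetic; roundoff leaves ~1e-16 junk.
g = np.vdot(r, A @ r)
print(g.imag / g.real)      # tiny, safe to drop with .real

# Aliasing: without a copy, in-place += mutates the caller's array.
x0 = np.zeros(n, dtype=complex)
x = x0                      # same object as x0
x += 1.0
print(x0[0])                # (1+0j): the "starting guess" changed too
x0 = np.zeros(n, dtype=complex)
x = x0.copy()               # independent buffer
x += 1.0
print(x0[0])                # 0j: x0 untouched
```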
@@ -137,7 +138,7 @@ class ConjugateGradient(Loggable, object):
                 self.callback(x, iteration_number)

             q = A(d)
-            alpha = previous_gamma/d.vdot(q)
+            alpha = previous_gamma/d.vdot(q).real
             if not np.isfinite(alpha):
                 self.logger.error("Alpha became infinite! Stopping.")
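Casting the denominator with d.vdot(q).real keeps alpha a real scalar; the np.isfinite guard then catches the breakdown case d.vdot(q).real == 0, where the division yields inf or nan. A two-line check of what the guard sees (sketch):

```python
import numpy as np

with np.errstate(divide='ignore', invalid='ignore'):
    print(np.isfinite(np.float64(1.0) / 0.0))   # False: division gave inf
    print(np.isfinite(np.float64(0.0) / 0.0))   # False: division gave nan
```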
@@ -146,7 +147,7 @@ class ConjugateGradient(Loggable, object):
             x += d * alpha

             reset = False
-            if alpha.real < 0:
+            if alpha < 0:
                 self.logger.warn("Positive definiteness of A violated!")
                 reset = True
             if self.reset_count is not None:
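Since previous_gamma is nonnegative for a positive semidefinite preconditioner, and alpha is now real by construction, alpha < 0 (the former alpha.real < 0) can only mean d.vdot(A(d)) < 0, i.e. the search direction has exposed an indefinite A. A toy demonstration in NumPy (illustrative matrix and direction):

```python
import numpy as np

A = np.diag([4.0, 1.0, -2.0])   # symmetric but indefinite
d = np.array([0.1, 0.1, 1.0])   # direction dominated by the bad mode

curvature = np.vdot(d, A @ d).real   # d . (A d) = -1.95 here
alpha = 1.0 / curvature              # sign of alpha = sign of curvature
if alpha < 0:
    print("Positive definiteness of A violated!")  # this branch fires
```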
@@ -156,24 +157,26 @@ class ConjugateGradient(Loggable, object):
                 r = b - A(x)
             else:
                 r -= q * alpha
+            #tmp=r.val.get_full_data()
+            #tmp.imag=0.
+            #r=Field(r.domain,val=tmp)
             s = self.preconditioner(r)
-            gamma = r.vdot(s)
+            gamma = r.vdot(s).real
-            if gamma.real < 0:
+            if gamma < 0:
                 self.logger.warn("Positive definitness of preconditioner "
                                  "violated!")
             beta = max(0, gamma/previous_gamma)
+            print "beta:",beta

             delta = np.sqrt(gamma)/norm_b
+            print "delta:",delta
             self.logger.debug("Iteration : %08u alpha = %3.1E "
                               "beta = %3.1E delta = %3.1E" %
-                              (iteration_number,
-                               np.real(alpha),
-                               np.real(beta),
-                               np.real(delta)))
+                              (iteration_number, alpha, beta, delta))

             if gamma == 0:
                 convergence = self.convergence_level+1
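For context: beta = max(0, gamma/previous_gamma) is the Fletcher-Reeves coefficient with a nonnegativity clamp, and delta = sqrt(gamma)/norm_b is the preconditioner-weighted relative residual driving the convergence test. A compact preconditioned-CG loop built from exactly these update formulas (plain NumPy sketch with a simplified single-level convergence test and a hypothetical Jacobi preconditioner, not the NIFTY class):

```python
import numpy as np

def pcg(A, b, M_inv, tol=1e-8, maxiter=1000):
    # Preconditioned CG using the same quantities as the diff:
    # gamma = r . M_inv(r), beta = max(0, gamma/previous_gamma),
    # delta = sqrt(gamma)/||b|| as the convergence measure.
    x = np.zeros_like(b)
    r = b - A @ x
    d = M_inv(r)
    previous_gamma = np.vdot(r, d).real
    norm_b = np.sqrt(np.vdot(b, b).real)
    for iteration_number in range(1, maxiter + 1):
        q = A @ d
        alpha = previous_gamma / np.vdot(d, q).real
        x += d * alpha
        r -= q * alpha
        s = M_inv(r)
        gamma = np.vdot(r, s).real
        beta = max(0, gamma / previous_gamma)
        delta = np.sqrt(gamma) / norm_b
        if delta < tol:            # single-level stand-in for convergence_level
            break
        d = s + d * beta
        previous_gamma = gamma
    return x, iteration_number

rng = np.random.RandomState(2)
M = rng.randn(300, 300)
A = M.T @ M + 300 * np.eye(300)
b = rng.randn(300)
jacobi = 1.0 / np.diag(A)          # diagonal (Jacobi) preconditioner
x, n = pcg(A, b, lambda r: jacobi * r)
print("%d iterations, relative residual %.2e"
      % (n, np.linalg.norm(A @ x - b) / np.linalg.norm(b)))
```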
@@ -196,6 +199,7 @@ class ConjugateGradient(Loggable, object):
             d = s + d * beta
+            print "iter:",iteration_number
             iteration_number += 1
             previous_gamma = gamma