Commit 0ae8d90a authored by Martin Reinecke's avatar Martin Reinecke
Browse files

Merge branch 'master' into fix_hermitianizer

parents 054046a4 2a7ee176
Pipeline #14286 failed with stage
in 7 minutes and 44 seconds
...@@ -24,7 +24,7 @@ class WienerFilterEnergy(Energy): ...@@ -24,7 +24,7 @@ class WienerFilterEnergy(Energy):
@property @property
def value(self): def value(self):
D_inv_x = self.D_inverse_x() D_inv_x = self.D_inverse_x()
H = 0.5 * D_inv_x.vdot(self.position) - self.j.dot(self.position) H = 0.5 * D_inv_x.vdot(self.position) - self.j.vdot(self.position)
return H.real return H.real
@property @property
......
...@@ -121,13 +121,13 @@ class ConjugateGradient(Loggable, object): ...@@ -121,13 +121,13 @@ class ConjugateGradient(Loggable, object):
r = b - A(x0) r = b - A(x0)
d = self.preconditioner(r) d = self.preconditioner(r)
previous_gamma = r.vdot(d) previous_gamma = (r.vdot(d)).real
if previous_gamma == 0: if previous_gamma == 0:
self.logger.info("The starting guess is already perfect solution " self.logger.info("The starting guess is already perfect solution "
"for the inverse problem.") "for the inverse problem.")
return x0, self.convergence_level+1 return x0, self.convergence_level+1
norm_b = np.sqrt(b.vdot(b)) norm_b = np.sqrt((b.vdot(b)).real)
x = x0 x = x0.copy()
convergence = 0 convergence = 0
iteration_number = 1 iteration_number = 1
self.logger.info("Starting conjugate gradient.") self.logger.info("Starting conjugate gradient.")
...@@ -137,7 +137,7 @@ class ConjugateGradient(Loggable, object): ...@@ -137,7 +137,7 @@ class ConjugateGradient(Loggable, object):
self.callback(x, iteration_number) self.callback(x, iteration_number)
q = A(d) q = A(d)
alpha = previous_gamma/d.vdot(q) alpha = previous_gamma/d.vdot(q).real
if not np.isfinite(alpha): if not np.isfinite(alpha):
self.logger.error("Alpha became infinite! Stopping.") self.logger.error("Alpha became infinite! Stopping.")
...@@ -146,7 +146,7 @@ class ConjugateGradient(Loggable, object): ...@@ -146,7 +146,7 @@ class ConjugateGradient(Loggable, object):
x += d * alpha x += d * alpha
reset = False reset = False
if alpha.real < 0: if alpha < 0:
self.logger.warn("Positive definiteness of A violated!") self.logger.warn("Positive definiteness of A violated!")
reset = True reset = True
if self.reset_count is not None: if self.reset_count is not None:
...@@ -158,9 +158,9 @@ class ConjugateGradient(Loggable, object): ...@@ -158,9 +158,9 @@ class ConjugateGradient(Loggable, object):
r -= q * alpha r -= q * alpha
s = self.preconditioner(r) s = self.preconditioner(r)
gamma = r.vdot(s) gamma = r.vdot(s).real
if gamma.real < 0: if gamma < 0:
self.logger.warn("Positive definitness of preconditioner " self.logger.warn("Positive definitness of preconditioner "
"violated!") "violated!")
...@@ -170,10 +170,7 @@ class ConjugateGradient(Loggable, object): ...@@ -170,10 +170,7 @@ class ConjugateGradient(Loggable, object):
self.logger.debug("Iteration : %08u alpha = %3.1E " self.logger.debug("Iteration : %08u alpha = %3.1E "
"beta = %3.1E delta = %3.1E" % "beta = %3.1E delta = %3.1E" %
(iteration_number, (iteration_number, alpha, beta, delta))
np.real(alpha),
np.real(beta),
np.real(delta)))
if gamma == 0: if gamma == 0:
convergence = self.convergence_level+1 convergence = self.convergence_level+1
......
Markdown is supported
0% or .
You are about to add 0 people to the discussion. Proceed with caution.
Finish editing this message first!
Please register or sign in to comment