ift / NIFTy · commit 5f7a31f4

print diagnostic messages if anything goes wrong during minimization

Authored Dec 18, 2017 by Martin Reinecke
Parent: 2216ae20
Pipeline #23015 passed in 4 minutes and 40 seconds
Changes: 5 files
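All five changed files route their new diagnostics through dobj.mprint. As a rough sketch of what such a helper typically does in an MPI-parallel code (an assumption for illustration, not NIFTy's actual dobj implementation), it prints only on the master task so each message appears once rather than once per process:

    # Sketch of an mprint-style helper: print only on MPI task 0.
    # Assumes mpi4py is available; NIFTy's real dobj module may differ.
    try:
        from mpi4py import MPI
        _rank = MPI.COMM_WORLD.Get_rank()
    except ImportError:
        _rank = 0  # serial fallback: behave like plain print

    def mprint(*args, **kwargs):
        """Print only on the master task to avoid duplicated output."""
        if _rank == 0:
            print(*args, **kwargs)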
nifty/minimization/conjugate_gradient.py
@@ -18,7 +18,7 @@
 from __future__ import division
 from .minimizer import Minimizer
-from .. import Field
+from .. import Field, dobj
 from ..utilities import general_axpy
@@ -80,10 +80,12 @@ class ConjugateGradient(Minimizer):
             q = energy.curvature(d)
             ddotq = d.vdot(q).real
             if ddotq == 0.:
+                dobj.mprint("Error: ConjugateGradient: ddotq==0.")
                 return energy, controller.ERROR
             alpha = previous_gamma/ddotq
             if alpha < 0:
+                dobj.mprint("Error: ConjugateGradient: alpha<0.")
                 return energy, controller.ERROR
             general_axpy(-alpha, q, r, out=r)
@@ -98,8 +100,9 @@ class ConjugateGradient(Minimizer):
             gamma = r.vdot(s).real
             if gamma < 0:
-                raise RuntimeError(
-                    "Positive definiteness of preconditioner violated!")
+                dobj.mprint(
+                    "Positive definiteness of preconditioner violated!")
+                return energy, controller.ERROR
             if gamma == 0:
                 return energy, controller.CONVERGED
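With these changes the conjugate gradient solver reports breakdowns (a singular curvature, a negative step length, a non-positive-definite preconditioner) through its (energy, status) return value rather than only by raising. A self-contained toy of that convention, with stand-in names that are not NIFTy's API:

    # Toy illustration of the (energy, status) return convention used above.
    class Controller:
        CONVERGED, ERROR = 0, 1

    def toy_minimize(x, controller):
        # Pretend the curvature check failed, as in the ddotq==0 branch.
        ddotq = 0.
        if ddotq == 0.:
            print("Error: ConjugateGradient: ddotq==0.")
            return x, controller.ERROR
        return x, controller.CONVERGED

    ctrl = Controller()
    result, status = toy_minimize(1.0, ctrl)
    if status == ctrl.ERROR:
        print("minimization failed; result is the last accepted state")

On ERROR the caller keeps the last accepted energy instead of an undefined intermediate state.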
nifty/minimization/descent_minimizer.py
@@ -20,6 +20,7 @@ from __future__ import division
 import abc
 from .minimizer import Minimizer
 from .line_search_strong_wolfe import LineSearchStrongWolfe
+from .. import dobj

 class DescentMinimizer(Minimizer):
@@ -89,9 +90,12 @@ class DescentMinimizer(Minimizer):
             f_k_minus_1 = energy.value
             if new_energy.value > energy.value:
+                dobj.mprint("Error: Energy has increased")
                 return energy, controller.ERROR
             if new_energy.value == energy.value:
+                dobj.mprint(
+                    "Warning: Energy has not changed. Assuming convergence...")
                 return new_energy, controller.CONVERGED
             energy = new_energy
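The two new guards in the descent loop encode a simple rule: a step that raises the energy is an error, and a step that leaves it exactly unchanged means no further progress is possible, so convergence is assumed. A minimal stand-alone version of that rule (illustrative names only, not NIFTy's API):

    # Toy version of the energy comparison added above.
    def classify_step(old_value, new_value):
        if new_value > old_value:
            print("Error: Energy has increased")
            return "ERROR"
        if new_value == old_value:
            print("Warning: Energy has not changed. Assuming convergence...")
            return "CONVERGED"
        return "CONTINUE"

    print(classify_step(1.0, 0.5))  # CONTINUE: the step lowered the energy
    print(classify_step(1.0, 1.0))  # CONVERGED
    print(classify_step(1.0, 2.0))  # ERROR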
nifty/minimization/gradient_norm_controller.py
@@ -70,6 +70,8 @@ class GradientNormController(IterationController):
         # Are we done?
         if self._iteration_limit is not None:
             if self._itcount >= self._iteration_limit:
+                dobj.mprint(
+                    "Warning: Iteration limit reached. Assuming convergence")
                 return self.CONVERGED
         if self._ccount >= self._convergence_level:
             return self.CONVERGED
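Hitting the iteration limit is reported as CONVERGED rather than ERROR, so downstream code treats the best-effort result as usable; the new mprint call makes that silent fallback visible. A toy controller mirroring the check (stand-in names, not NIFTy's IterationController):

    # Minimal stand-in controller with the limit check added above.
    class ToyController:
        CONVERGED, CONTINUE = 0, 1

        def __init__(self, iteration_limit):
            self._iteration_limit = iteration_limit
            self._itcount = 0

        def check(self):
            self._itcount += 1
            if self._iteration_limit is not None:
                if self._itcount >= self._iteration_limit:
                    print("Warning: Iteration limit reached. "
                          "Assuming convergence")
                    return self.CONVERGED
            return self.CONTINUE

    ctrl = ToyController(iteration_limit=3)
    while ctrl.check() != ToyController.CONVERGED:
        pass  # one minimization step per iteration would go here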
nifty/minimization/line_search_strong_wolfe.py
@@ -105,6 +105,7 @@ class LineSearchStrongWolfe(LineSearch):
         phi_0 = le_0.value
         phiprime_0 = le_0.directional_derivative
         if phiprime_0 >= 0:
+            dobj.mprint("Error: search direction is not a descent direction")
             raise RuntimeError("search direction must be a descent direction")

         # set alphas
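The check guards the precondition of any line search: phiprime_0 is the directional derivative φ'(0) = ∇f(x)·d, and the search can only reduce f along d if this value is negative. A small numeric illustration (plain-Python stand-ins, not NIFTy objects):

    # Why phiprime_0 >= 0 is fatal: grad(f)·d must be negative for d to be
    # a descent direction.
    def directional_derivative(grad, d):
        return sum(g * di for g, di in zip(grad, d))

    grad = [2.0, -1.0]
    d_bad = [1.0, 0.0]    # grad.d = 2.0  -> not a descent direction
    d_good = [-2.0, 1.0]  # grad.d = -5.0 -> descent direction
    for d in (d_bad, d_good):
        phiprime_0 = directional_derivative(grad, d)
        print(phiprime_0, "descent" if phiprime_0 < 0 else "not descent")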
nifty/operators/inversion_enabler.py
@@ -17,7 +17,8 @@
 # and financially supported by the Studienstiftung des deutschen Volkes.

 from ..minimization.quadratic_energy import QuadraticEnergy
-from ..field import Field
+from ..minimization.iteration_controller import IterationController
+from ..field import Field, dobj

 class InversionEnabler(object):
@@ -30,7 +31,9 @@ class InversionEnabler(object):
     def _operation(self, x, op, tdom):
         x0 = Field.zeros(tdom, dtype=x.dtype)
         energy = QuadraticEnergy(A=op, b=x, position=x0)
-        r = self._inverter(energy, preconditioner=self._preconditioner)[0]
+        r, stat = self._inverter(energy, preconditioner=self._preconditioner)
+        if stat != IterationController.CONVERGED:
+            dobj.mprint("Error detected during operator inversion")
         return r.position

     def _times(self, x):
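_operation now unpacks both values returned by the inverter and warns when the inner minimization did not converge, while still returning the (possibly inaccurate) result to the caller. A toy version of that pattern (stand-in names, not NIFTy's API):

    # Stand-in for the convergence check added above.
    CONVERGED, ERROR = 0, 1

    def toy_inverter(b):
        # Pretend the solver hit its iteration limit and flagged an error.
        approximate_solution = 0.9 * b
        return approximate_solution, ERROR

    r, stat = toy_inverter(2.0)
    if stat != CONVERGED:
        print("Error detected during operator inversion")
    # The (possibly inaccurate) result is still returned to the caller.
    print("result:", r)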