ift / NIFTy

Commit fbb298cf, authored Aug 20, 2017 by Martin Reinecke

tweaks

parent db5502f3
Pipeline #16905 passed in 10 minutes and 29 seconds
Changes: 5 · Pipelines: 1
nifty/energies/quadratic_energy.py

@@ -8,14 +8,22 @@ class QuadraticEnergy(Energy):
         position-independent.
     """

-    def __init__(self, position, A, b):
+    def __init__(self, position, A, b, grad=None):
         super(QuadraticEnergy, self).__init__(position=position)
         self._A = A
         self._b = b
+        if grad is not None:
+            self._Ax = grad + self._b
+        else:
+            self._Ax = self._A(self.position)

     def at(self, position):
         return self.__class__(position=position, A=self._A, b=self._b)

+    def at_with_grad(self, position, grad):
+        return self.__class__(position=position, A=self._A, b=self._b,
+                              grad=grad)
+
     @property
     @memo
     def value(self):

@@ -29,8 +37,3 @@ class QuadraticEnergy(Energy):
     @property
     def curvature(self):
         return self._A
-
-    @property
-    @memo
-    def _Ax(self):
-        return self.curvature(self.position)
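Why the new grad argument pays off: for a quadratic energy E(x) = 1/2 x^T A x - b^T x the gradient is A x - b, so a caller that already knows the gradient at position can hand A x over as grad + b, and the operator application A(position), typically the expensive step, is skipped. Below is a minimal self-contained sketch of that idea in plain numpy; ToyQuadraticEnergy is a hypothetical stand-in, not the NIFTy class (Field, @memo and the Energy base class are left out), and the 1/2 x^T A x - b^T x convention is an assumption, chosen because it is consistent with _Ax = grad + self._b above.

import numpy as np

class ToyQuadraticEnergy(object):
    """Hypothetical stand-in for QuadraticEnergy (plain numpy)."""
    def __init__(self, position, A, b, grad=None):
        self.position = position
        self._A = A
        self._b = b
        if grad is not None:
            self._Ax = grad + b      # reuse the known gradient: no matvec
        else:
            self._Ax = A(position)   # fallback: one operator application

    def at_with_grad(self, position, grad):
        return ToyQuadraticEnergy(position, self._A, self._b, grad=grad)

    @property
    def value(self):                 # E(x) = 0.5*x^T A x - b^T x
        return (0.5*np.vdot(self.position, self._Ax)
                - np.vdot(self._b, self.position))

    @property
    def gradient(self):              # grad E(x) = A x - b
        return self._Ax - self._b

A = lambda x: np.array([[4., 1.], [1., 3.]]).dot(x)
b = np.array([1., 2.])
e = ToyQuadraticEnergy(np.zeros(2), A, b)  # applies A once
e2 = e.at_with_grad(b, A(b) - b)           # gradient supplied, A not reapplied

In real use the caller does not recompute the gradient either; it falls out of the conjugate-gradient recursion, as the next file shows.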
nifty/minimization/conjugate_gradient.py

@@ -79,7 +79,7 @@ class ConjugateGradient(Minimizer):
         if self._preconditioner is not None:
             d = self._preconditioner(r)
         else:
-            d = r
+            d = r.copy()
         previous_gamma = (r.vdot(d)).real
         if previous_gamma == 0:
             return energy, controller.CONVERGED

@@ -92,11 +92,6 @@ class ConjugateGradient(Minimizer):
                 return energy, controller.ERROR
             alpha = previous_gamma/ddotq
-            energy = energy.at(energy.position + d*alpha)
-            status = self._controller.check(energy)
-            if status != controller.CONTINUE:
-                return energy, status
-
             reset = False
             if alpha < 0:
                 self.logger.warn("Positive definiteness of A violated!")

@@ -105,9 +100,15 @@ class ConjugateGradient(Minimizer):
                 reset += (iteration_number % self._reset_count == 0)
             if reset:
                 self.logger.info("Resetting conjugate directions.")
+                energy = energy.at(energy.position + d*alpha)
                 r = -energy.gradient
             else:
                 r -= q * alpha
+                energy = energy.at_with_grad(energy.position + d*alpha, -r)
+
+            status = self._controller.check(energy)
+            if status != controller.CONTINUE:
+                return energy, status

             if self._preconditioner is not None:
                 s = self._preconditioner(r)
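This is where at_with_grad earns its keep. In CG the residual r = b - A x is exactly the negative gradient of E(x) = 1/2 x^T A x - b^T x, and the loop already maintains it incrementally via r -= q*alpha (q = A(d) is the one operator application per iteration). Passing -r to at_with_grad therefore builds the energy at the new position without a second application of A; only after a reset is the gradient recomputed from scratch via energy.at(...). A stand-alone sketch of that bookkeeping, using a hypothetical toy_cg in plain numpy rather than the NIFTy class, with preconditioner and reset logic omitted:

import numpy as np

def toy_cg(A, b, x, iterations=100, tol=1e-10):
    r = b - A.dot(x)              # negative gradient at the start
    d = r.copy()
    previous_gamma = np.vdot(r, r).real
    for _ in range(iterations):
        q = A.dot(d)              # the only matvec in the loop
        alpha = previous_gamma/np.vdot(d, q).real
        x = x + alpha*d
        r = r - alpha*q           # gradient updated without reapplying A
        gamma = np.vdot(r, r).real
        if np.sqrt(gamma) < tol:
            break
        d = r + (gamma/previous_gamma)*d
        previous_gamma = gamma
    return x

A = np.array([[4., 1.], [1., 3.]])
b = np.array([1., 2.])
print(toy_cg(A, b, np.zeros(2)))  # ~ np.linalg.solve(A, b)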
nifty/minimization/default_iteration_controller.py

@@ -19,30 +19,34 @@
 from .iteration_controller import IterationController


 class DefaultIterationController(IterationController):
-    def __init__(self, tol_gradnorm=None, convergence_level=1,
-                 iteration_limit=None):
+    def __init__(self, tol_gradnorm=None, tol_rel_gradnorm=None,
+                 convergence_level=1, iteration_limit=None):
         super(DefaultIterationController, self).__init__()
         self._tol_gradnorm = tol_gradnorm
+        self._tol_rel_gradnorm = tol_rel_gradnorm
         self._convergence_level = convergence_level
         self._iteration_limit = iteration_limit

     def start(self, energy):
         self._itcount = -1
         self._ccount = 0
+        if self._tol_rel_gradnorm is not None:
+            self._tol_rel_gradnorm *= energy.gradient_norm
         return self.check(energy)

     def check(self, energy):
         self._itcount += 1
-        print "iteration", self._itcount, "gradnorm", energy.gradient_norm, "level", self._ccount
+        print "iteration", self._itcount, "gradnorm", energy.gradient_norm, "level", self._ccount, self._tol_rel_gradnorm
         if self._iteration_limit is not None:
             if self._itcount >= self._iteration_limit:
                 return self.CONVERGED
         if self._tol_gradnorm is not None:
             if energy.gradient_norm <= self._tol_gradnorm:
                 self._ccount += 1
                 if self._ccount >= self._convergence_level:
                     return self.CONVERGED
             else:
                 self._ccount = max(0, self._ccount-1)
+        if self._tol_rel_gradnorm is not None:
+            if energy.gradient_norm <= self._tol_rel_gradnorm:
+                self._ccount += 1
+                if self._ccount >= self._convergence_level:
+                    return self.CONVERGED
         return self.CONTINUE
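tol_gradnorm is an absolute threshold on the gradient norm; the new tol_rel_gradnorm is relative: start() multiplies it by the initial gradient norm, so convergence is declared once the gradient norm has dropped by the given factor. Note that the in-place *= mutates the controller, so an instance is consumed by one run and should be re-created for the next. A hypothetical usage sketch; the constructor arguments are the ones in the diff, while the controller= wiring into ConjugateGradient is an assumption modelled on DescentMinimizer:

# stop once the gradient norm has shrunk by a factor of 1e-5
# relative to its value when start() was called, or after
# 1000 iterations, whichever comes first
controller = DefaultIterationController(tol_rel_gradnorm=1e-5,
                                        iteration_limit=1000)
minimizer = ConjugateGradient(controller=controller)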
nifty/minimization/descent_minimizer.py

@@ -75,7 +75,7 @@ class DescentMinimizer(Minimizer):
         controller = self._controller
         status = controller.start(energy)
         if status != controller.CONTINUE:
-            return E, status
+            return energy, status

         while True:
             # check if position is at a flat point
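A plain bug fix: the early-return path referenced the undefined name E and would have raised a NameError whenever the controller reported anything other than CONTINUE right at the start.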
nifty/minimization/relaxed_newton.py

@@ -22,9 +22,8 @@ from .line_searching import LineSearchStrongWolfe

 class RelaxedNewton(DescentMinimizer):
     def __init__(self, controller, line_searcher=LineSearchStrongWolfe()):
-        super(RelaxedNewton,
-              self).__init__(controller=controller,
-                             line_searcher=line_searcher)
+        super(RelaxedNewton, self).__init__(controller=controller,
+                                            line_searcher=line_searcher)
         self.line_searcher.preferred_initial_step_size = 1.
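A whitespace-only tweak to the super() call. The untouched line below it is worth noting all the same: a Newton step is already correctly scaled, so the strong-Wolfe line searcher is told to try the full step first (preferred_initial_step_size = 1.).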