Skip to content
GitLab
Projects
Groups
Snippets
/
Help
Help
Support
Community forum
Keyboard shortcuts
?
Submit feedback
Contribute to GitLab
Sign in
Toggle navigation
Menu
Open sidebar
ift
NIFTy
Commits
042528af
Commit
042528af
authored
Mar 07, 2018
by
Martin Reinecke
Browse files
more tests and solvers; Hestenes-Stiefel still causes problems
parent
f827fba2
Pipeline
#25849
failed with stage
in 26 minutes and 11 seconds
Changes
2
Pipelines
1
Hide whitespace changes
Inline
Side-by-side
nifty4/minimization/nonlinear_cg.py
View file @
042528af
...
...
@@ -25,14 +25,13 @@ class NonlinearCG(Minimizer):
""" Nonlinear Conjugate Gradient scheme according to Polak-Ribiere.
Algorithm 5.4 from Nocedal & Wright.
Eq. (5.41a) has been replaced by eq. (5.49)
Parameters
----------
controller : IterationController
Object that decides when to terminate the minimization.
line_searcher : LineSearch, optional
The line search algorithm to be used
beta_heuristics : str
One of 'Polak-Ribiere', 'Fletcher-Reeves', 'Hestenes-Stiefel'
References
----------
...
...
@@ -40,16 +39,15 @@ class NonlinearCG(Minimizer):
2006, Springer-Verlag New York
"""
def
__init__
(
self
,
controller
,
line_searcher
=
LineSearchStrongWolfe
(
c2
=
0.1
),
beta_heuristics
=
'Polak-Ribiere'
):
def
__init__
(
self
,
controller
,
beta_heuristics
=
'Polak-Ribiere'
):
valid_beta_heuristics
=
[
'Polak-Ribiere'
,
'Fletcher-Reeves'
,
'Hestenes-Stiefel'
]
'Hestenes-Stiefel'
,
"5.49"
]
if
not
(
beta_heuristics
in
valid_beta_heuristics
):
raise
ValueError
(
"beta heuristics must be either 'Polak-Ribiere', "
"'Fletcher-Reeves', or 'Hestenes-Stiefel'"
)
self
.
_beta_heuristic
=
beta_heuristics
self
.
_controller
=
controller
self
.
_line_searcher
=
l
ine
_s
earch
er
self
.
_line_searcher
=
L
ine
S
earch
StrongWolfe
(
c2
=
0.1
)
def
__call__
(
self
,
energy
):
controller
=
self
.
_controller
...
...
@@ -74,14 +72,18 @@ class NonlinearCG(Minimizer):
grad_new
=
energy
.
gradient
if
self
.
_beta_heuristic
==
'Hestenes-Stiefel'
:
# Eq. (5.4
5
) in Nocedal & Wright.
# Eq. (5.4
6
) in Nocedal & Wright.
beta
=
max
(
0.0
,
(
grad_new
.
vdot
(
grad_new
-
grad_old
)
/
(
grad_new
-
grad_old
).
vdot
(
p
)).
real
)
elif
self
.
_beta_heuristic
==
'Polak-Ribiere'
:
# Eq. (5.4
3
) in Nocedal & Wright. (with (5.4
4
) additionally)
# Eq. (5.4
4
) in Nocedal & Wright. (with (5.4
5
) additionally)
beta
=
max
(
0.0
,
(
grad_new
.
vdot
(
grad_new
-
grad_old
)
/
(
grad_old
.
vdot
(
grad_old
))).
real
)
el
se
:
# Eq. (5.4
0
a) in Nocedal & Wright.
el
if
self
.
_beta_heuristic
==
'Fletcher-Reeves'
:
# Eq. (5.4
1
a) in Nocedal & Wright.
beta
=
(
grad_new
.
vdot
(
grad_new
)
/
(
grad_old
.
vdot
(
grad_old
))).
real
else
:
# Eq. (5.49) in Nocedal & Wright.
beta
=
(
grad_new
.
vdot
(
grad_new
)
/
((
grad_new
-
grad_old
).
vdot
(
p
))).
real
p
=
beta
*
p
-
grad_new
test/test_minimization/test_minimizers.py
View file @
042528af
...
...
@@ -24,22 +24,28 @@ from itertools import product
from
test.common
import
expand
from
nose.plugins.skip
import
SkipTest
IC
=
ift
.
GradientNormController
(
tol_abs_gradnorm
=
1e-5
,
iteration_limit
=
1000
)
spaces
=
[
ift
.
RGSpace
([
1024
],
distances
=
0.123
),
ift
.
HPSpace
(
32
)]
minimizers
=
[
ift
.
SteepestDescent
,
ift
.
RelaxedNewton
,
ift
.
VL_BFGS
,
ift
.
ConjugateGradient
,
ift
.
NonlinearCG
,
ift
.
NewtonCG
,
ift
.
L_BFGS_B
]
minimizers2
=
[
ift
.
RelaxedNewton
,
ift
.
VL_BFGS
,
ift
.
NonlinearCG
,
ift
.
NewtonCG
,
ift
.
L_BFGS_B
]
minimizers
=
[
ift
.
VL_BFGS
(
IC
),
ift
.
NonlinearCG
(
IC
,
"Polak-Ribiere"
),
# ift.NonlinearCG(IC, "Hestenes-Stiefel"),
ift
.
NonlinearCG
(
IC
,
"Fletcher-Reeves"
),
ift
.
NonlinearCG
(
IC
,
"5.49"
),
ift
.
NewtonCG
(
IC
),
ift
.
L_BFGS_B
(
IC
)]
minimizers3
=
[
ift
.
SteepestDescent
,
ift
.
RelaxedNewton
,
ift
.
VL_BFGS
,
ift
.
NonlinearCG
,
ift
.
NewtonCG
,
ift
.
L_BFGS_B
]
newton_minimizers
=
[
ift
.
RelaxedNewton
(
IC
)]
quadratic_only_minimizers
=
[
ift
.
ConjugateGradient
(
IC
)]
slow_minimizers
=
[
ift
.
SteepestDescent
(
IC
)]
class
Test_Minimizers
(
unittest
.
TestCase
):
@
expand
(
product
(
minimizers
,
spaces
))
def
test_quadratic_minimization
(
self
,
minimizer_class
,
space
):
@
expand
(
product
(
minimizers
+
newton_minimizers
+
quadratic_only_minimizers
+
slow_minimizers
,
spaces
))
def
test_quadratic_minimization
(
self
,
minimizer
,
space
):
np
.
random
.
seed
(
42
)
starting_point
=
ift
.
Field
.
from_random
(
'normal'
,
domain
=
space
)
*
10
covariance_diagonal
=
ift
.
Field
.
from_random
(
...
...
@@ -47,10 +53,7 @@ class Test_Minimizers(unittest.TestCase):
covariance
=
ift
.
DiagonalOperator
(
covariance_diagonal
)
required_result
=
ift
.
Field
.
ones
(
space
,
dtype
=
np
.
float64
)
IC
=
ift
.
GradientNormController
(
tol_abs_gradnorm
=
1e-5
,
iteration_limit
=
1000
)
try
:
minimizer
=
minimizer_class
(
controller
=
IC
)
energy
=
ift
.
QuadraticEnergy
(
A
=
covariance
,
b
=
required_result
,
position
=
starting_point
)
...
...
@@ -63,8 +66,8 @@ class Test_Minimizers(unittest.TestCase):
1.
/
covariance_diagonal
.
to_global_data
(),
rtol
=
1e-3
,
atol
=
1e-3
)
@
expand
(
product
(
minimizers
2
))
def
test_rosenbrock
(
self
,
minimizer
_class
):
@
expand
(
product
(
minimizers
+
newton_minimizers
))
def
test_rosenbrock
(
self
,
minimizer
):
try
:
from
scipy.optimize
import
rosen
,
rosen_der
,
rosen_hess_prod
except
ImportError
:
...
...
@@ -114,10 +117,7 @@ class Test_Minimizers(unittest.TestCase):
return
ift
.
InversionEnabler
(
RBCurv
(
self
.
_position
),
inverter
=
t2
)
IC
=
ift
.
GradientNormController
(
tol_abs_gradnorm
=
1e-5
,
iteration_limit
=
10000
)
try
:
minimizer
=
minimizer_class
(
controller
=
IC
)
energy
=
RBEnergy
(
position
=
starting_point
)
(
energy
,
convergence
)
=
minimizer
(
energy
)
...
...
@@ -128,10 +128,10 @@ class Test_Minimizers(unittest.TestCase):
assert_allclose
(
energy
.
position
.
to_global_data
(),
1.
,
rtol
=
1e-3
,
atol
=
1e-3
)
@
expand
(
product
(
minimizers
3
))
def
DISABLEDtest_nonlinear
(
self
,
minimizer
_class
):
@
expand
(
product
(
minimizers
+
slow_minimizers
))
def
test_gauss
(
self
,
minimizer
):
space
=
ift
.
UnstructuredDomain
((
1
,))
starting_point
=
ift
.
Field
(
space
,
val
=
5
.
)
starting_point
=
ift
.
Field
(
space
,
val
=
3
.
)
class
ExpEnergy
(
ift
.
Energy
):
def
__init__
(
self
,
position
):
...
...
@@ -154,10 +154,7 @@ class Test_Minimizers(unittest.TestCase):
return
ift
.
DiagonalOperator
(
ift
.
Field
(
self
.
position
.
domain
,
val
=
v
))
IC
=
ift
.
GradientNormController
(
tol_abs_gradnorm
=
1e-10
,
iteration_limit
=
10000
)
try
:
minimizer
=
minimizer_class
(
controller
=
IC
)
energy
=
ExpEnergy
(
position
=
starting_point
)
(
energy
,
convergence
)
=
minimizer
(
energy
)
...
...
@@ -167,3 +164,40 @@ class Test_Minimizers(unittest.TestCase):
assert_equal
(
convergence
,
IC
.
CONVERGED
)
assert_allclose
(
energy
.
position
.
to_global_data
(),
0.
,
atol
=
1e-3
)
@
expand
(
product
(
minimizers
+
newton_minimizers
+
slow_minimizers
))
def
test_cosh
(
self
,
minimizer
):
space
=
ift
.
UnstructuredDomain
((
1
,))
starting_point
=
ift
.
Field
(
space
,
val
=
3.
)
class
CoshEnergy
(
ift
.
Energy
):
def
__init__
(
self
,
position
):
super
(
CoshEnergy
,
self
).
__init__
(
position
)
@
property
def
value
(
self
):
x
=
self
.
position
.
to_global_data
()[
0
]
return
np
.
cosh
(
x
)
@
property
def
gradient
(
self
):
x
=
self
.
position
.
to_global_data
()[
0
]
return
ift
.
Field
(
self
.
position
.
domain
,
val
=
np
.
sinh
(
x
))
@
property
def
curvature
(
self
):
x
=
self
.
position
.
to_global_data
()[
0
]
v
=
np
.
cosh
(
x
)
return
ift
.
DiagonalOperator
(
ift
.
Field
(
self
.
position
.
domain
,
val
=
v
))
try
:
energy
=
CoshEnergy
(
position
=
starting_point
)
(
energy
,
convergence
)
=
minimizer
(
energy
)
except
NotImplementedError
:
raise
SkipTest
assert_equal
(
convergence
,
IC
.
CONVERGED
)
assert_allclose
(
energy
.
position
.
to_global_data
(),
0.
,
atol
=
1e-3
)
Write
Preview
Supports
Markdown
0%
Try again
or
attach a new file
.
Cancel
You are about to add
0
people
to the discussion. Proceed with caution.
Finish editing this message first!
Cancel
Please
register
or
sign in
to comment