Skip to content
GitLab
Menu
Projects
Groups
Snippets
Help
Help
Support
Community forum
Keyboard shortcuts
?
Submit feedback
Contribute to GitLab
Sign in
Toggle navigation
Menu
Open sidebar
Neel Shah
NIFTy
Commits
ef14fbcd
Commit
ef14fbcd
authored
Sep 28, 2017
by
Theo Steininger
Browse files
Fixed NonlinearConjugateGradient
parent
ca2c5d10
Changes
6
Hide whitespace changes
Inline
Side-by-side
nifty/minimization/__init__.py
View file @
ef14fbcd
...
...
@@ -20,7 +20,7 @@ from .line_searching import *
from
.iteration_controlling
import
*
from
.minimizer
import
Minimizer
from
.conjugate_gradient
import
ConjugateGradient
from
.nonlinear_c
g
import
NonlinearC
G
from
.nonlinear_c
onjugate_gradient
import
NonlinearC
onjugateGradient
from
.descent_minimizer
import
DescentMinimizer
from
.steepest_descent
import
SteepestDescent
from
.vl_bfgs
import
VL_BFGS
...
...
nifty/minimization/conjugate_gradient.py
View file @
ef14fbcd
...
...
@@ -70,7 +70,7 @@ class ConjugateGradient(Minimizer):
"""
controller
=
self
.
_controller
status
=
controller
.
reset
(
energy
)
controller
.
reset
(
energy
)
r
=
-
energy
.
gradient
previous_gamma
=
np
.
inf
...
...
@@ -93,7 +93,7 @@ class ConjugateGradient(Minimizer):
d
=
s
+
d
*
max
(
0
,
gamma
/
previous_gamma
)
previous_gamma
=
gamma
status
=
self
.
_
controller
.
check
(
energy
)
status
=
controller
.
check
(
energy
)
if
status
!=
controller
.
CONTINUE
:
return
energy
,
status
...
...
nifty/minimization/descent_minimizer.py
View file @
ef14fbcd
...
...
@@ -76,7 +76,7 @@ class DescentMinimizer(Minimizer):
f_k_minus_1
=
None
controller
=
self
.
_controller
self
.
_
controller
.
reset
(
energy
)
controller
.
reset
(
energy
)
while
True
:
status
=
controller
.
check
(
energy
)
...
...
nifty/minimization/line_searching/line_search_strong_wolfe.py
View file @
ef14fbcd
...
...
@@ -109,7 +109,8 @@ class LineSearchStrongWolfe(LineSearch):
phi_0
=
le_0
.
value
phiprime_0
=
le_0
.
directional_derivative
if
phiprime_0
>=
0
:
self
.
logger
.
error
(
"Input direction must be a descent direction"
)
self
.
logger
.
error
(
"Input direction must be a descent direction. "
"Gradient: %f"
%
phiprime_0
)
raise
RuntimeError
# set alphas
...
...
nifty/minimization/nonlinear_c
g
.py
→
nifty/minimization/nonlinear_c
onjugate_gradient
.py
View file @
ef14fbcd
...
...
@@ -18,11 +18,13 @@
from
__future__
import
division
import
numpy
as
np
from
.minimizer
import
Minimizer
from
.line_searching
import
LineSearchStrongWolfe
class
NonlinearC
G
(
Minimizer
):
class
NonlinearC
onjugateGradient
(
Minimizer
):
""" Implementation of the nonlinear Conjugate Gradient scheme according to
Polak-Ribière.
...
...
@@ -61,23 +63,28 @@ class NonlinearCG(Minimizer):
"""
controller
=
self
.
_controller
status
=
controller
.
start
(
energy
)
if
status
!=
controller
.
CONTINUE
:
return
energy
,
status
f_k_minus_1
=
None
controller
.
reset
(
energy
)
p
=
-
energy
.
gradient
f_k_minus_1
=
None
grad_old
=
np
.
inf
# this is a dummy initialization which doesn't hurt since
# energy.gradient must be computed anyway and beta will safely evaluate
# to 0 in the first iteration
p
=
abs
(
energy
.
gradient
)
while
True
:
grad_old
=
energy
.
gradient
f_k
=
energy
.
value
energy
=
self
.
_line_searcher
.
perform_line_search
(
energy
,
p
,
f_k_minus_1
)
f_k_minus_1
=
f_k
status
=
self
.
_controller
.
check
(
energy
)
status
=
controller
.
check
(
energy
)
if
status
!=
controller
.
CONTINUE
:
return
energy
,
status
grad_new
=
energy
.
gradient
gnnew
=
energy
.
gradient_norm
beta
=
gnnew
*
gnnew
/
(
grad_new
-
grad_old
).
vdot
(
p
).
real
p
=
beta
*
p
-
grad_new
grad_old
=
energy
.
gradient
f_k
=
energy
.
value
energy
=
self
.
_line_searcher
.
perform_line_search
(
energy
,
p
,
f_k_minus_1
)
f_k_minus_1
=
f_k
test/test_minimization/test_minimizers.py
View file @
ef14fbcd
...
...
@@ -3,6 +3,8 @@ import unittest
import
numpy
as
np
from
numpy.testing
import
assert_allclose
import
d2o
import
nifty
as
ift
from
itertools
import
product
...
...
@@ -10,21 +12,21 @@ from test.common import expand
spaces
=
[
ift
.
RGSpace
([
1024
],
distances
=
0.123
),
ift
.
HPSpace
(
32
)]
minimizers
=
[
ift
.
SteepestDescent
,
ift
.
RelaxedNewton
,
ift
.
VL_BFGS
,
ift
.
ConjugateGradient
,
ift
.
NonlinearC
G
]
ift
.
ConjugateGradient
,
ift
.
NonlinearC
onjugateGradient
]
class
Test_Minimizers
(
unittest
.
TestCase
):
@
expand
(
product
(
minimizers
,
spaces
))
def
test_quadratic_minimization
(
self
,
minimizer_class
,
space
):
np
.
random
.
seed
(
42
)
d2o
.
random
.
seed
(
42
)
starting_point
=
ift
.
Field
.
from_random
(
'normal'
,
domain
=
space
)
*
10
covariance_diagonal
=
ift
.
Field
.
from_random
(
'uniform'
,
domain
=
space
)
+
0.5
covariance
=
ift
.
DiagonalOperator
(
space
,
diagonal
=
covariance_diagonal
)
required_result
=
ift
.
Field
(
space
,
val
=
1.
)
IC
=
ift
.
DefaultIteration
Controller
(
tol_abs_gradnorm
=
1e-5
)
IC
=
ift
.
GradientNorm
Controller
(
tol_abs_gradnorm
=
1e-5
)
minimizer
=
minimizer_class
(
controller
=
IC
)
energy
=
ift
.
QuadraticEnergy
(
A
=
covariance
,
b
=
required_result
,
position
=
starting_point
)
...
...
Write
Preview
Supports
Markdown
0%
Try again
or
attach a new file
.
Attach a file
Cancel
You are about to add
0
people
to the discussion. Proceed with caution.
Finish editing this message first!
Cancel
Please
register
or
sign in
to comment