Commit b92b75fb, authored 8 years ago by Martin Reinecke

CG improvements

parent 0c8a2610
Part of merge request !155 "Tweak CG"
Showing 1 changed file: nifty/minimization/conjugate_gradient.py, with 16 additions and 12 deletions
@@ -20,6 +20,7 @@ from __future__ import division
 import numpy as np
 
 from keepers import Loggable
+from nifty import Field
 
 class ConjugateGradient(Loggable, object):
@@ -75,7 +76,7 @@ class ConjugateGradient(Loggable, object):
     """
 
-    def __init__(self, convergence_tolerance=1E-4, convergence_level=3,
+    def __init__(self, convergence_tolerance=1E-8, convergence_level=3,
                  iteration_limit=None, reset_count=None,
                  preconditioner=None, callback=None):
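The default stopping tolerance tightens from 1E-4 to 1E-8 here. As a minimal construction sketch, assuming the class is importable from the changed file's path (the keyword names are taken from the signature in this hunk; how the solver is then invoked is outside this diff):

    # Hypothetical usage; the import path is inferred from the file location above.
    from nifty.minimization.conjugate_gradient import ConjugateGradient

    # Pass convergence_tolerance explicitly to recover the old, looser 1E-4 default.
    cg = ConjugateGradient(convergence_tolerance=1E-4,
                           convergence_level=3,
                           iteration_limit=1000)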
@@ -121,13 +122,13 @@ class ConjugateGradient(Loggable, object):
         r = b - A(x0)
         d = self.preconditioner(r)
-        previous_gamma = r.vdot(d)
+        previous_gamma = (r.vdot(d)).real
         if previous_gamma == 0:
             self.logger.info("The starting guess is already perfect solution "
                              "for the inverse problem.")
             return x0, self.convergence_level+1
 
-        norm_b = np.sqrt(b.vdot(b))
-        x = x0
+        norm_b = np.sqrt((b.vdot(b)).real)
+        x = x0.copy()
         convergence = 0
         iteration_number = 1
         self.logger.info("Starting conjugate gradient.")
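Two changes land in this hunk. For complex fields with a Hermitian operator A, inner products such as r.vdot(d) and b.vdot(b) are real in exact arithmetic, but floating-point evaluation can leave a tiny imaginary residue; the added .real casts keep previous_gamma and norm_b, and everything derived from them, strictly real. Separately, x = x0.copy() replaces x = x0 because the loop updates x in place, which previously mutated the caller's starting guess. A small NumPy illustration of both effects, with plain arrays standing in for NIFTy Fields:

    import numpy as np

    rng = np.random.default_rng(0)
    d = rng.normal(size=4) + 1j * rng.normal(size=4)
    H = rng.normal(size=(4, 4)) + 1j * rng.normal(size=(4, 4))
    H = H + H.conj().T              # Hermitian, so vdot(d, H @ d) is real in exact math

    gamma = np.vdot(d, H @ d)       # complex128, typically with a tiny rounding residue in .imag
    gamma = np.vdot(d, H @ d).real  # the cast this commit applies throughout

    x0 = np.zeros(4, dtype=complex)
    x = x0                          # old behaviour: x aliases x0,
    x += d                          # so the in-place update also changed x0
    assert x0[0] != 0

    x0 = np.zeros(4, dtype=complex)
    x = x0.copy()                   # new behaviour: the caller's start vector is untouched
    x += d
    assert x0[0] == 0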
@@ -137,7 +138,7 @@ class ConjugateGradient(Loggable, object):
                 self.callback(x, iteration_number)
 
             q = A(d)
-            alpha = previous_gamma/d.vdot(q)
+            alpha = previous_gamma/d.vdot(q).real
 
             if not np.isfinite(alpha):
                 self.logger.error("Alpha became infinite! Stopping.")
@@ -146,7 +147,7 @@ class ConjugateGradient(Loggable, object):
             x += d * alpha
 
             reset = False
-            if alpha.real < 0:
+            if alpha < 0:
                 self.logger.warn("Positive definiteness of A violated!")
                 reset = True
             if self.reset_count is not None:
@@ -156,24 +157,26 @@ class ConjugateGradient(Loggable, object):
                 r = b - A(x)
             else:
                 r -= q * alpha
+            #tmp=r.val.get_full_data()
+            #tmp.imag=0.
+            #r=Field(r.domain,val=tmp)
 
             s = self.preconditioner(r)
-            gamma = r.vdot(s)
+            gamma = r.vdot(s).real
 
-            if gamma.real < 0:
+            if gamma < 0:
                 self.logger.warn("Positive definitness of preconditioner "
                                  "violated!")
 
             beta = max(0, gamma/previous_gamma)
+            print "beta:", beta
 
             delta = np.sqrt(gamma)/norm_b
+            print "delta:", delta
 
             self.logger.debug("Iteration : %08u alpha = %3.1E "
                               "beta = %3.1E delta = %3.1E" %
                               (iteration_number,
-                               alpha, beta, delta))
+                               np.real(alpha), np.real(beta), np.real(delta)))
 
             if gamma == 0:
                 convergence = self.convergence_level+1
@@ -196,6 +199,7 @@ class ConjugateGradient(Loggable, object):
 
             d = s + d * beta
+            print "iter:", iteration_number
 
             iteration_number += 1
             previous_gamma = gamma
 
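To see the patched control flow in one piece: below is a minimal, self-contained NumPy sketch of the same preconditioned conjugate gradient iteration, including the .real casts and the x0.copy() from this commit. It is an illustration, not NIFTy's API; cg_sketch, the precondition callable, and the plain delta < tol test stand in for the class and its convergence_level machinery, and the sketch is Python 3 while the patched file is Python 2 era (note the bare print statements in the diff).

    import numpy as np

    def cg_sketch(A, b, x0, precondition=lambda r: r, tol=1E-8, iteration_limit=500):
        # A is a callable applying a Hermitian, positive definite operator.
        r = b - A(x0)
        d = precondition(r)
        previous_gamma = np.vdot(r, d).real
        if previous_gamma == 0:
            return x0                    # the starting guess already solves A x = b
        norm_b = np.sqrt(np.vdot(b, b).real)
        x = x0.copy()                    # never mutate the caller's start vector

        for _ in range(iteration_limit):
            q = A(d)
            alpha = previous_gamma / np.vdot(d, q).real
            if not np.isfinite(alpha):
                break                    # breakdown, as the patched code also guards
            x += d * alpha
            r -= q * alpha
            s = precondition(r)
            gamma = np.vdot(r, s).real   # mathematically real; the cast drops rounding residue
            if gamma == 0:
                break                    # residual vanished exactly
            beta = max(0., gamma / previous_gamma)
            delta = np.sqrt(gamma) / norm_b
            if delta < tol:              # simplified stand-in for the convergence_level logic
                break
            d = s + d * beta
            previous_gamma = gamma
        return x

    # Smoke test on a small Hermitian positive definite system.
    rng = np.random.default_rng(42)
    M = rng.normal(size=(5, 5)) + 1j * rng.normal(size=(5, 5))
    M = M @ M.conj().T + 5 * np.eye(5)   # Hermitian positive definite
    b = rng.normal(size=5) + 1j * rng.normal(size=5)
    x = cg_sketch(lambda v: M @ v, b, np.zeros(5, dtype=complex))
    print(np.allclose(M @ x, b, atol=1E-6))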