Neel Shah / NIFTy · Commits

Commit d244790a
authored Jun 09, 2021 by Jakob Knollmüller
towards IC for ADVIOptimizer
parent 060f33ff
Changes: 5 files
demos/meanfield_inference.py
@@ -54,8 +54,8 @@ if __name__ == "__main__":
     position_fc = ift.from_random(H.domain) * 0.1
     position_mf = ift.from_random(H.domain) * 0.1
-    fc = ift.FullCovarianceVI(position_fc, H, 3, True, initial_sig=0.01)
-    mf = ift.MeanFieldVI(position_mf, H, 3, True, initial_sig=0.01)
+    fc = ift.library.variational_models.FullCovarianceVI(position_fc, H, 3, True, initial_sig=0.01)
+    mf = ift.library.variational_models.MeanFieldVI(position_mf, H, 3, True, initial_sig=0.01)
     minimizer_fc = ift.ADVIOptimizer(20, eta=0.1)
     minimizer_mf = ift.ADVIOptimizer(10)
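Once ADVIOptimizer accepts an IterationController instead of a fixed step count (the change this commit starts in src/minimization/stochastic_minimizer.py), the demo's minimizer construction would presumably move to something like the sketch below. This is an illustration only: it assumes the usual "import nifty7 as ift" of the demos, that the StochasticAbsDeltaEnergyController added below ends up exported at the package level, and arbitrary threshold values; in this commit the demo itself still passes the plain step counts 20 and 10.

    # Hypothetical follow-up: drive the ADVI updates with an iteration
    # controller instead of a hard-coded number of steps.
    ic = ift.StochasticAbsDeltaEnergyController(
        deltaE=0.5, iteration_limit=100, name="FC", memory_length=20)
    minimizer_fc = ift.ADVIOptimizer(ic, eta=0.1)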
src/minimization/iteration_controllers.py
@@ -421,3 +421,79 @@ class AbsDeltaEnergyController(IterationController):
             return self.CONVERGED
         return self.CONTINUE
+
+
+class StochasticAbsDeltaEnergyController(IterationController):
+    """An iteration controller checking the standard deviation over a
+    period of iterations. Convergence is reported once this quantity
+    falls below the given threshold.
+
+    Parameters
+    ----------
+    deltaE : float
+        If the standard deviation of the last energies is below this
+        value, the convergence counter will be increased in this iteration.
+    convergence_level : int, default=1
+        The number which the convergence counter must reach before the
+        iteration is considered to be converged.
+    iteration_limit : int, optional
+        The maximum number of iterations that will be carried out.
+    name : str, optional
+        If supplied, this string and some diagnostic information will be
+        printed after every iteration.
+    memory_length : int, default=10
+        The number of last energies considered for determining convergence.
+    """
+
+    def __init__(self, deltaE, convergence_level=1, iteration_limit=None,
+                 name=None, memory_length=10):
+        super(StochasticAbsDeltaEnergyController, self).__init__()
+        self._deltaE = deltaE
+        self._convergence_level = convergence_level
+        self._iteration_limit = iteration_limit
+        self._name = name
+        self.memory_length = memory_length
+
+    @append_history
+    def start(self, energy):
+        self._itcount = -1
+        self._ccount = 0
+        self._memory = []
+        return self.check(energy)
+
+    @append_history
+    def check(self, energy):
+        self._itcount += 1
+        inclvl = False
+        Eval = energy.value
+        self._memory.append(Eval)
+        if len(self._memory) > self.memory_length:
+            self._memory = self._memory[1:]
+        diff = np.std(self._memory)
+        if self._itcount > 0:
+            if diff < self._deltaE:
+                inclvl = True
+        if inclvl:
+            self._ccount += 1
+        else:
+            self._ccount = max(0, self._ccount - 1)
+
+        # report
+        if self._name is not None:
+            logger.info(
+                "{}: Iteration #{} energy={:.6E} diff={:.6E} crit={:.1E} clvl={}"
+                .format(self._name, self._itcount, Eval, diff, self._deltaE,
+                        self._ccount))
+
+        # Are we done?
+        if self._iteration_limit is not None:
+            if self._itcount >= self._iteration_limit:
+                logger.warning(
+                    "{} Iteration limit reached. Assuming convergence".format(
+                        "" if self._name is None else self._name + ": "))
+                return self.CONVERGED
+        if self._ccount >= self._convergence_level:
+            return self.CONVERGED
+        return self.CONTINUE
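As a self-contained illustration of the criterion this new controller encodes, the plain-numpy sketch below (the helper name and the sample energy trace are made up for illustration) reports convergence once the standard deviation of the most recent memory_length energy values falls below deltaE:

    import numpy as np

    def energies_converged(energies, deltaE, memory_length=10):
        # Look only at the last `memory_length` energy values, as the
        # controller's _memory list does, and compare their spread to deltaE.
        window = energies[-memory_length:]
        return len(window) > 1 and np.std(window) < deltaE

    # A noisy energy trace that flattens out around 2.0:
    trace = [10.0, 5.0, 3.0, 2.4, 2.1, 2.05, 1.98, 2.02,
             2.01, 1.99, 2.00, 2.01, 1.99, 2.00]
    print(energies_converged(trace, deltaE=0.1))   # True: the tail has settled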
src/minimization/kl_energies.py

 # This program is free software: you can redistribute it and/or modify
 # it under the terms of the GNU General Public License as published by
 # the Free Software Foundation, either version 3 of the License, or
 # (at your option) any later version.
src/minimization/stochastic_minimizer.py
@@ -36,16 +36,21 @@ class ADVIOptimizer(Minimizer):
         This quantity prevents division by zero.
     epsilon: positive float
         A small value guarantees Robbins and Monro conditions.
+    resample: bool
+        Whether the loss function is resampled for the next iteration.
+        Stochastic losses require resampling, deterministic ones do not.
     """
-    def __init__(self, steps, eta=1, alpha=0.1, tau=1, epsilon=1e-16):
+    def __init__(self, controller, eta=1, alpha=0.1, tau=1, epsilon=1e-16, resample=True):
         self.alpha = alpha
         self.eta = eta
         self.tau = tau
         self.epsilon = epsilon
         self.counter = 1
-        self.steps = steps
+        self._controller = controller
+        # self.steps = steps
         self.s = None
+        self.resample = resample

     def _step(self, position, gradient):
         self.s = self.alpha * gradient ** 2 + (1 - self.alpha) * self.s
@@ -57,15 +62,30 @@ class ADVIOptimizer(Minimizer):
     def __call__(self, E):
         from ..utilities import myassert
+        controller = self._controller
+        status = controller.start(energy)
+        if status != controller.CONTINUE:
+            return energy, status
         if self.s is None:
             self.s = E.gradient**2
         # FIXME come up with something to determine convergence
         convergence = 0
-        for i in range(self.steps):
+        while True:
+            # check if position is at a flat point
+            if energy.gradient_norm == 0:
+                return energy, controller.CONVERGED
             x = self._step(E.position, E.gradient)
-            E = E.resample_at(x)
+            if self.resample:
+                E = E.resample_at(x)
             myassert(isinstance(E, Energy))
             myassert(x.domain is E.position.domain)
+            energy = new_energy
+            status = self._controller.check(energy)
+            if status != controller.CONTINUE:
+                return energy, status
         return E, convergence

     def reset(self):
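The new __call__ above is still mid-refactor: the parameter is named E, while the controller calls refer to an energy / new_energy name that is never bound, and the old step-count loop coexists with the new while-True loop. A small, NIFTy-independent sketch of the controller-driven pattern the commit is moving toward (all names and the plain gradient-descent update are illustrative, not the NIFTy API) is:

    import numpy as np

    CONTINUE, CONVERGED = 0, 1

    class ToyController:
        """Toy stand-in for an IterationController: convergence is declared once
        the standard deviation of the last few energy values is small enough."""
        def __init__(self, deltaE, memory_length=10, iteration_limit=None):
            self.deltaE = deltaE
            self.memory_length = memory_length
            self.iteration_limit = iteration_limit

        def start(self, energy_value):
            self._memory = []
            self._itcount = 0
            return self.check(energy_value)

        def check(self, energy_value):
            self._itcount += 1
            self._memory = (self._memory + [energy_value])[-self.memory_length:]
            if self.iteration_limit is not None and self._itcount >= self.iteration_limit:
                return CONVERGED
            if len(self._memory) > 1 and np.std(self._memory) < self.deltaE:
                return CONVERGED
            return CONTINUE

    def minimize(energy_fn, grad_fn, x, controller, eta=0.1):
        # The minimizer no longer runs a fixed number of steps; after every
        # update it asks the controller whether to continue.
        status = controller.start(energy_fn(x))
        while status == CONTINUE:
            x = x - eta * grad_fn(x)
            status = controller.check(energy_fn(x))
        return x

    # Usage: descend E(x) = (x - 3)**2 until the energy trace flattens.
    x_opt = minimize(lambda x: (x - 3)**2, lambda x: 2.0 * (x - 3), 0.0,
                     ToyController(deltaE=1e-8, iteration_limit=1000))
    print(round(x_opt, 3))   # ~3.0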
test/test_minimizers.py
@@ -44,7 +44,7 @@ quadratic_only_minimizers = [
     'ift.ConjugateGradient(IC)',
     'ift.minimization.scipy_minimizer._ScipyCG(tol=1e-5, maxiter=300)']

-slow_minimizers = ['ift.SteepestDescent(IC)']
+slow_minimizers = ['ift.SteepestDescent(IC)', 'ift.ADVIOptimizer(10, resample=False)']

 @pmp('minimizer', minimizers + newton_minimizers + quadratic_only_minimizers +