Commit 93f178d9 authored by Jakob Knollmüller

should be working?

parent 0c869870
Pipeline #103173 passed with stages in 13 minutes and 52 seconds
@@ -56,8 +56,9 @@ if __name__ == "__main__":
     fc = ift.library.variational_models.FullCovarianceVI(position_fc, H, 3, True, initial_sig=0.01)
     mf = ift.library.variational_models.MeanFieldVI(position_mf, H, 3, True, initial_sig=0.01)
-    minimizer_fc = ift.ADVIOptimizer(20, eta=0.1)
-    minimizer_mf = ift.ADVIOptimizer(10)
+    IC = ift.StochasticAbsDeltaEnergyController(10, iteration_limit=1000, name='optimizer')
+    minimizer_fc = ift.ADVIOptimizer(IC, eta=0.1)
+    minimizer_mf = ift.ADVIOptimizer(IC)
     niter = 25
     for ii in range(niter):
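For context, a controller-driven optimizer like this is typically advanced in a loop such as the sketch below. This is a hedged illustration, not the demo verbatim: `kl_fc` and `kl_mf` are hypothetical stand-ins for the stochastic KL energies the demo builds around `fc` and `mf`, which are not shown in this hunk.

# Hedged sketch of the surrounding demo loop; `kl_fc`/`kl_mf` are stand-in
# names for the stochastic KL energies derived from `fc` and `mf` above.
for ii in range(niter):
    # Each call now iterates internally until IC reports convergence or
    # hits iteration_limit=1000, returning the updated energy and a status.
    kl_fc, _ = minimizer_fc(kl_fc)
    kl_mf, _ = minimizer_mf(kl_mf)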
@@ -61,7 +61,7 @@ from .probing import probe_with_posterior_samples, probe_diagonal, \
 from .minimization.line_search import LineSearch
 from .minimization.iteration_controllers import (
     IterationController, GradientNormController, DeltaEnergyController,
-    GradInfNormController, AbsDeltaEnergyController)
+    GradInfNormController, AbsDeltaEnergyController, StochasticAbsDeltaEnergyController)
 from .minimization.minimizer import Minimizer
 from .minimization.conjugate_gradient import ConjugateGradient
 from .minimization.nonlinear_cg import NonlinearCG
@@ -116,6 +116,7 @@ class StochasticEnergyAdapter(Energy):
         self._comm = comm
         self._local_ops = local_ops
         self._n_samples = n_samples
+        self._nanisinf = nanisinf
         lin = Linearization.make_var(position)
         v, g = [], []
         for lop in self._local_ops:
@@ -447,7 +447,7 @@ class StochasticAbsDeltaEnergyController(IterationController):
     def __init__(self, deltaE, convergence_level=1, iteration_limit=None,
                  name=None, memory_length=10):
-        super(AbsDeltaEnergyController, self).__init__()
+        super(StochasticAbsDeltaEnergyController, self).__init__()
         self._deltaE = deltaE
         self._convergence_level = convergence_level
         self._iteration_limit = iteration_limit
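The replaced line was a copy-paste slip: in Python, `super(cls, obj)` requires `obj` to be an instance or subtype of `cls`, and `StochasticAbsDeltaEnergyController` inherits from `IterationController`, not from `AbsDeltaEnergyController`, so the old call would raise a `TypeError` at construction time. A minimal reproduction with generic stand-in classes (not the library's):

class Base:                # stands in for IterationController
    def __init__(self):
        self.initialized = True

class Sibling(Base):       # stands in for AbsDeltaEnergyController
    pass

class Stochastic(Base):    # stands in for StochasticAbsDeltaEnergyController
    def __init__(self):
        # Wrong: `self` is not a Sibling instance, so this would raise
        # "TypeError: super(type, obj): obj must be an instance or subtype of type".
        # super(Sibling, self).__init__()
        super(Stochastic, self).__init__()  # correct: name the defining class

Stochastic()  # constructs cleanly with the corrected call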
@@ -468,7 +468,7 @@ class StochasticAbsDeltaEnergyController(IterationController):
         inclvl = False
         Eval = energy.value
         self._memory.append(Eval)
-        if len(self._memory>self.memory_length):
+        if len(self._memory) > self.memory_length:
             self._memory = self._memory[1:]
         diff = np.std(self._memory)
         if self._itcount > 0:
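For reference, the convergence criterion visible in this hunk can be sketched in plain NumPy: keep a sliding window of the most recent energy values and treat the optimization as stalled once their standard deviation drops below the `deltaE` threshold. The function below is an illustrative stand-in, not the controller's actual implementation:

import numpy as np

def window_has_stalled(memory, new_value, delta_e, memory_length=10):
    """Append the newest (noisy) energy value, trim the window to
    `memory_length` entries, and compare the spread of the remembered
    values against the threshold `delta_e`."""
    memory.append(new_value)
    if len(memory) > memory_length:
        del memory[0]
    # A full window with small standard deviation means the stochastic
    # energy is fluctuating around a fixed level, i.e. no longer improving.
    return len(memory) == memory_length and np.std(memory) < delta_e

# Usage: feed in successive energy values from a stochastic minimizer.
mem = []
for e in [5.0, 4.2, 4.05, 4.01, 3.99, 4.0, 4.01, 3.99, 4.0, 4.0, 4.0, 4.0]:
    stalled = window_has_stalled(mem, e, delta_e=0.1)
print(stalled)  # True once the last ten values barely move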
@@ -60,7 +60,7 @@ class ADVIOptimizer(Minimizer):
             self.counter += 1
         return new_position

-    def __call__(self, E):
+    def __call__(self, energy):
         from ..utilities import myassert
         controller = self._controller
@@ -69,25 +69,23 @@ class ADVIOptimizer(Minimizer):
             return energy, status
         if self.s is None:
-            self.s = E.gradient ** 2
+            self.s = energy.gradient ** 2
         while True:
             # check if position is at a flat point
             if energy.gradient_norm == 0:
                 return energy, controller.CONVERGED
-            x = self._step(E.position, E.gradient)
+            x = self._step(energy.position, energy.gradient)
             if self.resample:
-                E = E.resample_at(x)
-            myassert(isinstance(E, Energy))
-            myassert(x.domain is E.position.domain)
+                energy = energy.resample_at(x)
+            myassert(isinstance(energy, Energy))
+            myassert(x.domain is energy.position.domain)
-            energy = new_energy
+            energy = energy.at(x)
             status = self._controller.check(energy)
             if status != controller.CONTINUE:
                 return energy, status
-        return E, convergence

     def reset(self):
         self.counter = 1
         self.s = None
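The rewritten `__call__` follows the standard controller-driven minimizer pattern: start the controller once, step in an open loop, and let the controller decide after every step whether to continue. A self-contained schematic of that pattern with toy classes (not the library code itself):

class ToyController:
    """Stand-in for an IterationController that only counts iterations."""
    CONVERGED, CONTINUE = 0, 1

    def __init__(self, iteration_limit):
        self._limit = iteration_limit

    def start(self, energy):
        self._count = 0
        return self.CONTINUE

    def check(self, energy):
        self._count += 1
        return self.CONVERGED if self._count >= self._limit else self.CONTINUE


def minimize(energy, step, controller):
    # The minimizer never decides convergence itself; it defers to the
    # controller after every step, mirroring the rewritten __call__.
    status = controller.start(energy)
    if status != controller.CONTINUE:
        return energy, status
    while True:
        energy = step(energy)
        status = controller.check(energy)
        if status != controller.CONTINUE:
            return energy, status

# Usage: halve a scalar "energy" until the controller stops the loop.
print(minimize(1024.0, lambda e: e / 2, ToyController(10)))  # (1.0, 0)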
@@ -25,7 +25,7 @@ from .common import setup_function, teardown_function
 pmp = pytest.mark.parametrize
 IC = ift.GradientNormController(tol_abs_gradnorm=1e-5, iteration_limit=1000)
+SIC = ift.StochasticAbsDeltaEnergyController(1e-8, iteration_limit=100000)
 spaces = [ift.RGSpace([1024], distances=0.123), ift.HPSpace(32)]
 minimizers = [
@@ -44,7 +44,7 @@ quadratic_only_minimizers = [
     'ift.ConjugateGradient(IC)',
     'ift.minimization.scipy_minimizer._ScipyCG(tol=1e-5, maxiter=300)'
 ]
-slow_minimizers = ['ift.SteepestDescent(IC)', 'ift.ADVIOptimizer(10, resample=False)']
+slow_minimizers = ['ift.SteepestDescent(IC)', 'ift.ADVIOptimizer(SIC, resample=False)']

 @pmp('minimizer', minimizers + newton_minimizers + quadratic_only_minimizers +