Commit 816d3fab authored by Martin Reinecke's avatar Martin Reinecke

simplifications

parent 2e434668
......@@ -78,8 +78,7 @@ if __name__ == '__main__':
# Minimize the Hamiltonian
H = ift.Hamiltonian(likelihood, ic_sampling)
H = ift.EnergyAdapter(position, H)
H = H.make_invertible(ic_cg)
H = ift.EnergyAdapter(position, H, ic_cg)
# minimizer = ift.SteepestDescent(ic_newton)
H, convergence = minimizer(H)
......
......@@ -94,8 +94,7 @@ if __name__ == '__main__':
# Minimize the Hamiltonian
H = ift.Hamiltonian(likelihood)
H = ift.EnergyAdapter(position, H)
H = H.make_invertible(ic_cg)
H = ift.EnergyAdapter(position, H, ic_cg)
H, convergence = minimizer(H)
# Plot results
......
......@@ -96,8 +96,7 @@ if __name__ == '__main__':
for _ in range(N_samples)]
KL = ift.SampledKullbachLeiblerDivergence(H, samples)
KL = ift.EnergyAdapter(position, KL)
KL = KL.make_invertible(ic_cg)
KL = ift.EnergyAdapter(position, KL, ic_cg)
KL, convergence = minimizer(KL)
position = KL.position
......
......@@ -99,8 +99,7 @@ N = ift.DiagonalOperator(ift.from_global_data(d_space, var))
IC = ift.GradientNormController(tol_abs_gradnorm=1e-8)
likelihood = ift.GaussianEnergy(d, N)(R)
H = ift.Hamiltonian(likelihood, IC)
H = ift.EnergyAdapter(params, H)
H = H.make_invertible(IC)
H = ift.EnergyAdapter(params, H, IC)
# Minimize
minimizer = ift.RelaxedNewton(IC)
......
......@@ -19,7 +19,7 @@
from __future__ import absolute_import, division, print_function
from ..compat import *
from ..utilities import NiftyMetaBase, memo
from ..utilities import NiftyMetaBase
class Energy(NiftyMetaBase()):
......@@ -42,18 +42,14 @@ class Energy(NiftyMetaBase()):
this approach, intermediate results from computing e.g. the gradient can
safely be reused for e.g. the value or the metric.
Memorizing the evaluations of some quantities (using the memo decorator)
minimizes the computational effort for multiple calls.
See Also
--------
memo
Memorizing the evaluations of some quantities minimizes the computational
effort for multiple calls.
"""
def __init__(self, position):
super(Energy, self).__init__()
self._position = position
self._gradnorm = None
def at(self, position):
""" Returns a new Energy object, initialized at `position`.
......@@ -97,12 +93,13 @@ class Energy(NiftyMetaBase()):
raise NotImplementedError
@property
@memo
def gradient_norm(self):
"""
float : L2-norm of the gradient at given `position`.
"""
return self.gradient.norm()
if self._gradnorm is None:
self._gradnorm = self.gradient.norm()
return self._gradnorm
@property
def metric(self):
......@@ -128,49 +125,3 @@ class Energy(NiftyMetaBase()):
`dir`. If None, the step size is not limited.
"""
return None
def make_invertible(self, controller, preconditioner=None):
from .iteration_controller import IterationController
if not isinstance(controller, IterationController):
raise TypeError
return MetricInversionEnabler(self, controller, preconditioner)
class MetricInversionEnabler(Energy):
def __init__(self, ene, controller, preconditioner):
super(MetricInversionEnabler, self).__init__(ene.position)
self._energy = ene
self._controller = controller
self._preconditioner = preconditioner
def at(self, position):
return MetricInversionEnabler(
self._energy.at(position), self._controller, self._preconditioner)
@property
def position(self):
return self._energy.position
@property
def value(self):
return self._energy.value
@property
def gradient(self):
return self._energy.gradient
@property
def metric(self):
from ..operators.linear_operator import LinearOperator
from ..operators.inversion_enabler import InversionEnabler
curv = self._energy.metric
if self._preconditioner is None:
precond = None
elif isinstance(self._preconditioner, LinearOperator):
precond = self._preconditioner
elif isinstance(self._preconditioner, Energy):
precond = self._preconditioner.at(self.position).metric
return InversionEnabler(curv, self._controller, precond)
def longest_step(self, dir):
return self._energy.longest_step(dir)
......@@ -7,19 +7,35 @@ import numpy as np
class EnergyAdapter(Energy):
def __init__(self, position, op):
def __init__(self, position, op, controller=None, preconditioner=None):
super(EnergyAdapter, self).__init__(position)
self._op = op
self._val = self._grad = self._metric = None
self._controller = controller
self._preconditioner = preconditioner
def at(self, position):
return EnergyAdapter(position, self._op)
return EnergyAdapter(position, self._op, self._controller,
self._preconditioner)
def _fill_all(self):
tmp = self._op(Linearization.make_var(self._position))
self._val = tmp.val.local_data[()]
self._grad = tmp.gradient
self._metric = tmp.metric
if self._controller is not None:
from ..operators.linear_operator import LinearOperator
from ..operators.inversion_enabler import InversionEnabler
if self._preconditioner is None:
precond = None
elif isinstance(self._preconditioner, LinearOperator):
precond = self._preconditioner
elif isinstance(self._preconditioner, Energy):
precond = self._preconditioner.at(self._position).metric
self._metric = InversionEnabler(tmp._metric, self._controller,
precond)
else:
self._metric = tmp._metric
@property
def value(self):
......
......@@ -25,7 +25,6 @@ from .line_search_strong_wolfe import LineSearchStrongWolfe
class L_BFGS(DescentMinimizer):
def __init__(self, controller, line_searcher=LineSearchStrongWolfe(),
max_history_length=5):
super(L_BFGS, self).__init__(controller=controller,
......
Markdown is supported
0% or
You are about to add 0 people to the discussion. Proceed with caution.
Finish editing this message first!
Please register or sign in to comment