Commit edf74c43 authored by Theo Steininger

Fixed basic demos.

parent ef14fbcd
+import d2o
 from nifty import *
 import plotly.offline as pl
@@ -8,7 +11,7 @@ from mpi4py import MPI
 comm = MPI.COMM_WORLD
 rank = comm.rank
-np.random.seed(42)
+d2o.random.seed(42)
 class AdjointFFTResponse(LinearOperator):
@@ -40,7 +43,8 @@ class AdjointFFTResponse(LinearOperator):
 if __name__ == "__main__":
-    distribution_strategy = 'not'
+    nifty_configuration['default_distribution_strategy'] = 'fftw'
+    nifty_configuration['harmonic_rg_base'] = 'real'
     # Set up position space
     s_space = RGSpace([128, 128])
@@ -51,18 +55,16 @@ if __name__ == "__main__":
     h_space = fft.target[0]
     # Setting up power space
-    p_space = PowerSpace(h_space, distribution_strategy=distribution_strategy)
+    p_space = PowerSpace(h_space)
     # Choosing the prior correlation structure and defining
     # correlation operator
     p_spec = (lambda k: (42 / (k + 1) ** 3))
-    S = create_power_operator(h_space, power_spectrum=p_spec,
-                              distribution_strategy=distribution_strategy)
+    S = create_power_operator(h_space, power_spectrum=p_spec)
     # Drawing a sample sh from the prior distribution in harmonic space
-    sp = Field(p_space, val=p_spec,
-               distribution_strategy=distribution_strategy)
+    sp = Field(p_space, val=p_spec)
     sh = sp.power_synthesize(real_signal=True)
     ss = fft.adjoint_times(sh)
@@ -95,9 +97,17 @@ if __name__ == "__main__":
     #                          iteration_limit=50,
     #                          callback=convergence_measure)
-    minimizer = RelaxedNewton(convergence_tolerance=0,
-                              iteration_limit=1,
-                              callback=convergence_measure)
+    controller = GradientNormController(iteration_limit=50,
+                                        callback=convergence_measure)
+    minimizer = VL_BFGS(controller=controller)
+    controller = GradientNormController(iteration_limit=1,
+                                        callback=convergence_measure)
+    minimizer = RelaxedNewton(controller=controller)
     #
     # minimizer = VL_BFGS(convergence_tolerance=0,
     #                     iteration_limit=50,
@@ -105,9 +115,6 @@ if __name__ == "__main__":
     #                     max_history_length=3)
     #
-    inverter = ConjugateGradient(convergence_level=3,
-                                 convergence_tolerance=1e-5,
-                                 preconditioner=None)
     # Setting starting position
     m0 = Field(h_space, val=.0)
@@ -116,15 +123,22 @@ if __name__ == "__main__":
     D0 = energy.curvature
     # Solving the problem analytically
-    m0 = D0.inverse_times(j)
-    sample_variance = Field(sh.domain, val=0.)
-    sample_mean = Field(sh.domain, val=0.)
-    # sampling the uncertainty map
-    n_samples = 10
-    for i in range(n_samples):
-        sample = fft(sugar.generate_posterior_sample(0., D0))
-        sample_variance += sample**2
-        sample_mean += sample
-    variance = (sample_variance - sample_mean**2)/n_samples
+    # m0 = D0.inverse_times(j)
     m = minimizer(energy)[0].position
     plotter = plotting.RG2DPlotter()
+    plotter(ss, path='signal.html')
+    plotter(fft.inverse_times(m), path='m.html')
+    # sample_variance = Field(sh.domain, val=0.)
+    # sample_mean = Field(sh.domain, val=0.)
+    # # sampling the uncertainty map
+    # n_samples = 10
+    # for i in range(n_samples):
+    #     sample = fft(sugar.generate_posterior_sample(0., D0))
+    #     sample_variance += sample**2
+    #     sample_mean += sample
+    # variance = (sample_variance - sample_mean**2)/n_samples
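The demo changes above all follow one pattern: the stopping policy that used to be spread over per-minimizer keywords (convergence_tolerance, iteration_limit, callback) now lives in a reusable GradientNormController object. A minimal sketch of the new construction, using only names from this diff and assuming the demo's namespace (from nifty import *, with energy and convergence_measure defined as in the demo):

    controller = GradientNormController(iteration_limit=50,
                                        callback=convergence_measure)
    minimizer = VL_BFGS(controller=controller)
    # The same controller type drives any minimizer touched by this commit:
    # minimizer = RelaxedNewton(controller=controller)

    # As in the demo, the first element of the minimizer's return value
    # carries the optimized position.
    m = minimizer(energy)[0].position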
@@ -48,8 +48,8 @@ class ConjugateGradient(Minimizer):
     def __init__(self,
                  controller=GradientNormController(iteration_limit=100),
                  preconditioner=None):
+        super(ConjugateGradient, self).__init__(controller=controller)
         self._preconditioner = preconditioner
-        self._controller = controller

     def __call__(self, energy):
         """ Runs the conjugate gradient minimization.
......
@@ -45,8 +45,7 @@ class DescentMinimizer(Minimizer):
     def __init__(self,
                  controller=GradientNormController(iteration_limit=100),
                  line_searcher=LineSearchStrongWolfe()):
-        super(DescentMinimizer, self).__init__()
-        self._controller = controller
+        super(DescentMinimizer, self).__init__(controller=controller)
         self.line_searcher = line_searcher

     def __call__(self, energy):
......
@@ -22,8 +22,10 @@ from .iteration_controller import IterationController
 class GradientNormController(IterationController):
     def __init__(self, tol_abs_gradnorm=None, tol_rel_gradnorm=None,
-                 convergence_level=1, iteration_limit=None, callback=None):
-        super(GradientNormController, self).__init__(callback=callback)
+                 convergence_level=1, iteration_limit=None, callback=None,
+                 logger=None):
+        super(GradientNormController, self).__init__(callback=callback,
+                                                     logger=logger)
         self._tol_abs_gradnorm = tol_abs_gradnorm
         self._tol_rel_gradnorm = tol_rel_gradnorm
         self._tol_rel_gradnorm_now = None
@@ -56,7 +58,7 @@ class GradientNormController(IterationController):
             self._convergence_count += 1
         if self._iteration_limit is not None:
-            if self._iteration_count >= self._iteration_limit:
+            if self._iteration_count > self._iteration_limit:
                 self._print_debug_info(energy)
                 self.logger.info("Reached iteration limit. Stopping.")
                 return self.STOPPED
......
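The comparison change from >= to > above shifts the stopping boundary by one pass: with iteration_limit=N the old check stops the run as soon as the counter reaches N, while the new one lets it exceed N first. A standalone toy (plain Python, not NIFTy code) that makes the off-by-one visible:

    def passes_allowed(limit, strict):
        """Count loop passes before a '>'-style (strict) or '>='-style check stops."""
        count = 0
        while True:
            count += 1  # one pass of the minimization loop
            stop = (count > limit) if strict else (count >= limit)
            if stop:
                return count

    print(passes_allowed(50, strict=False))  # 50 -> old '>=' comparison
    print(passes_allowed(50, strict=True))   # 51 -> new '>' comparison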
@@ -45,10 +45,12 @@ class IterationController(
     CONVERGED, CONTINUE, STOPPED, ERROR = list(range(4))

-    def __init__(self, callback=None):
+    def __init__(self, callback=None, logger=None):
         self._iteration_count = 0
         self._convergence_count = 0
         self.callback = callback
+        if logger is not None:
+            self.logger = logger

     @property
     def iteration_count(self):
......
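Taken together with the GradientNormController hunk above, this lets a caller inject a logger: the controller forwards it, and IterationController overrides its default logger only when one is actually supplied. A hedged sketch of the intended call site (the logger name is illustrative, not from this commit):

    import logging

    controller = GradientNormController(iteration_limit=50,
                                        logger=logging.getLogger('demo'))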
@@ -27,6 +27,14 @@ class Minimizer(
         with_metaclass(NiftyMeta, type('NewBase', (Loggable, object), {}))):
     """ A base class used by all minimizers.
     """
+    def __init__(self, controller):
+        self._controller = controller
+        # overwrite the logger from the controller to show the context correctly
+        self._controller.logger = self.logger
+
+    @property
+    def controller(self):
+        return self._controller

     @abc.abstractmethod
     def __call__(self, energy):
......
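This new base-class __init__ is what lets the ConjugateGradient and DescentMinimizer hunks above collapse into a single super() call. A stripped-down, runnable sketch of the ownership pattern (Controller and ToyDescent are stand-ins, not library code; Loggable/NiftyMeta and the real logging machinery are omitted):

    import logging

    class Controller(object):
        """Stand-in for an IterationController with a default logger."""
        def __init__(self):
            self.logger = logging.getLogger('Controller')

    class Minimizer(object):
        """Stand-in for the new base class: it owns the controller."""
        def __init__(self, controller):
            self.logger = logging.getLogger(type(self).__name__)
            self._controller = controller
            # Overwrite the controller's logger so messages are attributed
            # to the minimizer that is actually driving it.
            self._controller.logger = self.logger

        @property
        def controller(self):
            return self._controller

    class ToyDescent(Minimizer):
        def __init__(self, controller, step_size=0.1):
            # Subclasses delegate controller handling to the base class ...
            super(ToyDescent, self).__init__(controller=controller)
            # ... and keep only their own extras, as in the hunks above.
            self.step_size = step_size

    print(ToyDescent(Controller()).controller.logger.name)  # ToyDescent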
@@ -22,6 +22,7 @@ import numpy as np
 from .minimizer import Minimizer
 from .line_searching import LineSearchStrongWolfe
+from .iteration_controlling import GradientNormController

 class NonlinearConjugateGradient(Minimizer):
@@ -40,8 +41,9 @@ class NonlinearConjugateGradient(Minimizer):
     """

-    def __init__(self, controller, line_searcher=LineSearchStrongWolfe()):
-        self._controller = controller
+    def __init__(self, controller=GradientNormController(iteration_limit=100),
+                 line_searcher=LineSearchStrongWolfe()):
+        super(NonlinearConjugateGradient, self).__init__(controller=controller)
         self._line_searcher = line_searcher

     def __call__(self, energy):
......
@@ -18,10 +18,12 @@
 from .descent_minimizer import DescentMinimizer
 from .line_searching import LineSearchStrongWolfe
+from .iteration_controlling import GradientNormController

 class RelaxedNewton(DescentMinimizer):
-    def __init__(self, controller, line_searcher=LineSearchStrongWolfe()):
+    def __init__(self, controller=GradientNormController(iteration_limit=100),
+                 line_searcher=LineSearchStrongWolfe()):
         super(RelaxedNewton, self).__init__(controller=controller,
                                             line_searcher=line_searcher)
......
@@ -23,11 +23,12 @@ import numpy as np
 from .descent_minimizer import DescentMinimizer
 from .line_searching import LineSearchStrongWolfe
+from .iteration_controlling import GradientNormController

 class VL_BFGS(DescentMinimizer):
-    def __init__(self, controller, line_searcher=LineSearchStrongWolfe(),
-                 max_history_length=5):
+    def __init__(self, controller=GradientNormController(iteration_limit=100),
+                 line_searcher=LineSearchStrongWolfe(), max_history_length=5):
         super(VL_BFGS, self).__init__(controller=controller,
                                       line_searcher=line_searcher)
......
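With GradientNormController(iteration_limit=100) as the default controller in NonlinearConjugateGradient, RelaxedNewton and VL_BFGS, every minimizer is now constructible without arguments:

    minimizer = VL_BFGS()        # stops after 100 iterations by default
    minimizer = RelaxedNewton()  # same default stopping policy

One general Python caveat, independent of this commit: a default argument is evaluated once at definition time, so all default-constructed instances of a class share the same controller object, including its iteration counters and whichever logger the last Minimizer.__init__ assigned to it.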