From d5fe215dfbabbc34604e0617bc60549d3890bca4 Mon Sep 17 00:00:00 2001
From: Martin Reinecke <martin@mpa-garching.mpg.de>
Date: Tue, 13 Mar 2018 15:43:01 +0100
Subject: [PATCH] switch to Python logging

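Replace the ad-hoc mprint() helpers in numpy_do/distributed_do with a
standard logging.Logger named "NIFTy4", exposed as nifty4.logger.  Only
MPI rank 0 attaches a StreamHandler, so every message is emitted once
per run; all other ranks get a NullHandler.  Call sites now use
logger.info/warning/error according to severity.

The snippet below is only a usage sketch for downstream code and is not
part of this patch (the file name "nifty4.log" is illustrative):

    import logging
    import nifty4 as ift

    # tune verbosity and attach an additional handler of one's own
    ift.logger.setLevel(logging.WARNING)    # silence info/debug output
    fh = logging.FileHandler("nifty4.log")  # illustrative file name
    ift.logger.addHandler(fh)
    ift.logger.warning("goes to the console (on rank 0) and to the file")
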
---
 demos/probing.py                                |  2 +-
 demos/wiener_filter_easy.py                     |  1 -
 nifty4/__init__.py                              |  2 ++
 nifty4/data_objects/distributed_do.py           |  5 -----
 nifty4/data_objects/numpy_do.py                 |  4 ----
 nifty4/dobj.py                                  |  5 +----
 nifty4/logger.py                                | 14 ++++++++++++++
 nifty4/minimization/conjugate_gradient.py       |  8 ++++----
 nifty4/minimization/descent_minimizer.py        |  6 +++---
 nifty4/minimization/gradient_norm_controller.py |  6 +++---
 nifty4/minimization/l_bfgs.py                   |  4 ++--
 nifty4/minimization/line_energy.py              |  6 +++---
 nifty4/minimization/line_search_strong_wolfe.py | 12 ++++++------
 nifty4/minimization/scipy_minimizer.py          |  5 +++--
 nifty4/operators/inversion_enabler.py           |  5 +++--
 nifty4/sugar.py                                 |  3 ++-
 16 files changed, 47 insertions(+), 41 deletions(-)
 create mode 100644 nifty4/logger.py

diff --git a/demos/probing.py b/demos/probing.py
index ae93129e2..58810fb3a 100644
--- a/demos/probing.py
+++ b/demos/probing.py
@@ -9,4 +9,4 @@ f = ift.Field.from_random(domain=x, random_type='normal')
 diagOp = ift.DiagonalOperator(f)
 
 diag = ift.probe_diagonal(diagOp, 1000)
-ift.dobj.mprint((f - diag).norm())
+ift.logger.info((f - diag).norm())
diff --git a/demos/wiener_filter_easy.py b/demos/wiener_filter_easy.py
index 8412ac722..a2f44d270 100644
--- a/demos/wiener_filter_easy.py
+++ b/demos/wiener_filter_easy.py
@@ -1,7 +1,6 @@
 import numpy as np
 import nifty4 as ift
 
-
 if __name__ == "__main__":
     np.random.seed(43)
     # Set up physical constants
diff --git a/nifty4/__init__.py b/nifty4/__init__.py
index 14b675522..1b9f7d62f 100644
--- a/nifty4/__init__.py
+++ b/nifty4/__init__.py
@@ -22,6 +22,8 @@ from . import extra
 
 from .utilities import memo
 
+from .logger import logger
+
 __all__ = ["__version__", "dobj", "DomainTuple"] + \
           domains.__all__ + operators.__all__ + minimization.__all__ + \
           ["DomainTuple", "Field", "sqrt", "exp", "log"]
diff --git a/nifty4/data_objects/distributed_do.py b/nifty4/data_objects/distributed_do.py
index f810723d6..9f6f05520 100644
--- a/nifty4/data_objects/distributed_do.py
+++ b/nifty4/data_objects/distributed_do.py
@@ -31,11 +31,6 @@ def is_numpy():
     return False
 
 
-def mprint(*args):
-    if master:
-        print(*args)
-
-
 def _shareSize(nwork, nshares, myshare):
     return (nwork//nshares) + int(myshare < nwork % nshares)
 
diff --git a/nifty4/data_objects/numpy_do.py b/nifty4/data_objects/numpy_do.py
index 201c106af..4a4e774e7 100644
--- a/nifty4/data_objects/numpy_do.py
+++ b/nifty4/data_objects/numpy_do.py
@@ -34,10 +34,6 @@ def is_numpy():
     return True
 
 
-def mprint(*args):
-    print(*args)
-
-
 def from_object(object, dtype, copy, set_locked):
     if dtype is None:
         dtype = object.dtype
diff --git a/nifty4/dobj.py b/nifty4/dobj.py
index 1444547f6..c4f502687 100644
--- a/nifty4/dobj.py
+++ b/nifty4/dobj.py
@@ -20,18 +20,15 @@ try:
     from mpi4py import MPI
     if MPI.COMM_WORLD.Get_size() == 1:
         from .data_objects.numpy_do import *
-        # mprint("MPI found, but only with one task, using numpy_do...")
     else:
         from .data_objects.distributed_do import *
-        # mprint("MPI with multiple tasks found, using distributed_do...")
 except ImportError:
     from .data_objects.numpy_do import *
-    # mprint("MPI not found, using numpy_do...")
 
 __all__ = ["ntask", "rank", "master", "local_shape", "data_object", "full",
            "empty", "zeros", "ones", "empty_like", "vdot", "abs", "exp",
            "log", "tanh", "sqrt", "from_object", "from_random",
            "local_data", "ibegin", "ibegin_from_shape", "np_allreduce_sum",
            "distaxis", "from_local_data", "from_global_data", "to_global_data",
-           "redistribute", "default_distaxis", "mprint", "is_numpy",
+           "redistribute", "default_distaxis", "is_numpy",
            "lock", "locked"]
diff --git a/nifty4/logger.py b/nifty4/logger.py
new file mode 100644
index 000000000..5801d07f6
--- /dev/null
+++ b/nifty4/logger.py
@@ -0,0 +1,14 @@
+def _logger_init():
+    import logging
+    from . import dobj
+    res = logging.getLogger('NIFTy4')
+    res.setLevel(logging.DEBUG)
+    if dobj.rank == 0:
+        ch = logging.StreamHandler()
+        ch.setLevel(logging.DEBUG)
+        res.addHandler(ch)
+    else:
+        res.addHandler(logging.NullHandler())
+    return res
+
+logger = _logger_init()
diff --git a/nifty4/minimization/conjugate_gradient.py b/nifty4/minimization/conjugate_gradient.py
index a752c5171..ad5d96360 100644
--- a/nifty4/minimization/conjugate_gradient.py
+++ b/nifty4/minimization/conjugate_gradient.py
@@ -18,7 +18,7 @@
 
 from __future__ import division
 from .minimizer import Minimizer
-from .. import dobj
+from ..logger import logger
 
 
 class ConjugateGradient(Minimizer):
@@ -76,12 +76,12 @@ class ConjugateGradient(Minimizer):
             q = energy.curvature(d)
             ddotq = d.vdot(q).real
             if ddotq == 0.:
-                dobj.mprint("Error: ConjugateGradient: ddotq==0.")
+                logger.error("Error: ConjugateGradient: ddotq==0.")
                 return energy, controller.ERROR
             alpha = previous_gamma/ddotq
 
             if alpha < 0:
-                dobj.mprint("Error: ConjugateGradient: alpha<0.")
+                logger.error("Error: ConjugateGradient: alpha<0.")
                 return energy, controller.ERROR
 
             q *= -alpha
@@ -93,7 +93,7 @@ class ConjugateGradient(Minimizer):
 
             gamma = r.vdot(s).real
             if gamma < 0:
-                dobj.mprint(
+                logger.error(
                     "Positive definiteness of preconditioner violated!")
                 return energy, controller.ERROR
             if gamma == 0:
diff --git a/nifty4/minimization/descent_minimizer.py b/nifty4/minimization/descent_minimizer.py
index d3fbea686..778957675 100644
--- a/nifty4/minimization/descent_minimizer.py
+++ b/nifty4/minimization/descent_minimizer.py
@@ -20,7 +20,7 @@ from __future__ import division
 import abc
 from .minimizer import Minimizer
 from .line_search_strong_wolfe import LineSearchStrongWolfe
-from .. import dobj
+from ..logger import logger
 
 
 class DescentMinimizer(Minimizer):
@@ -92,11 +92,11 @@ class DescentMinimizer(Minimizer):
             f_k_minus_1 = energy.value
 
             if new_energy.value > energy.value:
-                dobj.mprint("Error: Energy has increased")
+                logger.error("Error: Energy has increased")
                 return energy, controller.ERROR
 
             if new_energy.value == energy.value:
-                dobj.mprint(
+                logger.warning(
                     "Warning: Energy has not changed. Assuming convergence...")
                 return new_energy, controller.CONVERGED
 
diff --git a/nifty4/minimization/gradient_norm_controller.py b/nifty4/minimization/gradient_norm_controller.py
index 0da6b97ce..be94e7624 100644
--- a/nifty4/minimization/gradient_norm_controller.py
+++ b/nifty4/minimization/gradient_norm_controller.py
@@ -17,7 +17,7 @@
 # and financially supported by the Studienstiftung des deutschen Volkes.
 
 from .iteration_controller import IterationController
-from .. import dobj
+from ..logger import logger
 
 
 class GradientNormController(IterationController):
@@ -116,13 +116,13 @@ class GradientNormController(IterationController):
             msg += " energy={:.6E}".format(energy.value)
             msg += " gradnorm={:.2E}".format(energy.gradient_norm)
             msg += " clvl=" + str(self._ccount)
-            dobj.mprint(msg)
+            logger.info(msg)
             # self.logger.info(msg)
 
         # Are we done?
         if self._iteration_limit is not None:
             if self._itcount >= self._iteration_limit:
-                dobj.mprint(
+                logger.warning(
                     "Warning:Iteration limit reached. Assuming convergence")
                 return self.CONVERGED
         if self._ccount >= self._convergence_level:
diff --git a/nifty4/minimization/l_bfgs.py b/nifty4/minimization/l_bfgs.py
index c7e910e34..efb7be73b 100644
--- a/nifty4/minimization/l_bfgs.py
+++ b/nifty4/minimization/l_bfgs.py
@@ -21,7 +21,7 @@ from builtins import range
 import numpy as np
 from .descent_minimizer import DescentMinimizer
 from .line_search_strong_wolfe import LineSearchStrongWolfe
-from .. import dobj
+from ..logger import logger
 
 
 class L_BFGS(DescentMinimizer):
@@ -64,7 +64,7 @@ class L_BFGS(DescentMinimizer):
             idx = (k-1) % maxhist
             fact = s[idx].vdot(y[idx]) / y[idx].vdot(y[idx])
             if fact <= 0.:
-                dobj.mprint("L-BFGS curvature not positive definite!")
+                logger.error("L-BFGS curvature not positive definite!")
             p *= fact
             for i in range(k-nhist, k):
                 idx = i % maxhist
diff --git a/nifty4/minimization/line_energy.py b/nifty4/minimization/line_energy.py
index bcf8e90cf..742f06e6c 100644
--- a/nifty4/minimization/line_energy.py
+++ b/nifty4/minimization/line_energy.py
@@ -95,7 +95,7 @@ class LineEnergy(object):
         """
         res = self._energy.gradient.vdot(self._line_direction)
         if abs(res.imag) / max(abs(res.real), 1.) > 1e-12:
-            from ..dobj import mprint
-            mprint("directional derivative has non-negligible "
-                   "imaginary part:", res)
+            from ..logger import logger
+            logger.warn("directional derivative has non-negligible "
+                        "imaginary part:", res)
         return res.real
diff --git a/nifty4/minimization/line_search_strong_wolfe.py b/nifty4/minimization/line_search_strong_wolfe.py
index 662953f81..1e0137d0e 100644
--- a/nifty4/minimization/line_search_strong_wolfe.py
+++ b/nifty4/minimization/line_search_strong_wolfe.py
@@ -21,7 +21,7 @@ from builtins import range
 import numpy as np
 from .line_search import LineSearch
 from .line_energy import LineEnergy
-from .. import dobj
+from ..logger import logger
 
 
 class LineSearchStrongWolfe(LineSearch):
@@ -100,10 +100,10 @@ class LineSearchStrongWolfe(LineSearch):
         phi_0 = le_0.value
         phiprime_0 = le_0.directional_derivative
         if phiprime_0 == 0:
-            dobj.mprint("Directional derivative is zero; assuming convergence")
+            logger.warn("Directional derivative is zero; assuming convergence")
             return energy, False
         if phiprime_0 > 0:
-            dobj.mprint("Error: search direction is not a descent direction")
+            logger.error("Error: search direction is not a descent direction")
             return energy, False
 
         # set alphas
@@ -149,13 +149,13 @@ class LineSearchStrongWolfe(LineSearch):
             # update alphas
             alpha0, alpha1 = alpha1, min(2*alpha1, maxstepsize)
             if alpha1 == maxstepsize:
-                dobj.mprint("max step size reached")
+                logger.warn("max step size reached")
                 return le_alpha1.energy, False
 
             phi_alpha0 = phi_alpha1
             phiprime_alpha0 = phiprime_alpha1
 
-        dobj.mprint("max iterations reached")
+        logger.warn("max iterations reached")
         return le_alpha1.energy, False
 
     def _zoom(self, alpha_lo, alpha_hi, phi_0, phiprime_0,
@@ -252,7 +252,7 @@ class LineSearchStrongWolfe(LineSearch):
                                                    phiprime_alphaj)
 
         else:
-            dobj.mprint("The line search algorithm (zoom) did not converge.")
+            logger.warn("The line search algorithm (zoom) did not converge.")
             return le_alphaj.energy, False
 
     def _cubicmin(self, a, fa, fpa, b, fb, c, fc):
diff --git a/nifty4/minimization/scipy_minimizer.py b/nifty4/minimization/scipy_minimizer.py
index 7948f83fc..4c94b8c41 100644
--- a/nifty4/minimization/scipy_minimizer.py
+++ b/nifty4/minimization/scipy_minimizer.py
@@ -20,6 +20,7 @@ from __future__ import division
 from .minimizer import Minimizer
 from ..field import Field
 from .. import dobj
+from ..logger import logger
 
 
 class ScipyMinimizer(Minimizer):
@@ -97,9 +98,9 @@ class ScipyMinimizer(Minimizer):
             status = self._controller.check(hlp._energy)
             return hlp._energy, self._controller.check(hlp._energy)
         if not r.success:
-            dobj.mprint("Problem in Scipy minimization:", r.message)
+            logger.error("Problem in Scipy minimization:", r.message)
         else:
-            dobj.mprint("Problem in Scipy minimization")
+            logger.error("Problem in Scipy minimization")
         return hlp._energy, self._controller.ERROR
 
 
diff --git a/nifty4/operators/inversion_enabler.py b/nifty4/operators/inversion_enabler.py
index 2151dc90d..ea6a436c4 100644
--- a/nifty4/operators/inversion_enabler.py
+++ b/nifty4/operators/inversion_enabler.py
@@ -18,7 +18,8 @@
 
 from ..minimization.quadratic_energy import QuadraticEnergy
 from ..minimization.iteration_controller import IterationController
-from ..field import Field, dobj
+from ..field import Field
+from ..logger import logger
 from .linear_operator import LinearOperator
 
 
@@ -71,5 +72,5 @@ class InversionEnabler(LinearOperator):
         energy = QuadraticEnergy(A=func, b=x, position=x0)
         r, stat = self._inverter(energy, preconditioner=self._preconditioner)
         if stat != IterationController.CONVERGED:
-            dobj.mprint("Error detected during operator inversion")
+            logger.warning("Error detected during operator inversion")
         return r.position
diff --git a/nifty4/sugar.py b/nifty4/sugar.py
index 144c1952e..aa43de37f 100644
--- a/nifty4/sugar.py
+++ b/nifty4/sugar.py
@@ -23,6 +23,7 @@ from .operators.diagonal_operator import DiagonalOperator
 from .operators.power_distributor import PowerDistributor
 from .domain_tuple import DomainTuple
 from . import dobj, utilities
+from .logger import logger
 
 __all__ = ['PS_field',
            'power_analyze',
@@ -85,7 +86,7 @@ def power_analyze(field, spaces=None, binbounds=None,
 
     for sp in field.domain:
         if not sp.harmonic and not isinstance(sp, PowerSpace):
-            dobj.mprint("WARNING: Field has a space in `domain` which is "
+            logger.warn("WARNING: Field has a space in `domain` which is "
                         "neither harmonic nor a PowerSpace.")
 
     spaces = utilities.parse_spaces(spaces, len(field.domain))
-- 
GitLab