diff --git a/README.rst b/README.rst
index 09ed93af3f96b204ba8a4790d5b5cf62ba9db807..92e4c9831607b12b5d423098aa167572e3929b51 100644
--- a/README.rst
+++ b/README.rst
@@ -63,6 +63,7 @@ apply to fields.
         vector
     *   ``response_operator`` - exemplary responses that include a convolution,
         masking and projection
+    *   ``propagator_operator`` - information propagator in Wiener filter theory
     *   (and more)
 
 * (and more)
@@ -96,7 +97,7 @@ Requirements
 Download
 ........
 
-The latest release is tagged **v0.4.2** and is available as a source package
+The latest release is tagged **v0.6.0** and is available as a source package
 at `<https://github.com/mselig/nifty/tags>`_. The current version can be
 obtained by cloning the repository::
 
@@ -122,7 +123,7 @@ Please, acknowledge the use of NIFTY in your publication(s) by using a phrase
 such as the following:
 
     *"Some of the results in this publication have been derived using the NIFTY
-    [Selig et al., 2013] package."*
+    package [Selig et al., 2013]."*
 
 References
 ..........
diff --git a/demos/demo_wf1.py b/demos/demo_wf1.py
index e4035cb2fd365767a4a6c1db1b54cea69c8dece9..580f25ac8aecadb5e8c15045d5f9bccb025fe0ac 100644
--- a/demos/demo_wf1.py
+++ b/demos/demo_wf1.py
@@ -33,8 +33,6 @@
 """
 from __future__ import division
 from nifty import *                                                   # version 0.6.0
-from nifty.nifty_tools import *
-
 
 
 # some signal space; e.g., a two-dimensional regular grid
@@ -57,15 +55,11 @@ n = N.get_random_field(domain=d_space)                                # generate
 
 d = R(s) + n                                                          # compute data
 
-
-
 j = R.adjoint_times(N.inverse_times(d))                               # define information source
-D = propagator_operator(S=S,N=N,R=R)                                  # define information propagator
+D = propagator_operator(S=S, N=N, R=R)                                # define information propagator
 
 m = D(j, tol=1E-4, note=True)                                         # reconstruct map
 
-
-
 s.plot(title="signal")                                                # plot signal
 d_ = field(x_space, val=d.val, target=k_space)
 d_.plot(title="data", vmin=s.val.min(), vmax=s.val.max())             # plot data
diff --git a/demos/demo_wf2.py b/demos/demo_wf2.py
index 6c3def9a8f37e5e6c7fd485aec3bf0869d17f7d8..8d0b0350094f12de19a68c0ab0b68e8d0467af0a 100644
--- a/demos/demo_wf2.py
+++ b/demos/demo_wf2.py
@@ -33,8 +33,6 @@
 """
 from __future__ import division
 from nifty import *                                                   # version 0.6.0
-from nifty.nifty_tools import *
-
 
 
 # some signal space; e.g., a two-dimensional regular grid
@@ -57,10 +55,8 @@ n = N.get_random_field(domain=d_space)                                # generate
 
 d = R(s) + n                                                          # compute data
 
-
-
 j = R.adjoint_times(N.inverse_times(d))                               # define information source
-D = propagator_operator(S=S,N=N,R=R)                                  # define information propagator
+D = propagator_operator(S=S, N=N, R=R)                                # define information propagator
 
 
 def eggs(x):
@@ -68,17 +64,15 @@ def eggs(x):
         Calculation of the information Hamiltonian and its gradient.
 
     """
-    Dx = D.inverse_times(x)
-    H = 0.5 * Dx.dot(x) - j.dot(x)                                    # compute information Hamiltonian
-    g = Dx - j                                                        # compute its gradient
-    return H,g
+    DIx = D.inverse_times(x)
+    H = 0.5 * DIx.dot(x) - j.dot(x)                                    # compute information Hamiltonian
+    g = DIx - j                                                        # compute its gradient
+    return H, g
 
 
 m = field(x_space, target=k_space)                                    # reconstruct map
 m,convergence = steepest_descent(eggs=eggs, note=True)(m, tol=1E-4, clevel=3)
 
-
-
 s.plot(title="signal")                                                # plot signal
 d_ = field(x_space, val=d.val, target=k_space)
 d_.plot(title="data", vmin=s.val.min(), vmax=s.val.max())             # plot data
diff --git a/nifty_core.py b/nifty_core.py
index 308d89d68e1130e41d2d76696d6458348c302ace..10aebc13cb9ed14614a5d9de883e2dce0b959d15 100644
--- a/nifty_core.py
+++ b/nifty_core.py
@@ -486,7 +486,7 @@ class _about(object): ## nifty support class for global settings
 
         """
         ## version
-        self._version = "0.5.9"
+        self._version = "0.6.0"
 
         ## switches and notifications
         self._errors = notification(default=True,ccode=notification._code)
@@ -11703,17 +11703,17 @@ class probing(object):
 
     ##+++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++
 
-    def evaluate(self,sum_,num_,var_):
+    def evaluate(self,summa,num,var):
         """
             Evaluates the probing results.
 
             Parameters
             ----------
-            sum_ : numpy.array
+            summa : numpy.array
                 Sum of all probing results.
-            num_ : int
+            num : int
                 Number of successful probings (not returning ``None``).
-            var_ : numpy.array
+            var : numpy.array
                 Sum of all squared probing results
 
             Returns
@@ -11727,27 +11727,27 @@ class probing(object):
                 (`final`,`var`).
 
         """
-        if(num_<self.nrun):
-            about.infos.cflush(" ( %u probe(s) failed, effectiveness == %.1f%% )\n"%(self.nrun-num_,100*num_/self.nrun))
-            if(num_==0):
+        if(num<self.nrun):
+            about.infos.cflush(" ( %u probe(s) failed, effectiveness == %.1f%% )\n"%(self.nrun-num,100*num/self.nrun))
+            if(num==0):
                 about.warnings.cprint("WARNING: probing failed.")
                 return None
         else:
             about.infos.cflush("\n")
 
-        if(sum_.size==1):
-            sum_ = sum_.flatten(order='C')[0]
-            var_ = var_.flatten(order='C')[0]
-        if(np.iscomplexobj(sum_))and(np.all(np.imag(sum_)==0)):
-            sum_ = np.real(sum_)
+        if(summa.size==1):
+            summa = summa.flatten(order='C')[0]
+            var = var.flatten(order='C')[0]
+        if(np.iscomplexobj(summa))and(np.all(np.imag(summa)==0)):
+            summa = np.real(summa)
 
-        final = sum_*(1/num_)
+        final = summa*(1/num)
         if(self.var):
-            if(num_==1):
+            if(num==1):
                 about.warnings.cprint("WARNING: infinite variance.")
                 return final,None
             else:
-                var = var_*(1/(num_*(num_-1)))-np.real(np.conjugate(final)*final)*(1/(num_-1))
+                var = var*(1/(num*(num-1)))-np.real(np.conjugate(final)*final)*(1/(num-1))
                 return final,var
         else:
             return final
@@ -11858,7 +11858,7 @@ class probing(object):
         ## evaluate
         return self.evaluate(_sum,_num,_var)
 
-    def __call__(self,loop=False,**kwargs): ## FIXME: doc
+    def __call__(self,loop=False,**kwargs):
         """
 
             Starts the probing process.
@@ -12129,17 +12129,17 @@ class trace_probing(probing):
 
     ##+++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++
 
-    def evaluate(self,sum_,num_,var_):
+    def evaluate(self,summa,num,var):
         """
             Evaluates the probing results.
 
             Parameters
             ----------
-            sum_ : scalar
+            summa : scalar
                 Sum of all probing results.
-            num_ : int
+            num : int
                 Number of successful probings (not returning ``None``).
-            var_ : scalar
+            var : scalar
                 Sum of all squared probing results
 
             Returns
@@ -12153,24 +12153,24 @@ class trace_probing(probing):
                 (`final`,`var`).
 
         """
-        if(num_<self.nrun):
-            about.infos.cflush(" ( %u probe(s) failed, effectiveness == %.1f%% )\n"%(self.nrun-num_,100*num_/self.nrun))
-            if(num_==0):
+        if(num<self.nrun):
+            about.infos.cflush(" ( %u probe(s) failed, effectiveness == %.1f%% )\n"%(self.nrun-num,100*num/self.nrun))
+            if(num==0):
                 about.warnings.cprint("WARNING: probing failed.")
                 return None
         else:
             about.infos.cflush("\n")
 
         if(self.domain.datatype in [np.complex64,np.complex128]):
-            sum_ = np.real(sum_)
+            summa = np.real(summa)
 
-        final = sum_/num_
+        final = summa/num
         if(self.var):
-            if(num_==1):
+            if(num==1):
                 about.warnings.cprint("WARNING: infinite variance.")
                 return final,None
             else:
-                var = var_/(num_*(num_-1))-np.real(np.conjugate(final)*final)/(num_-1)
+                var = var/(num*(num-1))-np.real(np.conjugate(final)*final)/(num-1)
                 return final,var
         else:
             return final
diff --git a/nifty_power.py b/nifty_power.py
index 2c85028cba529b052d3cf9dc66050e9401981077..98758541ccda08163c60578ab3fcb5152638156e 100644
--- a/nifty_power.py
+++ b/nifty_power.py
@@ -28,11 +28,11 @@
     ..     /__/ /__/ /__/ /__/    \___/  \___   /  power
     ..                                  /______/
 
-    NIFTy offers a number of automatized routines for handling
+    NIFTY offers a number of automatized routines for handling
     power spectra. It is possible to draw a field from a random distribution
     with a certain autocorrelation or, equivalently, with a certain
     power spectrum in its conjugate space (see :py:func:`field.random`). In
-    NIFTy, it is usually assumed that such a field follows statistical
+    NIFTY, it is usually assumed that such a field follows statistical
     homogeneity and isotropy. Fields which are only statistically homogeneous
     can also be created using the diagonal operator routine.
 
diff --git a/nifty_tools.py b/nifty_tools.py
index 01ca1f1a101f4514347ac2430608551b1e861576..9732744515f4710e8fe5237743d593e7777dcf50 100644
--- a/nifty_tools.py
+++ b/nifty_tools.py
@@ -28,9 +28,12 @@
     ..     /__/ /__/ /__/ /__/    \___/  \___   /  tools
     ..                                  /______/
 
-    A nifty set of tools.
-
-    ## TODO: *DESCRIPTION*
+    This module extends NIFTY with a nifty set of tools including further
+    operators, namely the :py:class:`invertible_operator` and the
+    :py:class:`propagator_operator`, and minimization schemes, namely
+    :py:class:`steepest_descent` and :py:class:`conjugate_gradient`. Those
+    tools are supposed to support the user in solving information field
+    theoretical problems (almost) without numerical pain.
 
 """
 from __future__ import division
@@ -51,9 +54,10 @@ class invertible_operator(operator):
 
         NIFTY subclass for invertible, self-adjoint (linear) operators
 
-        The base NIFTY operator class is an abstract class from which other
-        specific operator subclasses, including those preimplemented in NIFTY
-        (e.g. the diagonal operator class) must be derived.
+        The invertible operator class is an abstract class for self-adjoint or
+        symmetric (linear) operators from which other more specific operator
+        subclasses can be derived. Such operators inherit an automated inversion
+        routine, namely conjugate gradient.
 
         Parameters
         ----------
@@ -76,9 +80,9 @@ class invertible_operator(operator):
 
         Notes
         -----
-        Operator classes derived from this one only need a `_multiply` or
-        `_inverse_multiply` instance method to perform the other. However, one
-        of them needs to be defined.
+        This class is not meant to be instantiated. Operator classes derived
+        from this one only need a `_multiply` or `_inverse_multiply` instance
+        method to perform the other. However, one of them needs to be defined.
 
         Attributes
         ----------
@@ -275,8 +279,8 @@ class propagator_operator(operator):
         NIFTY subclass for propagator operators (of a certain family)
 
         The propagator operators :math:`D` implemented here have an inverse
-        formulation like :math:`S^{-1} + M`, :math:`S^{-1} + N^{-1}`, or
-        :math:`S^{-1} + R^\dagger N^{-1} R` as appearing in Wiener filter
+        formulation like :math:`(S^{-1} + M)`, :math:`(S^{-1} + N^{-1})`, or
+        :math:`(S^{-1} + R^\dagger N^{-1} R)` as appearing in Wiener filter
         theory.
 
         Parameters
@@ -296,7 +300,7 @@ class propagator_operator(operator):
 
         Notes
         -----
-        The propagator will puzzle the operators `S` and `M` or `R`,`N` or
+        The propagator will puzzle the operators `S` and `M` or `R`, `N` or
         only `N` together in the predefined from, a domain is set
         automatically. The application of the inverse is done by invoking a
         conjugate gradient.
@@ -610,8 +614,8 @@ class conjugate_gradient(object):
         References
         ----------
         .. [#] J. R. Shewchuk, 1994, `"An Introduction to the Conjugate
-        Gradient Method Without the Agonizing Pain"
-        `<http://www.cs.cmu.edu/~quake-papers/painless-conjugate-gradient.pdf>`_
+            Gradient Method Without the Agonizing Pain"
+            <http://www.cs.cmu.edu/~quake-papers/painless-conjugate-gradient.pdf>`_
 
         Examples
         --------