diff --git a/Dockerfile b/Dockerfile
deleted file mode 100644
index 4714f9eb9016ad5cad8ffadbfa00012b320a17f5..0000000000000000000000000000000000000000
--- a/Dockerfile
+++ /dev/null
@@ -1,21 +0,0 @@
-#FROM ubuntu:artful
-FROM debian:testing-slim
-
-# dependencies via apt
-RUN apt-get update
-ADD ci/install_basics.sh /tmp/install_basics.sh
-RUN sh /tmp/install_basics.sh
-
-
-# python dependencies
-ADD ci/requirements.txt /tmp/requirements.txt
-RUN pip install --process-dependency-links -r /tmp/requirements.txt
-
-
-# copy sources and install nifty
-COPY . /tmp/NIFTy
-RUN pip install /tmp/NIFTy
-
-
-# Cleanup
-RUN rm -r /tmp/*
diff --git a/ci/requirements.txt b/ci/requirements.txt
index 64c0d6b7cded1485e406f03c20098857eeab249c..37695fba67585adcd6fabc5adcaa1a05316891ba 100644
--- a/ci/requirements.txt
+++ b/ci/requirements.txt
@@ -1,6 +1,6 @@
 parameterized
 coverage
 git+https://gitlab.mpcdf.mpg.de/ift/pyHealpix.git
-sphinx
+sphinx==1.6.7
 sphinx_rtd_theme
 numpydoc
diff --git a/demos/critical_filtering.py b/demos/critical_filtering.py
index d0c026e63b8d70e0dfffab74b7f045b5e1a10c9b..81cfbeb35c44d732741c819b647c75f1b349b580 100644
--- a/demos/critical_filtering.py
+++ b/demos/critical_filtering.py
@@ -8,7 +8,7 @@ np.random.seed(42)
 if __name__ == "__main__":
     # Set up position space
     s_space = ift.RGSpace([128, 128])
-    #s_space = ift.HPSpace(32)
+    # s_space = ift.HPSpace(32)
 
     # Define harmonic transformation and associated harmonic space
     h_space = s_space.get_default_codomain()
@@ -63,7 +63,8 @@ if __name__ == "__main__":
                                      tol_abs_gradnorm=1e-15)
     minimizer = ift.RelaxedNewton(IC1)
 
-    ICI = ift.GradientNormController(iteration_limit=500, tol_abs_gradnorm=1e-10)
+    ICI = ift.GradientNormController(iteration_limit=500,
+                                     tol_abs_gradnorm=1e-10)
     map_inverter = ift.ConjugateGradient(controller=ICI)
 
     ICI2 = ift.GradientNormController(iteration_limit=200,
diff --git a/demos/nonlinear_critical_filter.py b/demos/nonlinear_critical_filter.py
index f8eb8236d543ae7fe26df90b3fbe693881e77e5a..b9c9f04a31582ea5e85f391d6f7f3bc8ceb31578 100644
--- a/demos/nonlinear_critical_filter.py
+++ b/demos/nonlinear_critical_filter.py
@@ -53,8 +53,9 @@ if __name__ == "__main__":
     MaskOperator = ift.DiagonalOperator(mask)
     R = ift.GeometryRemover(s_space)
     R = R*MaskOperator
-    #R = R*HT
-    #R = R * ift.create_harmonic_smoothing_operator((harmonic_space,),0,response_sigma)
+    # R = R*HT
+    # R = R * ift.create_harmonic_smoothing_operator((harmonic_space,), 0,
+    #                                                response_sigma)
     MeasurementOperator = R
 
     d_space = MeasurementOperator.target
diff --git a/demos/paper_demos/cartesian_wiener_filter.py b/demos/paper_demos/cartesian_wiener_filter.py
index 96602a9f2850146b52c352a7f47ed760ccad2e02..f555e3782c0a762852eae85c454b7f26738ec8c3 100644
--- a/demos/paper_demos/cartesian_wiener_filter.py
+++ b/demos/paper_demos/cartesian_wiener_filter.py
@@ -71,8 +71,8 @@ if __name__ == "__main__":
     mask_2 = ift.Field(signal_space_2, ift.dobj.from_global_data(mask_2))
 
     R = ift.GeometryRemover(signal_domain)
-    R = R*ift.DiagonalOperator(mask_1, signal_domain,spaces=0)
-    R = R*ift.DiagonalOperator(mask_2, signal_domain,spaces=1)
+    R = R*ift.DiagonalOperator(mask_1, signal_domain, spaces=0)
+    R = R*ift.DiagonalOperator(mask_2, signal_domain, spaces=1)
     R = R*ht
     R = R * ift.create_harmonic_smoothing_operator(harmonic_domain, 0,
                                                    response_sigma_1)
@@ -101,11 +101,13 @@ if __name__ == "__main__":
 
     plotdict = {"colormap": "Planck-like"}
     plot_space = ift.RGSpace((N_pixels_1, N_pixels_2))
-    ift.plot(ift.Field(plot_space,val=ht(mock_signal).val), name='mock_signal.png',
-             **plotdict)
-    ift.plot(ift.Field(plot_space,val=data.val), name='data.png', **plotdict)
-    ift.plot(ift.Field(plot_space,val=m.val), name='map.png', **plotdict)
+    ift.plot(ift.Field(plot_space, val=ht(mock_signal).val),
+             name='mock_signal.png', **plotdict)
+    ift.plot(ift.Field(plot_space, val=data.val), name='data.png', **plotdict)
+    ift.plot(ift.Field(plot_space, val=m.val), name='map.png', **plotdict)
     # sampling the uncertainty map
     mean, variance = ift.probe_with_posterior_samples(wiener_curvature, ht, 10)
-    ift.plot(ift.Field(plot_space, val=ift.sqrt(variance).val), name="uncertainty.png", **plotdict)
-    ift.plot(ift.Field(plot_space, val=(mean+m).val), name="posterior_mean.png", **plotdict)
+    ift.plot(ift.Field(plot_space, val=ift.sqrt(variance).val),
+             name="uncertainty.png", **plotdict)
+    ift.plot(ift.Field(plot_space, val=(mean+m).val),
+             name="posterior_mean.png", **plotdict)
diff --git a/demos/paper_demos/wiener_filter.py b/demos/paper_demos/wiener_filter.py
index a17f002cda07423302582eae3453ebc736c61451..f30f0f3dbf7b919907f0d7015738bbb014331cc8 100644
--- a/demos/paper_demos/wiener_filter.py
+++ b/demos/paper_demos/wiener_filter.py
@@ -38,7 +38,8 @@ if __name__ == "__main__":
     R = ift.GeometryRemover(signal_space)
     R = R*ift.DiagonalOperator(mask)
     R = R*ht
-    R = R * ift.create_harmonic_smoothing_operator((harmonic_space,),0,response_sigma)
+    R = R * ift.create_harmonic_smoothing_operator((harmonic_space,), 0,
+                                                   response_sigma)
     data_domain = R.target[0]
 
     noiseless_data = R(mock_signal)
diff --git a/demos/wiener_filter_via_hamiltonian.py b/demos/wiener_filter_via_hamiltonian.py
index 8dd1fb854e87ae535d43b116a838a9311fad5753..243ef350c40fbfc4efbc20a6dbe305556e42b238 100644
--- a/demos/wiener_filter_via_hamiltonian.py
+++ b/demos/wiener_filter_via_hamiltonian.py
@@ -85,5 +85,5 @@ if __name__ == "__main__":
 
     # try to do the same with diagonal probing
     variance = ift.probe_diagonal(ht*curv.inverse*ht.adjoint, 100)
-    #sm = ift.FFTSmoothingOperator(s_space, sigma=0.005)
-    ift.plot(variance, name="posterior_variance2.png", **plotdict2)
+    # sm = ift.FFTSmoothingOperator(s_space, sigma=0.015)
+    ift.plot(variance, name="posterior_variance2.png", **plotdict)
diff --git a/docs/better_apidoc.py b/docs/better_apidoc.py
new file mode 100644
index 0000000000000000000000000000000000000000..75fe8f4ee2a525a8f1920fc3945db90fcebde2cb
--- /dev/null
+++ b/docs/better_apidoc.py
@@ -0,0 +1,665 @@
+# -*- coding: utf-8 -*-
+"""
+    better apidoc
+    ~~~~~~~~~~~~~
+
+    Parses a directory tree looking for Python modules and packages and creates
+    ReST files appropriately to create code documentation with Sphinx.  It also
+    creates a modules index (named modules.<suffix>).
+
+    This is derived from the "sphinx-apidoc" script, which is:
+    Copyright 2007-2016 by the Sphinx team
+    http://www.sphinx-doc.org
+
+    It extends "sphinx-apidoc" by the --template / -t option, which allows to
+    render the output ReST files based on arbitrary Jinja templates.
+
+    :copyright: Copyright 2017 by Michael Goerz
+    :license: BSD, see LICENSE for details.
+"""
+from __future__ import print_function
+
+import os
+import sys
+import importlib
+import optparse
+from os import path
+from six import binary_type
+from fnmatch import fnmatch
+
+from jinja2 import FileSystemLoader, TemplateNotFound
+from jinja2.sandbox import SandboxedEnvironment
+
+from sphinx.util.osutil import FileAvoidWrite, walk
+#from sphinx import __display_version__
+from sphinx.quickstart import EXTENSIONS
+
+from sphinx.ext.autosummary import get_documenter
+from sphinx.util.inspect import safe_getattr
+
+# Add documenters to AutoDirective registry
+from sphinx.ext.autodoc import add_documenter, \
+    ModuleDocumenter, ClassDocumenter, ExceptionDocumenter, DataDocumenter, \
+    FunctionDocumenter, MethodDocumenter, AttributeDocumenter, \
+    InstanceAttributeDocumenter
+add_documenter(ModuleDocumenter)
+add_documenter(ClassDocumenter)
+add_documenter(ExceptionDocumenter)
+add_documenter(DataDocumenter)
+add_documenter(FunctionDocumenter)
+add_documenter(MethodDocumenter)
+add_documenter(AttributeDocumenter)
+add_documenter(InstanceAttributeDocumenter)
+
+__version__ = '0.1.2'
+__display_version__ = __version__
+
+if False:
+    # For type annotation
+    from typing import Any, List, Tuple  # NOQA
+
+# automodule options
+if 'SPHINX_APIDOC_OPTIONS' in os.environ:
+    OPTIONS = os.environ['SPHINX_APIDOC_OPTIONS'].split(',')
+else:
+    OPTIONS = [
+        'members',
+        'undoc-members',
+        # 'inherited-members', # disabled because there's a bug in sphinx
+        'show-inheritance',
+    ]
+
+INITPY = '__init__.py'
+PY_SUFFIXES = set(['.py', '.pyx'])
+
+
+def _warn(msg):
+    # type: (unicode) -> None
+    print('WARNING: ' + msg, file=sys.stderr)
+
+
+def makename(package, module):
+    # type: (unicode, unicode) -> unicode
+    """Join package and module with a dot."""
+    # Both package and module can be None/empty.
+    if package:
+        name = package
+        if module:
+            name += '.' + module
+    else:
+        name = module
+    return name
+
+
+def write_file(name, text, opts):
+    # type: (unicode, unicode, Any) -> None
+    """Write the output file for module/package <name>."""
+    fname = path.join(opts.destdir, '%s.%s' % (name, opts.suffix))
+    if opts.dryrun:
+        print('Would create file %s.' % fname)
+        return
+    if not opts.force and path.isfile(fname):
+        print('File %s already exists, skipping.' % fname)
+    else:
+        print('Creating file %s.' % fname)
+        with FileAvoidWrite(fname) as f:
+            f.write(text)
+
+
+def format_heading(level, text):
+    # type: (int, unicode) -> unicode
+    """Create a heading of <level> [1, 2 or 3 supported]."""
+    underlining = ['=', '-', '~', ][level - 1] * len(text)
+    return '%s\n%s\n\n' % (text, underlining)
+
+
+def format_directive(module, package=None):
+    # type: (unicode, unicode) -> unicode
+    """Create the automodule directive and add the options."""
+    directive = '.. automodule:: %s\n' % makename(package, module)
+    for option in OPTIONS:
+        directive += '    :%s:\n' % option
+    return directive
+
+
+def create_module_file(package, module, opts):
+    # type: (unicode, unicode, Any) -> None
+    """Generate RST for a top-level module (i.e., not part of a package)"""
+    if not opts.noheadings:
+        text = format_heading(1, '%s module' % module)
+    else:
+        text = ''
+    # text += format_heading(2, ':mod:`%s` Module' % module)
+    text += format_directive(module, package)
+
+    if opts.templates:
+        template_loader = FileSystemLoader(opts.templates)
+        template_env = SandboxedEnvironment(loader=template_loader)
+        try:
+            mod_ns = _get_mod_ns(
+                name=module, fullname=module,
+                includeprivate=opts.includeprivate)
+            template = template_env.get_template('module.rst')
+            text = template.render(**mod_ns)
+        except ImportError as e:
+            _warn('failed to import %r: %s' % (module, e))
+    write_file(makename(package, module), text, opts)
+
+
+def _get_members(
+        mod, typ=None, include_imported=False, as_refs=False, in__all__=False):
+    """Get (filtered) public/total members of the module or package `mod`.
+
+    Args:
+        mod: object resulting from importing a module or package
+        typ: filter on members. If None, include all members. If one of
+            'function', 'class', 'exception', 'data', only include members of
+            the matching type
+        include_imported: If True, also include members that are imports
+        as_refs: If True, return ReST-formatted reference strings for all
+            members, instead of just their names. In combinations with
+            `include_imported` or `in__all__`, these link to the original
+            location where the member is defined
+        in__all__: If True, return only members that are in ``mod.__all__``
+
+    Returns:
+        lists `public` and `items`. The lists contain the public and private +
+        public members, as strings.
+
+    Note:
+        For data members, there is no way to tell whether they were imported or
+        defined locally (without parsing the source code). A module may define
+        one or both attributes
+
+        __local_data__: list of names of data objects defined locally
+        __imported_data__: dict of names to ReST-formatted references of where
+            a data object originates
+
+        If either one of these attributes is present, the member will be
+        classified accordingly. Otherwise, it will be classified as local if it
+        appeared in the __all__ list, or as imported otherwise
+
+    """
+    roles = {'function': 'func', 'module': 'mod', 'class': 'class',
+             'exception': 'exc', 'data': 'data'}
+    # not included, because they cannot occur at module level:
+    #   'method': 'meth', 'attribute': 'attr', 'instanceattribute': 'attr'
+
+    def check_typ(typ, mod, member):
+        """Check if mod.member is of the desired typ"""
+        documenter = get_documenter(member, mod)
+        if typ is None:
+            return True
+        if typ == getattr(documenter, 'objtype', None):
+            return True
+        if hasattr(documenter, 'directivetype'):
+            return roles[typ] == getattr(documenter, 'directivetype')
+
+    def is_local(mod, member, name):
+        """Check whether mod.member is defined locally in module mod"""
+        if hasattr(member, '__module__'):
+            return getattr(member, '__module__') == mod.__name__
+        else:
+            # we take missing __module__ to mean the member is a data object
+            if hasattr(mod, '__local_data__'):
+                return name in getattr(mod, '__local_data__')
+            if hasattr(mod, '__imported_data__'):
+                return name not in getattr(mod, '__imported_data__')
+            else:
+                return name in getattr(mod, '__all__', [])
+
+    if typ is not None and typ not in roles:
+        raise ValueError("typ must be None or one of %s"
+                         % str(list(roles.keys())))
+    items = []
+    public = []
+    all_list = getattr(mod, '__all__', [])
+    for name in dir(mod):
+        try:
+            member = safe_getattr(mod, name)
+        except AttributeError:
+            continue
+        if check_typ(typ, mod, member):
+            if in__all__ and name not in all_list:
+                continue
+            if include_imported or is_local(mod, member, name):
+                if as_refs:
+                    documenter = get_documenter(member, mod)
+                    role = roles.get(documenter.objtype, 'obj')
+                    ref = _get_member_ref_str(
+                            name, obj=member, role=role,
+                            known_refs=getattr(mod, '__imported_data__', None))
+                    items.append(ref)
+                    if not name.startswith('_'):
+                        public.append(ref)
+                else:
+                    items.append(name)
+                    if not name.startswith('_'):
+                        public.append(name)
+    return public, items
+
+
+def _get_member_ref_str(name, obj, role='obj', known_refs=None):
+    """generate a ReST-formatted reference link to the given `obj` of type
+    `role`, using `name` as the link text"""
+    if known_refs is not None:
+        if name in known_refs:
+            return known_refs[name]
+    if hasattr(obj, '__name__'):
+        try:
+            ref = obj.__module__ + '.' + obj.__name__
+        except AttributeError:
+            ref = obj.__name__
+        except TypeError:  # e.g. obj.__name__ is None
+            ref = name
+    else:
+        ref = name
+    return ":%s:`%s <%s>`" % (role, name, ref)
+
+
+def _get_mod_ns(name, fullname, includeprivate):
+    """Return the template context of module identified by `fullname` as a
+    dict"""
+    ns = {  # template variables
+        'name': name, 'fullname': fullname, 'members': [], 'functions': [],
+        'classes': [], 'exceptions': [], 'subpackages': [], 'submodules': [],
+        'all_refs': [], 'members_imports': [], 'members_imports_refs': [],
+        'data': [], 'doc':None}
+    p = 0
+    if includeprivate:
+        p = 1
+    mod = importlib.import_module(fullname)
+    ns['members'] = _get_members(mod)[p]
+    ns['functions'] = _get_members(mod, typ='function')[p]
+    ns['classes'] = _get_members(mod, typ='class')[p]
+    ns['exceptions'] = _get_members(mod, typ='exception')[p]
+    ns['all_refs'] = _get_members(mod, include_imported=True, in__all__=True, as_refs=True)[p]
+    ns['members_imports'] = _get_members(mod, include_imported=True)[p]
+    ns['members_imports_refs'] = _get_members(mod, include_imported=True, as_refs=True)[p]
+    ns['data'] = _get_members(mod, typ='data')[p]
+    ns['doc'] = mod.__doc__
+    return ns
+
+
+def create_package_file(root, master_package, subroot, py_files, opts, subs, is_namespace):
+    # type: (unicode, unicode, unicode, List[unicode], Any, List[unicode], bool) -> None
+    """Build the text of the file and write the file."""
+
+    use_templates = False
+    if opts.templates:
+        use_templates = True
+        template_loader = FileSystemLoader(opts.templates)
+        template_env = SandboxedEnvironment(loader=template_loader)
+
+    fullname = makename(master_package, subroot)
+
+    text = format_heading(
+        1, ('%s package' if not is_namespace else "%s namespace") % fullname)
+
+    if opts.modulefirst and not is_namespace:
+        text += format_directive(subroot, master_package)
+        text += '\n'
+
+    # build a list of directories that are subpackages (contain an INITPY file)
+    subs = [sub for sub in subs if path.isfile(path.join(root, sub, INITPY))]
+    # if there are some package directories, add a TOC for these subpackages
+    if subs:
+        text += format_heading(2, 'Subpackages')
+        text += '.. toctree::\n\n'
+        for sub in subs:
+            text += '    %s.%s\n' % (makename(master_package, subroot), sub)
+        text += '\n'
+
+    submods = [path.splitext(sub)[0] for sub in py_files
+               if not shall_skip(path.join(root, sub), opts) and
+               sub != INITPY]
+
+
+    if submods:
+        text += format_heading(2, 'Submodules')
+        if opts.separatemodules:
+            text += '.. toctree::\n\n'
+            for submod in submods:
+                modfile = makename(master_package, makename(subroot, submod))
+                text += '   %s\n' % modfile
+
+                # generate separate file for this module
+                if not opts.noheadings:
+                    filetext = format_heading(1, '%s module' % modfile)
+                else:
+                    filetext = ''
+                filetext += format_directive(makename(subroot, submod),
+                                             master_package)
+                if use_templates:
+                    try:
+                        mod_ns = _get_mod_ns(
+                            name=submod, fullname=modfile,
+                            includeprivate=opts.includeprivate)
+                        template = template_env.get_template('module.rst')
+                        filetext = template.render(**mod_ns)
+                    except ImportError as e:
+                        _warn('failed to import %r: %s' % (modfile, e))
+                write_file(modfile, filetext, opts)
+        else:
+            for submod in submods:
+                modfile = makename(master_package, makename(subroot, submod))
+                if not opts.noheadings:
+                    text += format_heading(2, '%s module' % modfile)
+                text += format_directive(makename(subroot, submod),
+                                         master_package)
+                text += '\n'
+        text += '\n'
+
+    if use_templates:
+        try:
+            package_ns = _get_mod_ns(name=subroot, fullname=fullname,
+                                     includeprivate=opts.includeprivate)
+            package_ns['subpackages'] = subs
+            package_ns['submodules'] = submods
+
+            template = template_env.get_template('package.rst')
+            text = template.render(**package_ns)
+
+        except ImportError as e:
+            _warn('failed to import %r: %s' % (fullname, e))
+    else:
+        if not opts.modulefirst and not is_namespace:
+            text += format_heading(2, 'Module contents')
+            text += format_directive(subroot, master_package)
+
+    write_file(makename(master_package, subroot), text, opts)
+
+
+def create_modules_toc_file(modules, opts, name='modules'):
+    # type: (List[unicode], Any, unicode) -> None
+    """Create the module's index."""
+    text = format_heading(1, '%s' % opts.header)
+    text += '.. toctree::\n'
+    text += '   :maxdepth: %s\n\n' % opts.maxdepth
+
+    modules.sort()
+    prev_module = ''  # type: unicode
+    for module in modules:
+        # look if the module is a subpackage and, if yes, ignore it
+        if module.startswith(prev_module + '.'):
+            continue
+        prev_module = module
+        text += '   %s\n' % module
+
+    write_file(name, text, opts)
+
+
+def shall_skip(module, opts):
+    # type: (unicode, Any) -> bool
+    """Check if we want to skip this module."""
+    # skip if the file doesn't exist and not using implicit namespaces
+    if not opts.implicit_namespaces and not path.exists(module):
+        return True
+
+    # skip it if there is nothing (or just \n or \r\n) in the file
+    if path.exists(module) and path.getsize(module) <= 2:
+        return True
+
+    # skip if it has a "private" name and this is selected
+    filename = path.basename(module)
+    if filename != '__init__.py' and filename.startswith('_') and \
+       not opts.includeprivate:
+        return True
+    return False
+
+
+def recurse_tree(rootpath, excludes, opts):
+    # type: (unicode, List[unicode], Any) -> List[unicode]
+    """
+    Look for every file in the directory tree and create the corresponding
+    ReST files.
+    """
+    # check if the base directory is a package and get its name
+    if INITPY in os.listdir(rootpath):
+        root_package = rootpath.split(path.sep)[-1]
+    else:
+        # otherwise, the base is a directory with packages
+        root_package = None
+
+    toplevels = []
+    followlinks = getattr(opts, 'followlinks', False)
+    includeprivate = getattr(opts, 'includeprivate', False)
+    implicit_namespaces = getattr(opts, 'implicit_namespaces', False)
+    for root, subs, files in walk(rootpath, followlinks=followlinks):
+        # document only Python module files (that aren't excluded)
+        py_files = sorted(f for f in files
+                          if path.splitext(f)[1] in PY_SUFFIXES and
+                          not is_excluded(path.join(root, f), excludes))
+        is_pkg = INITPY in py_files
+        is_namespace = INITPY not in py_files and implicit_namespaces
+        if is_pkg:
+            py_files.remove(INITPY)
+            py_files.insert(0, INITPY)
+        elif root != rootpath:
+            # only accept non-package at toplevel unless using implicit namespaces
+            if not implicit_namespaces:
+                del subs[:]
+                continue
+        # remove hidden ('.') and private ('_') directories, as well as
+        # excluded dirs
+        if includeprivate:
+            exclude_prefixes = ('.',)  # type: Tuple[unicode, ...]
+        else:
+            exclude_prefixes = ('.', '_')
+        subs[:] = sorted(sub for sub in subs if not sub.startswith(exclude_prefixes) and
+                         not is_excluded(path.join(root, sub), excludes))
+
+        if is_pkg or is_namespace:
+            # we are in a package with something to document
+            if subs or len(py_files) > 1 or not shall_skip(path.join(root, INITPY), opts):
+                subpackage = root[len(rootpath):].lstrip(path.sep).\
+                    replace(path.sep, '.')
+                # if this is not a namespace or
+                # a namespace and there is something there to document
+                if not is_namespace or len(py_files) > 0:
+                    create_package_file(root, root_package, subpackage,
+                                        py_files, opts, subs, is_namespace)
+                    toplevels.append(makename(root_package, subpackage))
+        else:
+            # if we are at the root level, we don't require it to be a package
+            assert root == rootpath and root_package is None
+            if opts.templates:
+                sys.path.insert(0, rootpath)
+            for py_file in py_files:
+                if not shall_skip(path.join(rootpath, py_file), opts):
+                    module = path.splitext(py_file)[0]
+                    create_module_file(root_package, module, opts)
+                    toplevels.append(module)
+            if opts.templates:
+                sys.path.pop(0)
+
+    return toplevels
+
+
+def normalize_excludes(rootpath, excludes):
+    # type: (unicode, List[unicode]) -> List[unicode]
+    """Normalize the excluded directory list."""
+    return [path.abspath(exclude) for exclude in excludes]
+
+
+def is_excluded(root, excludes):
+    # type: (unicode, List[unicode]) -> bool
+    """Check if the directory is in the exclude list.
+
+    Note: by having trailing slashes, we avoid common prefix issues, like
+          e.g. an exclude "foo" also accidentally excluding "foobar".
+    """
+    for exclude in excludes:
+        if fnmatch(root, exclude):
+            return True
+    return False
+
+
+def main(argv=sys.argv):
+    # type: (List[str]) -> int
+    """Parse and check the command line arguments."""
+    parser = optparse.OptionParser(
+        usage="""\
+usage: %prog [options] -o <output_path> <module_path> [exclude_pattern, ...]
+
+Look recursively in <module_path> for Python modules and packages and create
+one reST file with automodule directives per package in the <output_path>.
+
+The <exclude_pattern>s can be file and/or directory patterns that will be
+excluded from generation.
+
+Note: By default this script will not overwrite already created files.""")
+
+    parser.add_option('-o', '--output-dir', action='store', dest='destdir',
+                      help='Directory to place all output', default='')
+    parser.add_option('-d', '--maxdepth', action='store', dest='maxdepth',
+                      help='Maximum depth of submodules to show in the TOC '
+                      '(default: 4)', type='int', default=4)
+    parser.add_option('-f', '--force', action='store_true', dest='force',
+                      help='Overwrite existing files')
+    parser.add_option('-l', '--follow-links', action='store_true',
+                      dest='followlinks', default=False,
+                      help='Follow symbolic links. Powerful when combined '
+                      'with collective.recipe.omelette.')
+    parser.add_option('-n', '--dry-run', action='store_true', dest='dryrun',
+                      help='Run the script without creating files')
+    parser.add_option('-e', '--separate', action='store_true',
+                      dest='separatemodules',
+                      help='Put documentation for each module on its own page')
+    parser.add_option('-P', '--private', action='store_true',
+                      dest='includeprivate',
+                      help='Include "_private" modules')
+    parser.add_option('-T', '--no-toc', action='store_true', dest='notoc',
+                      help='Don\'t create a table of contents file')
+    parser.add_option('-E', '--no-headings', action='store_true',
+                      dest='noheadings',
+                      help='Don\'t create headings for the module/package '
+                           'packages (e.g. when the docstrings already contain '
+                           'them). No effect in combination with -t')
+    parser.add_option('-M', '--module-first', action='store_true',
+                      dest='modulefirst',
+                      help='Put module documentation before submodule '
+                      'documentation (no effect in combination with -t)')
+    parser.add_option('--implicit-namespaces', action='store_true',
+                      dest='implicit_namespaces',
+                      help='Interpret module paths according to PEP-0420 '
+                           'implicit namespaces specification')
+    parser.add_option('-s', '--suffix', action='store', dest='suffix',
+                      help='file suffix (default: rst)', default='rst')
+    parser.add_option('-F', '--full', action='store_true', dest='full',
+                      help='Generate a full project with sphinx-quickstart')
+    parser.add_option('-a', '--append-syspath', action='store_true',
+                      dest='append_syspath',
+                      help='Append module_path to sys.path, used when --full is given')
+    parser.add_option("-t", "--templates", action="store", type="string",
+                      dest="templates", default=None,
+                      help="Custom template directory (default: %default). "
+                      "Must contain template files package.rst and/or "
+                      "module.rst")
+    parser.add_option('-H', '--doc-project', action='store', dest='header',
+                      help='Project name (default: root module name)')
+    parser.add_option('-A', '--doc-author', action='store', dest='author',
+                      type='str',
+                      help='Project author(s), used when --full is given')
+    parser.add_option('-V', '--doc-version', action='store', dest='version',
+                      help='Project version, used when --full is given')
+    parser.add_option('-R', '--doc-release', action='store', dest='release',
+                      help='Project release, used when --full is given, '
+                      'defaults to --doc-version')
+    parser.add_option('--version', action='store_true', dest='show_version',
+                      help='Show version information and exit')
+    group = parser.add_option_group('Extension options')
+    for ext in EXTENSIONS:
+        group.add_option('--ext-' + ext, action='store_true',
+                         dest='ext_' + ext, default=False,
+                         help='enable %s extension' % ext)
+
+    (opts, args) = parser.parse_args(argv[1:])
+
+    if opts.show_version:
+        #print('Sphinx (sphinx-apidoc) %s' % __display_version__)
+        print('better-apidoc %s' % __display_version__)
+        return 0
+
+    if not args:
+        parser.error('A package path is required.')
+
+    rootpath, excludes = args[0], args[1:]
+    if not opts.destdir:
+        parser.error('An output directory is required.')
+    if opts.header is None:
+        opts.header = path.abspath(rootpath).split(path.sep)[-1]
+    if opts.suffix.startswith('.'):
+        opts.suffix = opts.suffix[1:]
+    if not path.isdir(rootpath):
+        print('%s is not a directory.' % rootpath, file=sys.stderr)
+        sys.exit(1)
+    if not path.isdir(opts.destdir):
+        if not opts.dryrun:
+            os.makedirs(opts.destdir)
+    rootpath = path.abspath(rootpath)
+    excludes = normalize_excludes(rootpath, excludes)
+    try:
+        modules = recurse_tree(rootpath, excludes, opts)
+    except TemplateNotFound as e:
+        print('Cannot find template in %s: %s' %
+              (opts.templates, e), file=sys.stderr)
+        sys.exit(1)
+
+    if opts.full:
+        raise NotImplementedError("--full not supported")
+        # This would only make sense if this script was integrated in Sphinx
+        from sphinx import quickstart as qs
+        modules.sort()
+        prev_module = ''  # type: unicode
+        text = ''
+        for module in modules:
+            if module.startswith(prev_module + '.'):
+                continue
+            prev_module = module
+            text += '   %s\n' % module
+        d = dict(
+            path = opts.destdir,
+            sep = False,
+            dot = '_',
+            project = opts.header,
+            author = opts.author or 'Author',
+            version = opts.version or '',
+            release = opts.release or opts.version or '',
+            suffix = '.' + opts.suffix,
+            master = 'index',
+            epub = True,
+            ext_autodoc = True,
+            ext_viewcode = True,
+            ext_todo = True,
+            makefile = True,
+            batchfile = True,
+            mastertocmaxdepth = opts.maxdepth,
+            mastertoctree = text,
+            language = 'en',
+            module_path = rootpath,
+            append_syspath = opts.append_syspath,
+        )
+        enabled_exts = {'ext_' + ext: getattr(opts, 'ext_' + ext)
+                        for ext in EXTENSIONS if getattr(opts, 'ext_' + ext)}
+        d.update(enabled_exts)
+
+        if isinstance(opts.header, binary_type):
+            d['project'] = d['project'].decode('utf-8')
+        if isinstance(opts.author, binary_type):
+            d['author'] = d['author'].decode('utf-8')
+        if isinstance(opts.version, binary_type):
+            d['version'] = d['version'].decode('utf-8')
+        if isinstance(opts.release, binary_type):
+            d['release'] = d['release'].decode('utf-8')
+
+        if not opts.dryrun:
+            qs.generate(d, silent=True, overwrite=opts.force)
+    elif not opts.notoc:
+        create_modules_toc_file(modules, opts)
+    return 0
+
+
+# So program can be started with "python -m sphinx.apidoc ..."
+if __name__ == "__main__":
+    main()
diff --git a/docs/generate.sh b/docs/generate.sh
index 2235e40f308ce5facb43a5766b8002b22b51d681..3d9a5a2cd055378f77edf495bd7502501676e3bc 100755
--- a/docs/generate.sh
+++ b/docs/generate.sh
@@ -1,3 +1,4 @@
 rm -rf docs/build docs/source/mod
-sphinx-apidoc -l -e -d 2 -o docs/source/mod nifty4
+#sphinx-apidoc -l -e -d 2 -o docs/source/mod nifty4
+python docs/better_apidoc.py -l -e -d 3 -t docs/generation-templates -o docs/source/mod nifty4
 sphinx-build -b html docs/source/ docs/build/
diff --git a/docs/generation-templates/module.rst b/docs/generation-templates/module.rst
new file mode 100644
index 0000000000000000000000000000000000000000..785f59f930992d7222bd21dfd6df3bbd82325ae3
--- /dev/null
+++ b/docs/generation-templates/module.rst
@@ -0,0 +1,76 @@
+{% if name %}
+{{ name }}
+{% for item in range(8 + name|length) -%}={%- endfor %}
+{% else %}
+{{ fullname }}
+{% for item in range(8 + fullname|length) -%}={%- endfor %}
+{% endif %}
+({{ fullname }} module)
+
+.. currentmodule:: {{ fullname }}
+
+.. automodule:: {{ fullname }}
+    {% if members -%}
+    :members: {{ members|join(", ") }}
+    :undoc-members:
+    :show-inheritance:
+    :member-order: bysource
+
+    Summary
+    -------
+
+    {%- if exceptions %}
+
+    Exceptions:
+
+    .. autosummary::
+        :nosignatures:
+        {% for item in exceptions %}
+        {{ item }}
+        {%- endfor %}
+    {%- endif %}
+
+    {%- if classes %}
+
+    Classes:
+
+    .. autosummary::
+        :nosignatures:
+        {% for item in classes %}
+        {{ item }}
+        {%- endfor %}
+    {%- endif %}
+
+    {%- if functions %}
+
+    Functions:
+
+    .. autosummary::
+        :nosignatures:
+        {% for item in functions %}
+        {{ item }}
+        {%- endfor %}
+    {%- endif %}
+    {%- endif %}
+
+    {%- if data %}
+
+    Data:
+
+    .. autosummary::
+        :nosignatures:
+        {% for item in data %}
+        {{ item }}
+        {%- endfor %}
+    {%- endif %}
+
+{% if all_refs %}
+    ``__all__``: {{ all_refs|join(", ") }}
+{%- endif %}
+
+
+{% if members %}
+    Reference
+    ---------
+
+{%- endif %}
diff --git a/docs/generation-templates/package.rst b/docs/generation-templates/package.rst
new file mode 100644
index 0000000000000000000000000000000000000000..490aa9506b1601410bab97ce89593038163d197b
--- /dev/null
+++ b/docs/generation-templates/package.rst
@@ -0,0 +1,103 @@
+{% if name %}
+{{ name }}
+{% for item in range(8 + name|length) -%}={%- endfor %}
+{% else %}
+{{ fullname }}
+{% for item in range(8 + fullname|length) -%}={%- endfor %}
+{% endif %}
+({{ fullname }} package)
+
+.. automodule:: {{ fullname }}
+    {% if members -%}
+    :members: {{ members|join(", ") }}
+    :undoc-members:
+    :show-inheritance:
+    {%- endif %}
+
+
+    {% if submodules %}
+    Submodules
+    ----------
+
+    .. toctree::
+        :maxdepth: 1
+        {% for item in submodules %}
+        {{ fullname }}.{{ item }}
+        {%- endfor %}
+    {%- endif -%}
+
+    {% if subpackages %}
+
+
+    Subpackages
+    -----------
+
+    .. toctree::
+        :maxdepth: 1
+        {% for item in subpackages %}
+        {{ fullname }}.{{ item }}
+        {%- endfor %}
+
+
+    {%- endif %}
+
+    {% if members %}
+    Summary
+    -------
+
+    {%- if exceptions %}
+
+    Exceptions:
+
+    .. autosummary::
+        :nosignatures:
+        {% for item in exceptions %}
+        {{ item }}
+        {%- endfor %}
+    {%- endif %}
+
+    {%- if classes %}
+
+    Classes:
+
+    .. autosummary::
+        :nosignatures:
+        {% for item in classes %}
+        {{ item }}
+        {%- endfor %}
+    {%- endif %}
+
+    {%- if functions %}
+
+    Functions:
+
+    .. autosummary::
+        :nosignatures:
+        {% for item in functions %}
+        {{ item }}
+        {%- endfor %}
+    {%- endif %}
+
+    {%- endif %}
+
+    {%- if data %}
+
+    Data:
+
+    .. autosummary::
+        :nosignatures:
+        {% for item in data %}
+        {{ item }}
+        {%- endfor %}
+    {%- endif %}
+
+{% if all_refs %}
+    ``__all__``: {{ all_refs|join(", ") }}
+{%- endif %}
+
+
+{% if members %}
+    Reference
+    ---------
+
+{%- endif %}
diff --git a/nifty4/domains/gl_space.py b/nifty4/domains/gl_space.py
index 6094f18906264f1c49719414fbb158c58b75fd6c..52e99e856ac96e7127b1d952ce51483f16f99553 100644
--- a/nifty4/domains/gl_space.py
+++ b/nifty4/domains/gl_space.py
@@ -22,14 +22,16 @@ from .structured_domain import StructuredDomain
 
 
 class GLSpace(StructuredDomain):
-    """NIFTy subclass for Gauss-Legendre pixelizations [#]_ of the two-sphere.
+    """NIFTy subclass for Gauss-Legendre pixelizations of the two-sphere.
+
+    Its harmonic partner is the :class:`LMSpace`
 
     Parameters
     ----------
     nlat : int
         Number of latitudinal bins (or rings) that are used for this
         pixelization.
-    nlon : int, *optional*
+    nlon : int, optional
         Number of longitudinal bins that are used for this pixelization.
         Default value is 2*nlat + 1.
 
@@ -37,19 +39,6 @@ class GLSpace(StructuredDomain):
     ------
     ValueError
         If input `nlat` or `nlon` is invalid.
-
-    See Also
-    --------
-    HPSpace, LMSpace
-
-    References
-    ----------
-    .. [#] M. Reinecke and D. Sverre Seljebotn, 2013, "Libsharp - spherical
-           harmonic transforms revisited";
-           `arXiv:1303.4945 <http://www.arxiv.org/abs/1303.4945>`_
-    .. [#] K.M. Gorski et al., 2005, "HEALPix: A Framework for
-           High-Resolution Discretization and Fast Analysis of Data
-           Distributed on the Sphere", *ApJ* 622..759G.
     """
 
     def __init__(self, nlat, nlon=None):
diff --git a/nifty4/domains/power_space.py b/nifty4/domains/power_space.py
index d4dc56ea0c8dd2a7f6dee6f897f676de6486ae00..1a001801927ec20fd1f63b989f370cdb5efe9e7c 100644
--- a/nifty4/domains/power_space.py
+++ b/nifty4/domains/power_space.py
@@ -24,14 +24,14 @@ from .. import dobj
 class PowerSpace(StructuredDomain):
     """NIFTy class for spaces of power spectra.
 
-    A power space is the result of a projection of a harmonic space where
+    A power space is the result of a projection of a harmonic domain where
     k-modes of equal length get mapped to one power index.
 
     Parameters
     ----------
-    harmonic_partner : Space
-        The harmonic Space of which this is the power space.
-    binbounds: None, or tuple/array/list of float
+    harmonic_partner : StructuredDomain
+        The harmonic domain of which this is the power space.
+    binbounds : None, or tuple of float
         if None:
             There will be as many bins as there are distinct k-vector lengths
             in the harmonic partner space.
@@ -54,9 +54,9 @@ class PowerSpace(StructuredDomain):
         binbounds[0]=first_bound and binbounds[-1]=last_bound and the remaining
         values equidistantly spaced (in linear scale) between these two.
 
-        nbin: integer
+        nbin : int
             the number of bins
-        first_bound, last_bound: float
+        first_bound, last_bound : float
             the k values for the right boundary of the first bin and the left
             boundary of the last bin, respectively. They are given in length
             units of the harmonic partner space.
@@ -74,9 +74,9 @@ class PowerSpace(StructuredDomain):
         values equidistantly spaced (in natural logarithmic scale)
         between these two.
 
-        nbin: integer
+        nbin : int
             the number of bins
-        first_bound, last_bound: float
+        first_bound, last_bound : float
             the k values for the right boundary of the first bin and the left
             boundary of the last bin, respectively. They are given in length
             units of the harmonic partner space.
diff --git a/nifty4/domains/rg_space.py b/nifty4/domains/rg_space.py
index ce3cec754a031172736761c6facb10d165d4fa2b..0f3d5834a25b429d3938387db555b53bbf43120d 100644
--- a/nifty4/domains/rg_space.py
+++ b/nifty4/domains/rg_space.py
@@ -30,16 +30,16 @@ class RGSpace(StructuredDomain):
 
     Parameters
     ----------
-    shape : {int, numpy.ndarray}
+    shape : int or tuple of int
         Number of grid points or numbers of gridpoints along each axis.
-    distances : {float, numpy.ndarray}, *optional*
+    distances : None or float or tuple of float, optional
         Distance between two grid points along each axis
         (default: None).
         If distances==None:
         if harmonic==True, all distances will be set to 1
         if harmonic==False, the distance along each axis will be
         set to the inverse of the number of points along that axis.
-    harmonic : bool, *optional*
+    harmonic : bool, optional
         Whether the space represents a grid in position or harmonic space.
         (default: False).
     """
diff --git a/nifty4/field.py b/nifty4/field.py
index 35ccce8b72e9e3be02d7801ac52a1243f734f212..579311828cc5cbbf65633372f7a366f980996732 100644
--- a/nifty4/field.py
+++ b/nifty4/field.py
@@ -354,7 +354,7 @@ class Field(object):
 
         Returns
         -------
-        Field:
+        Field
             The complex conjugated field.
         """
         return Field(self._domain, self.val.conjugate())
diff --git a/nifty4/library/critical_power_energy.py b/nifty4/library/critical_power_energy.py
index f94ceb16e8dc9d8df8fc786e3f311f76b83892c5..5ec4d32f9a6cb2f3ec466c6d865312f25a45df85 100644
--- a/nifty4/library/critical_power_energy.py
+++ b/nifty4/library/critical_power_energy.py
@@ -34,33 +34,34 @@ class CriticalPowerEnergy(Energy):
 
     Parameters
     ----------
-    position : Field,
+    position : Field
         The current position of this energy. (Logarithm of power spectrum)
-    m : Field,
+    m : Field
         The map whose power spectrum is inferred. Needs to live in harmonic
         signal space.
-    D : EndomorphicOperator,
+    D : EndomorphicOperator, optional
         The curvature of the Gaussian encoding the posterior covariance.
         If not specified, the map is assumed to be no reconstruction.
         default : None
-    alpha : float
+    alpha : float, optional
         The spectral prior of the inverse gamma distribution. 1.0 corresponds
         to non-informative.
         default : 1.0
-    q : float
+    q : float, optional
         The cutoff parameter of the inverse gamma distribution. 0.0 corresponds
         to non-informative.
         default : 0.0
-    smoothness_prior : float
+    smoothness_prior : float, optional
         Controls the strength of the smoothness prior
         default : 0.0
-    logarithmic : boolean
+    logarithmic : bool, optional
         Whether smoothness acts on linear or logarithmic scale.
-    samples : integer
+        default : True
+    samples : int, optional
         Number of samples used for the estimation of the uncertainty
         corrections.
         default : 3
-    w : Field
+    w : Field, optional
         The contribution from the map with or without uncertainty. It is used
         to pass on the result of the costly sampling during the minimization.
         default : None
diff --git a/nifty4/library/nonlinear_power_energy.py b/nifty4/library/nonlinear_power_energy.py
index dc78a904b8f398c521d08f8beb2513be065142b0..55eed2dca8da353606650e1740d3e51f00ac2a8d 100644
--- a/nifty4/library/nonlinear_power_energy.py
+++ b/nifty4/library/nonlinear_power_energy.py
@@ -34,18 +34,18 @@ class NonlinearPowerEnergy(Energy):
 
     Parameters
     ----------
-    position : Field,
+    position : Field
         The current position of this energy.
-    m : Field,
+    m : Field
         The map whichs power spectrum has to be inferred
-    D : EndomorphicOperator,
+    D : EndomorphicOperator
         The curvature of the Gaussian encoding the posterior covariance.
         If not specified, the map is assumed to be no reconstruction.
         default : None
     sigma : float
         The parameter of the smoothness prior.
         default : ??? None? ???????
-    samples : integer
+    samples : int
         Number of samples used for the estimation of the uncertainty
         corrections.
         default : 3
diff --git a/nifty4/minimization/conjugate_gradient.py b/nifty4/minimization/conjugate_gradient.py
index fad9d7333beb72a7fb7216bb5a40f7bd514b1616..974bb969e63d83569468bd3747522eaafc28fcd1 100644
--- a/nifty4/minimization/conjugate_gradient.py
+++ b/nifty4/minimization/conjugate_gradient.py
@@ -56,9 +56,9 @@ class ConjugateGradient(Minimizer):
 
         Returns
         -------
-        energy : QuadraticEnergy
+        QuadraticEnergy
             state at last point of the iteration
-        status : integer
+        int
             Can be controller.CONVERGED or controller.ERROR
         """
         controller = self._controller
diff --git a/nifty4/minimization/descent_minimizer.py b/nifty4/minimization/descent_minimizer.py
index a4e45b01ccafe93c78d764f8303554c905ffd316..05a142030bf30526410afd81278b2e1f6965eb82 100644
--- a/nifty4/minimization/descent_minimizer.py
+++ b/nifty4/minimization/descent_minimizer.py
@@ -50,15 +50,15 @@ class DescentMinimizer(Minimizer):
 
         Parameters
         ----------
-        energy : Energy object
+        energy : Energy
            Energy object which provides value, gradient and curvature at a
            specific position in parameter space.
 
         Returns
         -------
-        energy : Energy object
+        Energy
             Latest `energy` of the minimization.
-        status : integer
+        int
             Can be controller.CONVERGED or controller.ERROR
 
         Notes
diff --git a/nifty4/minimization/line_search_strong_wolfe.py b/nifty4/minimization/line_search_strong_wolfe.py
index db87675c6caf98ea7e2e57184157afd2c0542e30..c53d8ad572855e26b93fa8848b4f49587bfd7f61 100644
--- a/nifty4/minimization/line_search_strong_wolfe.py
+++ b/nifty4/minimization/line_search_strong_wolfe.py
@@ -41,13 +41,13 @@ class LineSearchStrongWolfe(LineSearch):
         Parameter for curvature condition rule. (Default: 0.9)
     max_step_size : float
         Maximum step allowed in to be made in the descent direction.
-        (Default: 50)
-    max_iterations : integer
+        (Default: 1000000000)
+    max_iterations : int, optional
         Maximum number of iterations performed by the line search algorithm.
-        (Default: 10)
-    max_zoom_iterations : integer
+        (Default: 100)
+    max_zoom_iterations : int, optional
         Maximum number of iterations performed by the zoom algorithm.
-        (Default: 10)
+        (Default: 100)
     """
 
     def __init__(self, c1=1e-4, c2=0.9,
@@ -71,18 +71,18 @@ class LineSearchStrongWolfe(LineSearch):
 
         Parameters
         ----------
-        energy : Energy object
+        energy : Energy
             Energy object from which we will calculate the energy and the
             gradient at a specific point.
         pk : Field
             Vector pointing into the search direction.
-        f_k_minus_1 : float
+        f_k_minus_1 : float, optional
             Value of the fuction (which is being minimized) at the k-1
             iteration of the line search procedure. (Default: None)
 
         Returns
         -------
-        energy_star : Energy object
+        Energy
             The new Energy object on the new position.
         """
         le_0 = LineEnergy(0., energy, pk, 0.)
@@ -188,7 +188,7 @@ class LineSearchStrongWolfe(LineSearch):
 
         Returns
         -------
-        energy_star : Energy object
+        Energy
             The new Energy object on the new position.
         """
         cubic_delta = 0.2  # cubic interpolant checks
diff --git a/nifty4/minimization/minimizer.py b/nifty4/minimization/minimizer.py
index 1c6ff8b94fa252368560a7bd5ca7505c6233c365..bbcee440eeffe0c9833c137dea6a173b3315bbbd 100644
--- a/nifty4/minimization/minimizer.py
+++ b/nifty4/minimization/minimizer.py
@@ -39,8 +39,9 @@ class Minimizer(with_metaclass(NiftyMeta, type('NewBase', (object,), {}))):
 
         Returns
         -------
-        energy : Energy object
+        Energy
             Latest `energy` of the minimization.
-        status : integer
+        int
+            exit status of the minimization
         """
         raise NotImplementedError
diff --git a/nifty4/minimization/nonlinear_cg.py b/nifty4/minimization/nonlinear_cg.py
index 32127e603ebe19e6da132ee28b44c8beb2c19bdf..4b7146651c94452b8f36f3b4532a775cac610970 100644
--- a/nifty4/minimization/nonlinear_cg.py
+++ b/nifty4/minimization/nonlinear_cg.py
@@ -51,9 +51,9 @@ class NonlinearCG(Minimizer):
 
         Returns
         -------
-        energy :
+        Energy
             state at last point of the iteration
-        status : integer
+        int
             Can be controller.CONVERGED or controller.ERROR
         """
         controller = self._controller
diff --git a/nifty4/minimization/vl_bfgs.py b/nifty4/minimization/vl_bfgs.py
index f9da3dd3d4692d241231c24b5d5fd1ef6898d07d..e9239417a7617f9c2328d707d4f19cdeb416a3cd 100644
--- a/nifty4/minimization/vl_bfgs.py
+++ b/nifty4/minimization/vl_bfgs.py
@@ -87,7 +87,7 @@ class InformationStore(object):
 
     Parameters
     ----------
-    max_history_length : integer
+    max_history_length : int
         Maximum number of stored past updates.
     x0 : Field
         Initial position in variable space.
@@ -96,7 +96,7 @@ class InformationStore(object):
 
     Attributes
     ----------
-    max_history_length : integer
+    max_history_length : int
         Maximum number of stored past updates.
     s : List
         Circular buffer of past position differences, which are Fields.
@@ -106,7 +106,7 @@ class InformationStore(object):
         Latest position in variable space.
     last_gradient : Field
         Gradient at latest position.
-    k : integer
+    k : int
         Number of updates that have taken place
     ss : numpy.ndarray
         2D circular buffer of scalar products between different elements of s.
@@ -139,7 +139,7 @@ class InformationStore(object):
 
         Returns
         -------
-        result : List
+        List
             List of new basis vectors.
         """
         result = []
@@ -165,7 +165,7 @@ class InformationStore(object):
 
         Returns
         -------
-        result : numpy.ndarray
+        numpy.ndarray
             Scalar matrix.
         """
         m = self.history_length
@@ -207,7 +207,7 @@ class InformationStore(object):
 
         Returns
         -------
-        delta : List
+        List
             List of the new scalar coefficients (deltas).
         """
         m = self.history_length
diff --git a/nifty4/operators/fft_smoothing_operator.py b/nifty4/operators/fft_smoothing_operator.py
index 239b6e1dae41e05c70d535e4704879e495718a01..4967cf77708a6f8931f2c056f17c062306c67308 100644
--- a/nifty4/operators/fft_smoothing_operator.py
+++ b/nifty4/operators/fft_smoothing_operator.py
@@ -30,7 +30,7 @@ def FFTSmoothingOperator(domain, sigma, space=None):
 
     Parameters
     ----------
-    domain : Domain, tuple of Domains, or DomainTuple
+    domain : Domain, tuple of Domain, or DomainTuple
        The total domain of the operator's input and output fields
 
     sigma : float>=0
@@ -38,7 +38,7 @@ def FFTSmoothingOperator(domain, sigma, space=None):
        the RGSpace the operator is working on.
        If `sigma==0`, an identity operator will be returned.
 
-    space: integer, *optional*
+    space : int, optional
        The index of the sub-domain on which the smoothing is performed.
        Can be omitted if `domain` only has one sub-domain.
 
diff --git a/nifty4/operators/harmonic_transform_operator.py b/nifty4/operators/harmonic_transform_operator.py
index bcae0c564357122fb58ac0065df6e1b927118f73..598820833e6037d4ade4e15cd0571a1d2ffdc04a 100644
--- a/nifty4/operators/harmonic_transform_operator.py
+++ b/nifty4/operators/harmonic_transform_operator.py
@@ -39,18 +39,19 @@ class HarmonicTransformOperator(LinearOperator):
 
     Parameters
     ----------
-    domain: Space, tuple of Spaces or DomainObject
+    domain : Domain, tuple of Domain or DomainTuple
         The domain of the data that is input by "times" and output by
         "adjoint_times".
-    target: Space (optional)
-        The target space of the transform operation.
-        If omitted, a space will be chosen automatically.
-        Whenever the input space of the transform is an RGSpace, the codomain
+    target : Domain, optional
+        The target domain of the transform operation.
+        If omitted, a domain will be chosen automatically.
+        Whenever the input domain of the transform is an RGSpace, the codomain
         (and its parameters) are uniquely determined.
         For LMSpace, a GLSpace of sufficient resolution is chosen.
-    space: the index of the space on which the operator should act
-        If None, it is set to 0 if domain contains exactly one space.
-        domain[space] must be a harmonic space.
+    space : int, optional
+        The index of the domain on which the operator should act
+        If None, it is set to 0 if domain contains exactly one subdomain.
+        domain[space] must be a harmonic domain.
     """
 
     def __init__(self, domain, target=None, space=None):
diff --git a/nifty4/operators/laplace_operator.py b/nifty4/operators/laplace_operator.py
index 8b40a19fc4455d1302f2bccfa6d9e94990e8c50b..322e664342d522003c9f27984fa5c5ab07bbbfea 100644
--- a/nifty4/operators/laplace_operator.py
+++ b/nifty4/operators/laplace_operator.py
@@ -35,7 +35,7 @@ class LaplaceOperator(EndomorphicOperator):
 
     Parameters
     ----------
-    logarithmic : boolean,
+    logarithmic : bool, optional
         Whether smoothness is calculated on a logarithmic scale or linear scale
         default : True
     space : int
diff --git a/nifty4/operators/linear_operator.py b/nifty4/operators/linear_operator.py
index 96009d7230ef6511b3bc3d57991515a3f32104ee..0993eb1cec9d3ff347a520af6e5dd5579bbebf10 100644
--- a/nifty4/operators/linear_operator.py
+++ b/nifty4/operators/linear_operator.py
@@ -57,7 +57,7 @@ class LinearOperator(with_metaclass(
     @abc.abstractproperty
     def domain(self):
         """
-        domain : DomainTuple
+        DomainTuple
             The domain on which the Operator's input Field lives.
             Every Operator which inherits from the abstract LinearOperator
             base class must have this attribute.
@@ -67,7 +67,7 @@ class LinearOperator(with_metaclass(
     @abc.abstractproperty
     def target(self):
         """
-        target : DomainTuple
+        DomainTuple
             The domain on which the Operator's output Field lives.
             Every Operator which inherits from the abstract LinearOperator
             base class must have this attribute.
@@ -77,7 +77,7 @@ class LinearOperator(with_metaclass(
     @property
     def inverse(self):
         """
-        inverse : LinearOperator
+        LinearOperator
             Returns a LinearOperator object which behaves as if it were
             the inverse of this operator.
         """
@@ -87,7 +87,7 @@ class LinearOperator(with_metaclass(
     @property
     def adjoint(self):
         """
-        adjoint : LinearOperator
+        LinearOperator
             Returns a LinearOperator object which behaves as if it were
             the adjoint of this operator.
         """
@@ -141,7 +141,7 @@ class LinearOperator(with_metaclass(
 
         Returns
         -------
-        out : integer
+        int
             This is any subset of LinearOperator.{TIMES, ADJOINT_TIMES,
             INVERSE_TIMES, ADJOINT_INVERSE_TIMES, INVERSE_ADJOINT_TIMES},
             joined together by the "|" operator.
@@ -158,7 +158,7 @@ class LinearOperator(with_metaclass(
             The input Field, living on the Operator's domain or target,
             depending on mode.
 
-        mode : integer
+        mode : int
             LinearOperator.TIMES: normal application
             LinearOperator.ADJOINT_TIMES: adjoint application
             LinearOperator.INVERSE_TIMES: inverse application
@@ -168,7 +168,7 @@ class LinearOperator(with_metaclass(
 
         Returns
         -------
-        out : Field
+        Field
             The processed Field living on the Operator's target or domain,
             depending on mode.
         """
@@ -188,7 +188,7 @@ class LinearOperator(with_metaclass(
 
         Returns
         -------
-        out : Field
+        Field
             The processed Field living on the Operator's target domain.
         """
         return self.apply(x, self.TIMES)
@@ -203,7 +203,7 @@ class LinearOperator(with_metaclass(
 
         Returns
         -------
-        out : Field
+        Field
             The processed Field living on the Operator's domain.
         """
         return self.apply(x, self.INVERSE_TIMES)
@@ -218,7 +218,7 @@ class LinearOperator(with_metaclass(
 
         Returns
         -------
-        out : Field
+        Field
             The processed Field living on the Operator's domain.
         """
         return self.apply(x, self.ADJOINT_TIMES)
@@ -233,7 +233,7 @@ class LinearOperator(with_metaclass(
 
         Returns
         -------
-        out : Field
+        Field
             The processed Field living on the Operator's target domain.
 
         Notes
@@ -267,7 +267,7 @@ class LinearOperator(with_metaclass(
 
         Returns
         -------
-        sample : Field
-            Returns the a sample from the Gaussian of given covariance.
+        Field
+            A sample from the Gaussian of given covariance.
         """
         raise NotImplementedError
diff --git a/nifty4/operators/smoothness_operator.py b/nifty4/operators/smoothness_operator.py
index 682578f40572e0448a6d249c1d7476b10741b63d..adb554ddbb02faf3c25523c56ea31de423589845 100644
--- a/nifty4/operators/smoothness_operator.py
+++ b/nifty4/operators/smoothness_operator.py
@@ -36,9 +36,9 @@ def SmoothnessOperator(domain, strength=1., logarithmic=True, space=None):
 
     Parameters
     ----------
-    strength: nonnegative float
+    strength : nonnegative float
         Specifies the strength of the SmoothnessOperator
-    logarithmic : boolean
+    logarithmic : bool, optional
         Whether smoothness is calculated on a logarithmic scale or linear scale
         default : True
     """
diff --git a/nifty4/sugar.py b/nifty4/sugar.py
index 16f2dd70537ea997a18323818aa2c6ccb2667fb3..a2f665efc6db1396a9eb4d7b434bfd2971983dae 100644
--- a/nifty4/sugar.py
+++ b/nifty4/sugar.py
@@ -63,13 +63,14 @@ def power_analyze(field, spaces=None, binbounds=None,
     ----------
     field : Field
         The field to be analyzed
-    spaces : int *optional*
-        The set of subspaces for which the powerspectrum shall be computed.
+    spaces : None or int or tuple of int, optional
+        The set of subdomains for which the powerspectrum shall be computed.
+        If None, all subdomains will be analyzed.
         (default : None).
-    binbounds : array-like *optional*
+    binbounds : None or array-like, optional
         Inner bounds of the bins (default : None).
-        if binbounds==None : bins are inferred.
-    keep_phase_information : boolean, *optional*
+        if binbounds is None : bins are inferred.
+    keep_phase_information : bool, optional
         If False, return a real-valued result containing the power spectrum
         of the input Field.
         If True, return a complex-valued result whose real component
@@ -82,7 +83,7 @@ def power_analyze(field, spaces=None, binbounds=None,
 
     Returns
     -------
-    out : Field
+    Field
         The output object. Its domain is a PowerSpace and it contains
         the power spectrum of 'field'.
     """
@@ -132,7 +133,7 @@ def power_synthesize_nonrandom(field, spaces=None):
 
 
 def power_synthesize(field, spaces=None, real_power=True, real_signal=True):
-    """ Yields a sampled field with `field`**2 as its power spectrum.
+    """Returns a sampled field with `field`**2 as its power spectrum.
 
     This method draws a Gaussian random field in the harmonic partner
     domain of this field's domains, using this field as power spectrum.
@@ -141,20 +142,20 @@ def power_synthesize(field, spaces=None, real_power=True, real_signal=True):
     ----------
     field : Field
         The input field containing the square root of the power spectrum
-    spaces : {tuple, int, None} *optional*
-        Specifies the subspace containing all the PowerSpaces which
+    spaces : None, int, or tuple of int, optional
+        Specifies the subdomains containing all the PowerSpaces which
         should be converted (default : None).
-        if spaces==None : Tries to convert the whole domain.
-    real_power : boolean *optional*
+        if spaces is None : Tries to convert the whole domain.
+    real_power : bool, optional
         Determines whether the power spectrum is treated as intrinsically
         real or complex (default : True).
-    real_signal : boolean *optional*
+    real_signal : bool, optional
         True will result in a purely real signal-space field
         (default : True).
 
     Returns
     -------
-    out : Field
+    Field
         The output object. A random field created with the power spectrum
         stored in the `spaces` in `field`.
 
@@ -166,7 +167,7 @@ def power_synthesize(field, spaces=None, real_power=True, real_signal=True):
 
     Raises
     ------
-    ValueError : If domain specified by `spaces` is not a PowerSpace.
+    ValueError : If a domain specified by `spaces` is not a PowerSpace.
     """
 
     spec = power_synthesize_nonrandom(field, spaces)
@@ -213,20 +214,21 @@ def create_power_operator(domain, power_spectrum, space=None, dtype=None):
 
     Parameters
     ----------
-    domain : DomainObject
+    domain : Domain, tuple of Domain or DomainTuple
         Domain over which the power operator shall live.
-    power_spectrum : callable of Field
-        An object that implements the power spectrum as a function of k.
+    power_spectrum : callable or Field
+        An object that contains the power spectrum as a function of k.
     space : int
-            the domain index on which the power operator will work
-    dtype : type *optional*
+        the domain index on which the power operator will work
+    dtype : None or type, optional
         dtype that the field holding the power spectrum shall use
         (default : None).
-        if dtype == None: the dtype of `power_spectrum` will be used.
+        if dtype is None: the dtype of `power_spectrum` will be used.
 
     Returns
     -------
-    DiagonalOperator : An operator that implements the given power spectrum.
+    DiagonalOperator
+        An operator that implements the given power spectrum.
     """
     domain = DomainTuple.make(domain)
     if space is None: