Commit cb30efb6 authored by Martin Reinecke

merge

parents fd73e398 4fb590bb
#FROM ubuntu:artful
FROM debian:testing-slim
# dependencies via apt
RUN apt-get update
ADD ci/install_basics.sh /tmp/install_basics.sh
RUN sh /tmp/install_basics.sh
# python dependencies
ADD ci/requirements.txt /tmp/requirements.txt
RUN pip install --process-dependency-links -r /tmp/requirements.txt
# copy sources and install nifty
COPY . /tmp/NIFTy
RUN pip install /tmp/NIFTy
# Cleanup
RUN rm -r /tmp/*
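# Example usage (illustrative; the tag is arbitrary and the import name is an
# assumption, not something this Dockerfile pins down):
#   docker build -t nifty-ci .
#   docker run --rm -it nifty-ci python -c "import nifty4"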
parameterized
coverage
git+https://gitlab.mpcdf.mpg.de/ift/pyHealpix.git
sphinx
sphinx==1.6.7
sphinx_rtd_theme
numpydoc
@@ -8,7 +8,7 @@ np.random.seed(42)
 if __name__ == "__main__":
     # Set up position space
     s_space = ift.RGSpace([128, 128])
-    #s_space = ift.HPSpace(32)
+    # s_space = ift.HPSpace(32)
     # Define harmonic transformation and associated harmonic space
     h_space = s_space.get_default_codomain()
@@ -63,7 +63,8 @@ if __name__ == "__main__":
                                      tol_abs_gradnorm=1e-15)
     minimizer = ift.RelaxedNewton(IC1)
-    ICI = ift.GradientNormController(iteration_limit=500, tol_abs_gradnorm=1e-10)
+    ICI = ift.GradientNormController(iteration_limit=500,
+                                     tol_abs_gradnorm=1e-10)
     map_inverter = ift.ConjugateGradient(controller=ICI)
     ICI2 = ift.GradientNormController(iteration_limit=200,
@@ -53,8 +53,9 @@ if __name__ == "__main__":
     MaskOperator = ift.DiagonalOperator(mask)
     R = ift.GeometryRemover(s_space)
     R = R*MaskOperator
-    #R = R*HT
-    #R = R * ift.create_harmonic_smoothing_operator((harmonic_space,),0,response_sigma)
+    # R = R*HT
+    # R = R * ift.create_harmonic_smoothing_operator((harmonic_space,), 0,
+    #                                                response_sigma)
     MeasurementOperator = R
     d_space = MeasurementOperator.target
@@ -71,8 +71,8 @@ if __name__ == "__main__":
     mask_2 = ift.Field(signal_space_2, ift.dobj.from_global_data(mask_2))
     R = ift.GeometryRemover(signal_domain)
-    R = R*ift.DiagonalOperator(mask_1, signal_domain,spaces=0)
-    R = R*ift.DiagonalOperator(mask_2, signal_domain,spaces=1)
+    R = R*ift.DiagonalOperator(mask_1, signal_domain, spaces=0)
+    R = R*ift.DiagonalOperator(mask_2, signal_domain, spaces=1)
     R = R*ht
     R = R * ift.create_harmonic_smoothing_operator(harmonic_domain, 0,
                                                    response_sigma_1)
@@ -101,11 +101,13 @@ if __name__ == "__main__":
     plotdict = {"colormap": "Planck-like"}
     plot_space = ift.RGSpace((N_pixels_1, N_pixels_2))
-    ift.plot(ift.Field(plot_space,val=ht(mock_signal).val), name='mock_signal.png',
-             **plotdict)
-    ift.plot(ift.Field(plot_space,val=data.val), name='data.png', **plotdict)
-    ift.plot(ift.Field(plot_space,val=m.val), name='map.png', **plotdict)
+    ift.plot(ift.Field(plot_space, val=ht(mock_signal).val),
+             name='mock_signal.png', **plotdict)
+    ift.plot(ift.Field(plot_space, val=data.val), name='data.png', **plotdict)
+    ift.plot(ift.Field(plot_space, val=m.val), name='map.png', **plotdict)
     # sampling the uncertainty map
     mean, variance = ift.probe_with_posterior_samples(wiener_curvature, ht, 10)
-    ift.plot(ift.Field(plot_space, val=ift.sqrt(variance).val), name="uncertainty.png", **plotdict)
-    ift.plot(ift.Field(plot_space, val=(mean+m).val), name="posterior_mean.png", **plotdict)
+    ift.plot(ift.Field(plot_space, val=ift.sqrt(variance).val),
+             name="uncertainty.png", **plotdict)
+    ift.plot(ift.Field(plot_space, val=(mean+m).val),
+             name="posterior_mean.png", **plotdict)
@@ -38,7 +38,8 @@ if __name__ == "__main__":
     R = ift.GeometryRemover(signal_space)
     R = R*ift.DiagonalOperator(mask)
     R = R*ht
-    R = R * ift.create_harmonic_smoothing_operator((harmonic_space,),0,response_sigma)
+    R = R * ift.create_harmonic_smoothing_operator((harmonic_space,), 0,
+                                                   response_sigma)
     data_domain = R.target[0]
     noiseless_data = R(mock_signal)
@@ -85,5 +85,5 @@ if __name__ == "__main__":
     # try to do the same with diagonal probing
     variance = ift.probe_diagonal(ht*curv.inverse*ht.adjoint, 100)
-    #sm = ift.FFTSmoothingOperator(s_space, sigma=0.005)
-    ift.plot(variance, name="posterior_variance2.png", **plotdict2)
+    # sm = ift.FFTSmoothingOperator(s_space, sigma=0.015)
+    ift.plot(variance, name="posterior_variance2.png", **plotdict)
# -*- coding: utf-8 -*-
"""
better apidoc
~~~~~~~~~~~~~
Parses a directory tree looking for Python modules and packages and creates
ReST files appropriately to create code documentation with Sphinx. It also
creates a modules index (named modules.<suffix>).
This is derived from the "sphinx-apidoc" script, which is:
Copyright 2007-2016 by the Sphinx team
http://www.sphinx-doc.org
It extends "sphinx-apidoc" by the --template / -t option, which allows
rendering the output ReST files from arbitrary Jinja templates.
:copyright: Copyright 2017 by Michael Goerz
:license: BSD, see LICENSE for details.
"""
from __future__ import print_function
import os
import sys
import importlib
import optparse
from os import path
from six import binary_type
from fnmatch import fnmatch
from jinja2 import FileSystemLoader, TemplateNotFound
from jinja2.sandbox import SandboxedEnvironment
from sphinx.util.osutil import FileAvoidWrite, walk
#from sphinx import __display_version__
from sphinx.quickstart import EXTENSIONS
from sphinx.ext.autosummary import get_documenter
from sphinx.util.inspect import safe_getattr
# Add documenters to AutoDirective registry
from sphinx.ext.autodoc import add_documenter, \
    ModuleDocumenter, ClassDocumenter, ExceptionDocumenter, DataDocumenter, \
    FunctionDocumenter, MethodDocumenter, AttributeDocumenter, \
    InstanceAttributeDocumenter
add_documenter(ModuleDocumenter)
add_documenter(ClassDocumenter)
add_documenter(ExceptionDocumenter)
add_documenter(DataDocumenter)
add_documenter(FunctionDocumenter)
add_documenter(MethodDocumenter)
add_documenter(AttributeDocumenter)
add_documenter(InstanceAttributeDocumenter)
__version__ = '0.1.2'
__display_version__ = __version__
if False:
    # For type annotation
    from typing import Any, List, Tuple  # NOQA
# automodule options
if 'SPHINX_APIDOC_OPTIONS' in os.environ:
    OPTIONS = os.environ['SPHINX_APIDOC_OPTIONS'].split(',')
else:
    OPTIONS = [
        'members',
        'undoc-members',
        # 'inherited-members', # disabled because there's a bug in sphinx
        'show-inheritance',
    ]
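# For instance (illustrative): running the script with
#   SPHINX_APIDOC_OPTIONS=members,show-inheritance
# set in the environment makes every generated automodule directive carry
# exactly the ':members:' and ':show-inheritance:' options instead of the
# defaults listed above.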
INITPY = '__init__.py'
PY_SUFFIXES = set(['.py', '.pyx'])
def _warn(msg):
    # type: (unicode) -> None
    print('WARNING: ' + msg, file=sys.stderr)
def makename(package, module):
    # type: (unicode, unicode) -> unicode
    """Join package and module with a dot."""
    # Both package and module can be None/empty.
    if package:
        name = package
        if module:
            name += '.' + module
    else:
        name = module
    return name
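# For example (illustrative names): makename('nifty4', 'operators') returns
# 'nifty4.operators', makename(None, 'operators') returns 'operators', and
# makename('nifty4', None) returns 'nifty4'.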
def write_file(name, text, opts):
    # type: (unicode, unicode, Any) -> None
    """Write the output file for module/package <name>."""
    fname = path.join(opts.destdir, '%s.%s' % (name, opts.suffix))
    if opts.dryrun:
        print('Would create file %s.' % fname)
        return
    if not opts.force and path.isfile(fname):
        print('File %s already exists, skipping.' % fname)
    else:
        print('Creating file %s.' % fname)
        with FileAvoidWrite(fname) as f:
            f.write(text)
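# For example (illustrative paths): with opts.destdir='docs/source/mod' and
# opts.suffix='rst', write_file('nifty4.operators', text, opts) writes
# docs/source/mod/nifty4.operators.rst -- unless opts.dryrun is set (it only
# prints what it would do) or the file exists and opts.force is unset (skip).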
def format_heading(level, text):
    # type: (int, unicode) -> unicode
    """Create a heading of <level> [1, 2 or 3 supported]."""
    underlining = ['=', '-', '~', ][level - 1] * len(text)
    return '%s\n%s\n\n' % (text, underlining)
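# For example: format_heading(2, 'Submodules') returns
# 'Submodules\n----------\n\n', i.e. the text underlined at level 2 with '-'.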
def format_directive(module, package=None):
    # type: (unicode, unicode) -> unicode
    """Create the automodule directive and add the options."""
    directive = '.. automodule:: %s\n' % makename(package, module)
    for option in OPTIONS:
        directive += ' :%s:\n' % option
    return directive
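# For example (illustrative module name): format_directive('minimization',
# 'nifty4') yields an '.. automodule:: nifty4.minimization' line followed by
# one indented option line per entry in OPTIONS, e.g. ':members:',
# ':undoc-members:' and ':show-inheritance:'.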
def create_module_file(package, module, opts):
    # type: (unicode, unicode, Any) -> None
    """Generate RST for a top-level module (i.e., not part of a package)"""
    if not opts.noheadings:
        text = format_heading(1, '%s module' % module)
    else:
        text = ''
    # text += format_heading(2, ':mod:`%s` Module' % module)
    text += format_directive(module, package)
    if opts.templates:
        template_loader = FileSystemLoader(opts.templates)
        template_env = SandboxedEnvironment(loader=template_loader)
        try:
            mod_ns = _get_mod_ns(
                name=module, fullname=module,
                includeprivate=opts.includeprivate)
            template = template_env.get_template('module.rst')
            text = template.render(**mod_ns)
        except ImportError as e:
            _warn('failed to import %r: %s' % (module, e))
    write_file(makename(package, module), text, opts)
def _get_members(
        mod, typ=None, include_imported=False, as_refs=False, in__all__=False):
    """Get (filtered) public/total members of the module or package `mod`.

    Args:
        mod: object resulting from importing a module or package
        typ: filter on members. If None, include all members. If one of
            'function', 'class', 'exception', 'data', only include members of
            the matching type
        include_imported: If True, also include members that are imports
        as_refs: If True, return ReST-formatted reference strings for all
            members, instead of just their names. In combination with
            `include_imported` or `in__all__`, these link to the original
            location where the member is defined
        in__all__: If True, return only members that are in ``mod.__all__``

    Returns:
        Two lists, `public` and `items`. The `public` list contains only the
        public members; `items` contains both the public and the private
        members, as strings.

    Note:
        For data members, there is no way to tell whether they were imported
        or defined locally (without parsing the source code). A module may
        define one or both attributes

        __local_data__: list of names of data objects defined locally
        __imported_data__: dict of names to ReST-formatted references of where
            a data object originates

        If either one of these attributes is present, the member will be
        classified accordingly. Otherwise, it will be classified as local if
        it appeared in the __all__ list, or as imported otherwise
    """
    roles = {'function': 'func', 'module': 'mod', 'class': 'class',
             'exception': 'exc', 'data': 'data'}
    # not included, because they cannot occur at module level:
    # 'method': 'meth', 'attribute': 'attr', 'instanceattribute': 'attr'

    def check_typ(typ, mod, member):
        """Check if mod.member is of the desired typ"""
        documenter = get_documenter(member, mod)
        if typ is None:
            return True
        if typ == getattr(documenter, 'objtype', None):
            return True
        if hasattr(documenter, 'directivetype'):
            return roles[typ] == getattr(documenter, 'directivetype')

    def is_local(mod, member, name):
        """Check whether mod.member is defined locally in module mod"""
        if hasattr(member, '__module__'):
            return getattr(member, '__module__') == mod.__name__
        else:
            # we take missing __module__ to mean the member is a data object
            if hasattr(mod, '__local_data__'):
                return name in getattr(mod, '__local_data__')
            if hasattr(mod, '__imported_data__'):
                return name not in getattr(mod, '__imported_data__')
            else:
                return name in getattr(mod, '__all__', [])

    if typ is not None and typ not in roles:
        raise ValueError("typ must be None or one of %s"
                         % str(list(roles.keys())))
    items = []
    public = []
    all_list = getattr(mod, '__all__', [])
    for name in dir(mod):
        try:
            member = safe_getattr(mod, name)
        except AttributeError:
            continue
        if check_typ(typ, mod, member):
            if in__all__ and name not in all_list:
                continue
            if include_imported or is_local(mod, member, name):
                if as_refs:
                    documenter = get_documenter(member, mod)
                    role = roles.get(documenter.objtype, 'obj')
                    ref = _get_member_ref_str(
                        name, obj=member, role=role,
                        known_refs=getattr(mod, '__imported_data__', None))
                    items.append(ref)
                    if not name.startswith('_'):
                        public.append(ref)
                else:
                    items.append(name)
                    if not name.startswith('_'):
                        public.append(name)
    return public, items
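# For example (illustrative): for an already imported module ``mod``,
#   public, items = _get_members(mod, typ='class')
# collects the names of the public classes of ``mod`` in ``public`` and of all
# its classes, including private ones, in ``items``.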
def _get_member_ref_str(name, obj, role='obj', known_refs=None):
    """generate a ReST-formatted reference link to the given `obj` of type
    `role`, using `name` as the link text"""
    if known_refs is not None:
        if name in known_refs:
            return known_refs[name]
    if hasattr(obj, '__name__'):
        try:
            ref = obj.__module__ + '.' + obj.__name__
        except AttributeError:
            ref = obj.__name__
        except TypeError:  # e.g. obj.__name__ is None
            ref = name
    else:
        ref = name
    return ":%s:`%s <%s>`" % (role, name, ref)
def _get_mod_ns(name, fullname, includeprivate):
    """Return the template context of module identified by `fullname` as a
    dict"""
    ns = {  # template variables
        'name': name, 'fullname': fullname, 'members': [], 'functions': [],
        'classes': [], 'exceptions': [], 'subpackages': [], 'submodules': [],
        'all_refs': [], 'members_imports': [], 'members_imports_refs': [],
        'data': [], 'doc': None}
    p = 0
    if includeprivate:
        p = 1
    mod = importlib.import_module(fullname)
    ns['members'] = _get_members(mod)[p]
    ns['functions'] = _get_members(mod, typ='function')[p]
    ns['classes'] = _get_members(mod, typ='class')[p]
    ns['exceptions'] = _get_members(mod, typ='exception')[p]
    ns['all_refs'] = _get_members(mod, include_imported=True, in__all__=True, as_refs=True)[p]
    ns['members_imports'] = _get_members(mod, include_imported=True)[p]
    ns['members_imports_refs'] = _get_members(mod, include_imported=True, as_refs=True)[p]
    ns['data'] = _get_members(mod, typ='data')[p]
    ns['doc'] = mod.__doc__
    return ns
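# For example (illustrative): a custom 'module.rst' Jinja template rendered
# with this context can loop over the collected names, e.g.
#   {% for item in classes %}{{ item }}{% endfor %}
# to list the classes of the module being documented.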
def create_package_file(root, master_package, subroot, py_files, opts, subs, is_namespace):
    # type: (unicode, unicode, unicode, List[unicode], Any, List[unicode], bool) -> None
    """Build the text of the file and write the file."""
    use_templates = False
    if opts.templates:
        use_templates = True
        template_loader = FileSystemLoader(opts.templates)
        template_env = SandboxedEnvironment(loader=template_loader)
    fullname = makename(master_package, subroot)
    text = format_heading(
        1, ('%s package' if not is_namespace else "%s namespace") % fullname)
    if opts.modulefirst and not is_namespace:
        text += format_directive(subroot, master_package)
        text += '\n'
    # build a list of directories that are subpackages (contain an INITPY file)
    subs = [sub for sub in subs if path.isfile(path.join(root, sub, INITPY))]
    # if there are some package directories, add a TOC for these subpackages
    if subs:
        text += format_heading(2, 'Subpackages')
        text += '.. toctree::\n\n'
        for sub in subs:
            text += ' %s.%s\n' % (makename(master_package, subroot), sub)
        text += '\n'
    submods = [path.splitext(sub)[0] for sub in py_files
               if not shall_skip(path.join(root, sub), opts) and
               sub != INITPY]
    if submods:
        text += format_heading(2, 'Submodules')
        if opts.separatemodules:
            text += '.. toctree::\n\n'
            for submod in submods:
                modfile = makename(master_package, makename(subroot, submod))
                text += ' %s\n' % modfile
                # generate separate file for this module
                if not opts.noheadings:
                    filetext = format_heading(1, '%s module' % modfile)
                else:
                    filetext = ''
                filetext += format_directive(makename(subroot, submod),
                                             master_package)
                if use_templates:
                    try:
                        mod_ns = _get_mod_ns(
                            name=submod, fullname=modfile,
                            includeprivate=opts.includeprivate)
                        template = template_env.get_template('module.rst')
                        filetext = template.render(**mod_ns)
                    except ImportError as e:
                        _warn('failed to import %r: %s' % (modfile, e))
                write_file(modfile, filetext, opts)
        else:
            for submod in submods:
                modfile = makename(master_package, makename(subroot, submod))
                if not opts.noheadings:
                    text += format_heading(2, '%s module' % modfile)
                text += format_directive(makename(subroot, submod),
                                         master_package)
                text += '\n'
        text += '\n'
    if use_templates:
        try:
            package_ns = _get_mod_ns(name=subroot, fullname=fullname,
                                     includeprivate=opts.includeprivate)
            package_ns['subpackages'] = subs
            package_ns['submodules'] = submods
            template = template_env.get_template('package.rst')
            text = template.render(**package_ns)
        except ImportError as e:
            _warn('failed to import %r: %s' % (fullname, e))
    else:
        if not opts.modulefirst and not is_namespace:
            text += format_heading(2, 'Module contents')
            text += format_directive(subroot, master_package)
    write_file(makename(master_package, subroot), text, opts)
def create_modules_toc_file(modules, opts, name='modules'):
    # type: (List[unicode], Any, unicode) -> None
    """Create the module's index."""
    text = format_heading(1, '%s' % opts.header)
    text += '.. toctree::\n'
    text += ' :maxdepth: %s\n\n' % opts.maxdepth
    modules.sort()
    prev_module = ''  # type: unicode
    for module in modules:
        # look if the module is a subpackage and, if yes, ignore it
        if module.startswith(prev_module + '.'):
            continue
        prev_module = module
        text += ' %s\n' % module
    write_file(name, text, opts)
def shall_skip(module, opts):
    # type: (unicode, Any) -> bool
    """Check if we want to skip this module."""
    # skip if the file doesn't exist and not using implicit namespaces
    if not opts.implicit_namespaces and not path.exists(module):
        return True
    # skip it if there is nothing (or just \n or \r\n) in the file
    if path.exists(module) and path.getsize(module) <= 2:
        return True
    # skip if it has a "private" name and this is selected
    filename = path.basename(module)
    if filename != '__init__.py' and filename.startswith('_') and \
            not opts.includeprivate:
        return True
    return False
def recurse_tree(rootpath, excludes, opts):
    # type: (unicode, List[unicode], Any) -> List[unicode]
    """
    Look for every file in the directory tree and create the corresponding
    ReST files.
    """
    # check if the base directory is a package and get its name
    if INITPY in os.listdir(rootpath):
        root_package = rootpath.split(path.sep)[-1]
    else:
        # otherwise, the base is a directory with packages
        root_package = None
    toplevels = []
    followlinks = getattr(opts, 'followlinks', False)
    includeprivate = getattr(opts, 'includeprivate', False)
    implicit_namespaces = getattr(opts, 'implicit_namespaces', False)
    for root, subs, files in walk(rootpath, followlinks=followlinks):
        # document only Python module files (that aren't excluded)
        py_files = sorted(f for f in files
                          if path.splitext(f)[1] in PY_SUFFIXES and
                          not is_excluded(path.join(root, f), excludes))
        is_pkg = INITPY in py_files
        is_namespace = INITPY not in py_files and implicit_namespaces
        if is_pkg:
            py_files.remove(INITPY)
            py_files.insert(0, INITPY)
        elif root != rootpath:
            # only accept non-package at toplevel unless using implicit namespaces
            if not implicit_namespaces:
                del subs[:]
                continue
        # remove hidden ('.') and private ('_') directories, as well as
        # excluded dirs
        if includeprivate:
            exclude_prefixes = ('.',)  # type: Tuple[unicode, ...]
        else:
            exclude_prefixes = ('.', '_')
        subs[:] = sorted(sub for sub in subs if not sub.startswith(exclude_prefixes) and
                         not is_excluded(path.join(root, sub), excludes))
        if is_pkg or is_namespace:
            # we are in a package with something to document
            if subs or len(py_files) > 1 or not shall_skip(path.join(root, INITPY), opts):
                subpackage = root[len(rootpath):].lstrip(path.sep).\
                    replace(path.sep, '.')
                # if this is not a namespace or
                # a namespace and there is something there to document
                if not is_namespace or len(py_files) > 0:
                    create_package_file(root, root_package, subpackage,
                                        py_files, opts, subs, is_namespace)
                    toplevels.append(makename(root_package, subpackage))
        else:
            # if we are at the root level, we don't require it to be a package
            assert root == rootpath and root_package is None
            if opts.templates:
                sys.path.insert(0, rootpath)
            for py_file in py_files:
                if not shall_skip(path.join(rootpath, py_file), opts):
                    module = path.splitext(py_file)[0]
                    create_module_file(root_package, module, opts)
                    toplevels.append(module)
            if opts.templates:
                sys.path.pop(0)
    return toplevels
def normalize_excludes(rootpath, excludes):
    # type: (unicode, List[unicode]) -> List[unicode]
    """Normalize the excluded directory list."""
    return [path.abspath(exclude) for exclude in excludes]
def is_excluded(root, excludes):
    # type: (unicode, List[unicode]) -> bool
    """Check if the directory is in the exclude list.

    Note: by having trailing slashes, we avoid common prefix issues, like
    e.g. an exclude "foo" also accidentally excluding "foobar".
    """
    for exclude in excludes:
        if fnmatch(root, exclude):