diff --git a/common/python/nomadcore/local_backend.py b/common/python/nomadcore/local_backend.py
index fac03a4993900f9a273130b59b813e57a2e93d0d..fe7a9bd776deb6a701a42c677b7a35cbab3bbd20 100644
--- a/common/python/nomadcore/local_backend.py
+++ b/common/python/nomadcore/local_backend.py
@@ -67,7 +67,7 @@ class LocalBackend(object):
 
         # metaInfoEnv.infoKinds.items() gives a dictionary with keys meta_info
         # and the associated value a nomadcore.local_meta_info.InfoKindEl object.
-        for ikNames, ik in metaInfoEnv.infoKinds.items():
+        for _, ik in metaInfoEnv.infoKinds.items():
             if ik.kindStr == "type_section":
                 parentS = list(metaInfoEnv.firstAncestorsByType(ik.name).get("type_section", [[]])[0])
                 parentS.sort()
@@ -75,7 +75,7 @@ class LocalBackend(object):
                     metaInfo=ik,
                     parentSectionNames=parentS, debug=self.debug)
         # We go through each key, value in the dictionary of meta infos
-        for ikNames, ik in metaInfoEnv.infoKinds.items():
+        for _, ik in metaInfoEnv.infoKinds.items():
             if ik.kindStr == "type_document_content" or ik.kindStr == "type_dimension":
                 # Now we find out what the supersections are of this meta_info
                 superSectionNames = metaInfoEnv.firstAncestorsByType(ik.name).get("type_section", [[]])[0]
@@ -191,6 +191,23 @@ class LocalBackend(object):
 
         dataManager.superSectionManager.addArrayValues(dataManager.metaInfo, values, gIndex=gIndex, **kwargs)
 
+    def get_value(self, meta_name, g_index=-1):
+        dataManager = self.results._datamanagers.get(meta_name)
+        if dataManager is None:
+            return None
+
+        return dataManager.superSectionManager.get_value(dataManager.metaInfo, g_index)
+
+    def get_sections(self, meta_name, g_index=-1):
+        if g_index == -1:
+            sections = self.results._sectionmanagers[meta_name].openSections
+        else:
+            sectionManager = self.results._sectionmanagers[meta_name]
+            parent_meta_name = self.results._sectionmanagers[meta_name].parentSectionNames[0]
+            sections = self.results._sectionmanagers[parent_meta_name].get_subsections(sectionManager.metaInfo, g_index)
+
+        return [section.gIndex for section in sections]
+
     def setSectionInfo(self, metaName, gIndex, references):
         """
         Sets info values of an open section references should be a dictionary with the
@@ -375,11 +392,11 @@ class Results(object):
             yield (name, ParserEvent.close_section, i_section)
 
             # for value_name, value_value in section.simple_values.items():
-                # yield (value_name, ParserEvent.add_value, value_value)
+            #     yield (value_name, ParserEvent.add_value, value_value)
             # for array_name, array_value in section.array_values.items():
-                # yield (array_name, ParserEvent.add_array_value, array_value)
+            #     yield (array_name, ParserEvent.add_array_value, array_value)
             # for x in self.traverse_recursive(section.subsections):
-                # yield x
+            #     yield x
 
     def print_summary(self):
         """Return a string representing the data contained in the results. This
@@ -436,11 +453,11 @@ class Results(object):
                     return
 
             if metatype == "type_section":
-                print(level *"  " + name + ":")
+                print(level * "  " + name + ":")
             elif metatype == "type_document_content":
-                print(level *"  " + name)
+                print(level * "  " + name)
             elif metatype == "type_dimension":
-                print(level *"  " + name)
+                print(level * "  " + name)
             level += 1
 
         for child in meta["children"]:
@@ -575,6 +592,16 @@ class Section(object):
         else:
             vals.append(section)
 
+    def get_value(self, metaInfo):
+        if metaInfo.name in self.simple_values:
+            return self.simple_values[metaInfo.name]
+        elif metaInfo.name in self.array_values:
+            return self.array_values[metaInfo.name]
+        raise KeyError(metaInfo.name)
+
+    def get_subsections(self, metaInfo):
+        return self.subsections[metaInfo.name]
+
 
 class SectionManager(object):
     """Manages the sections for the given metainfo.
@@ -655,6 +682,27 @@ class SectionManager(object):
         except (KeyError, IndexError):
             raise Exception("Cannot add array values for metadata %s to section %d (%d) of %s, as it is not open" % (valueMetaInfo.name, gI, gIndex, self.metaInfo.name))
 
+    def get_value(self, valueMetaInfo, g_index):
+        if (g_index == -1):
+            gI = self.lastSectionGIndex
+        else:
+            gI = g_index
+        try:
+            return self.openSections[gI].get_value(valueMetaInfo)
+        except IndexError:
+            raise IndexError("Cannot get value for metadata %s to section %d (%d) of %s, as it is not open" % (valueMetaInfo.name, gI, g_index, self.metaInfo.name))
+
+    def get_subsections(self, valueMetaInfo, g_index=-1):
+        if (g_index == -1):
+            gI = self.lastSectionGIndex
+        else:
+            gI = g_index
+
+        try:
+            return self.openSections[gI].get_subsections(valueMetaInfo)
+        except (KeyError, IndexError):
+            return []
+
 
 class DataManager(object):
     """Stores the parent (SectionManager) for the given metainfo.
diff --git a/common/python/nomadcore/simple_parser.py b/common/python/nomadcore/simple_parser.py
index a544087247611a771bfeb0a37e0c35fbaf1210a8..85bb346c9da2c365178ef733d5725729eb0350f9 100644
--- a/common/python/nomadcore/simple_parser.py
+++ b/common/python/nomadcore/simple_parser.py
@@ -1,35 +1,41 @@
-from __future__ import division
-from future import standard_library
-standard_library.install_aliases()
-from builtins import str
-from builtins import range
-from builtins import object
 # todo: detect loops generated by forwardMatch
 # debugging parser (side by side)
 # unit conversion
 # caching and section triggers
 
 from collections import OrderedDict
-import re, sys, os
+import types
+import re
+import sys
+import os
 import json
-import logging, traceback, inspect
+import logging
+import traceback
+import inspect
+import io
+from urllib.parse import urlparse, urlunparse
+from builtins import str
+from builtins import range
+from builtins import object
+
 from nomadcore.json_support import jsonIndentF
 from nomadcore.parser_backend import JsonParseEventsWriterBackend
 from nomadcore.parse_streamed_dicts import ParseStreamedDicts
 from nomadcore.unit_conversion import unit_conversion
 from nomadcore.caching_backend import CachingLevel, ActiveBackend
 from nomadcore.annotator import Annotator
-import io
-from future.standard_library import install_aliases
-install_aliases()
-from urllib.parse import urlparse, urlunparse
+
 
 logger = logging.getLogger("nomadcore.simple_parser")
+logger.debug = types.MethodType(lambda *args, **kwargs: None, logger)
+
 annotate = False
 
+
 class SkipFileException(Exception):
     pass
 
+
 class PushbackLineFile(object):
     """a file interface where it is possible to put back read lines"""
 
@@ -62,19 +68,27 @@ class PushbackLineFile(object):
         self.lines.append(line)
         self.lineNr -= 1
 
+
 def uriFromRelPath(baseUri, basePath, newPath):
-    """returns an uri corresponding to newPath assuming that base path has uri baseUri.
-This will never change the net location (archive)."""
+    """
+    Returns a URI corresponding to newPath assuming that basePath has URI baseUri.
+
+    This will never change the net location (archive).
+    """
     p1 = os.path.normpath(os.path.abspath(basePath))
     p2 = os.path.normpath(os.path.abspath(newPath))
-    rPath = os.path.relpath(p2,p1)
+    rPath = os.path.relpath(p2, p1)
     bUri = urlparse(baseUri)
-    nUri = bUri._replace(path = os.path.normpath(os.path.join(bUri.path, rPath)))
+    nUri = bUri._replace(path=os.path.normpath(os.path.join(bUri.path, rPath)))
     return urlunparse(nUri)
 
+
 class SimpleMatcher(object):
-    """A Something that matches either a single line, or multiple lines grouped together.
-    This is the base of a declarative parser."""
+    """
+    Something that matches either a single line or multiple lines grouped together.
+
+    This is the base of a declarative parser.
+    """
     class SubFlags(object):
         Sequenced = 0    # the subMatchers should be executed in sequence
         Unordered = 1    # the subMatchers can be in any order
@@ -82,26 +96,29 @@ class SimpleMatcher(object):
         # if the matcher has weak == true, this matcher should nevel steal
         # the matcher can/should repeat immediately
 
-    def __init__(self, startReStr, endReStr = None, subMatchers = tuple(), sections = tuple(),
-                 subFlags = SubFlags.Sequenced,
-                 weak = False,     # this matcher should not "steal" the position
-                 repeats = False,  # this matcher is expected to repeat
-                 required = False, # this value is required to have been matched on section close
-                 floating = False, # this section goes not steal the context, useful for example for low level debugging/error messages.
+    def __init__(self, startReStr, endReStr=None, subMatchers=tuple(), sections=tuple(),
+                 subFlags=SubFlags.Sequenced,
+                 weak=False,     # this matcher should not "steal" the position
+                 repeats=False,  # this matcher is expected to repeat
+                 required=False,  # this value is required to have been matched on section close
+                 # this section does not steal the context, useful for example for low level debugging/error messages.
+                 floating=False,
                  # valid from the point they are read until the exit from the enclosing section
-                 forwardMatch = False, # if start match should not eat input, but be forwarded to adHoc and subMatchers
-                 name = "",
-                 adHoc = None,
-                 otherMetaInfo = tuple(), # The metainfos that are later manually added ot the backend
-                 fixedStartValues = None,
-                 fixedEndValues = None,
-                 dependencies = None,
+                 # if start match should not eat input, but be forwarded to adHoc and subMatchers
+                 forwardMatch=False,
+                 name="",
+                 adHoc=None,
+                 otherMetaInfo=tuple(),  # The metainfos that are later manually added to the backend
+                 fixedStartValues=None,
+                 fixedEndValues=None,
+                 dependencies=None,
                  defLine=0,
                  defFile='',
-                 coverageIgnore=False, # mark line as ignored in coverage analysis
-                 onClose = None,   # A dictionary of onClose callbacks that are specific to this SimpleMatcher
-                 onOpen = None,   # A dictionary of onOpen callbacks that are specific to this SimpleMatcher
-                 startReAction = None,  # A callback function that is called when the starting regex is matcher. If the regex has any capturing groups, they will be provided as well to this function as parameter called "groups".
+                 coverageIgnore=False,  # mark line as ignored in coverage analysis
+                 onClose=None,   # A dictionary of onClose callbacks that are specific to this SimpleMatcher
+                 onOpen=None,   # A dictionary of onOpen callbacks that are specific to this SimpleMatcher
+                 # A callback function that is called when the starting regex is matched. If the regex has any capturing groups, they will be provided as well to this function as parameter called "groups".
+                 startReAction=None,
                  ):
         self.index = -1
         self.startReStr = startReStr
@@ -126,7 +143,8 @@ class SimpleMatcher(object):
         self.onClose = onClose
         self.onOpen = onOpen
         self.startReAction = startReAction
-        self.keep = False   # Boolean flag used by the ParserOptimizer to determine which SimpleMatchers to keep
+        # Boolean flag used by the ParserOptimizer to determine which SimpleMatchers to keep
+        self.keep = False
         # boolean flag to signal that this SimpleMatcher does not have any
         # effect (besides progressing input file):
         #   - no data extracted
@@ -134,37 +152,38 @@ class SimpleMatcher(object):
         #   - no adHoc
         #   - no sections
         #   - no startReActions
-        self.does_nothing = (len(subMatchers) == 0 and
-                             len(sections) == 0 and
-                             fixedStartValues is None and
-                             fixedEndValues is None and
-                             adHoc is None and
-                             startReAction is None)
+        self.does_nothing = \
+            len(subMatchers) == 0 and \
+            len(sections) == 0 and \
+            fixedStartValues is None and \
+            fixedEndValues is None and \
+            adHoc is None and \
+            startReAction is None
+
         if self.does_nothing:
-            if startReStr is not None and len(extractGroupNames(startReStr))>0:
+            if startReStr is not None and len(extractGroupNames(startReStr)) > 0:
                 self.does_nothing = False
-            elif endReStr is not None and len(extractGroupNames(endReStr))>0:
+            elif endReStr is not None and len(extractGroupNames(endReStr)) > 0:
                 self.does_nothing = False
 
         self.coverageIgnore = coverageIgnore
         if coverageIgnore and not self.does_nothing:
             logger.debug(
-                "SimpleMatcher '%s' has an effect " +
-                "and is marked as coverageIgnore", name)
+                "SimpleMatcher '%s' has an effect and is marked as coverageIgnore", name)
 
         self.coverageIgnore = coverageIgnore
-        caller=inspect.currentframe()
+        caller = inspect.currentframe()
         if (defFile == '') and (defLine == 0):
             if (caller is not None) and (caller.f_back is not None):
                 callerInfo = inspect.getframeinfo(caller.f_back)
-                self.defFile=callerInfo.filename
-                self.defLine=callerInfo.lineno
+                self.defFile = callerInfo.filename
+                self.defLine = callerInfo.lineno
                 # logger.debug('caller frame info %s' % str(callerInfo))
             else:
                 logger.info('could not get caller info')
         else:
-            self.defFile=defFile
-            self.defLine=defLine
+            self.defFile = defFile
+            self.defLine = defLine
         for g in subMatchers:
             if g.superMatcher is None:
                 g.superMatcher = self
@@ -175,28 +194,27 @@ class SimpleMatcher(object):
     def copy(self):
         """creates a deep copy (but with no index, and in the top level copy no superMatcher)"""
         simpleMatcher = SimpleMatcher(
-            startReStr = self.startReStr,
-            endReStr = self.endReStr,
-            subMatchers = [x.copy() for x in self.subMatchers],
-            sections = self.sections,
-            subFlags = self.subFlags,
-            weak = self.weak,
-            repeats = self.repeats,
-            required = self.required,
-            floating = self.floating,
-            forwardMatch = self.forwardMatch,
-            adHoc = self.adHoc,
-            otherMetaInfo = self.otherMetaInfo,
-            name = self.name,
-            fixedStartValues = self.fixedStartValues,
-            fixedEndValues = self.fixedEndValues,
-            dependencies = self.dependencies,
-            defLine = self.defLine,
-            defFile = self.defFile,
-            onClose = self.onClose,
-            onOpen = self.onOpen,
-            startReAction = self.startReAction,
-            )
+            startReStr=self.startReStr,
+            endReStr=self.endReStr,
+            subMatchers=[x.copy() for x in self.subMatchers],
+            sections=self.sections,
+            subFlags=self.subFlags,
+            weak=self.weak,
+            repeats=self.repeats,
+            required=self.required,
+            floating=self.floating,
+            forwardMatch=self.forwardMatch,
+            adHoc=self.adHoc,
+            otherMetaInfo=self.otherMetaInfo,
+            name=self.name,
+            fixedStartValues=self.fixedStartValues,
+            fixedEndValues=self.fixedEndValues,
+            dependencies=self.dependencies,
+            defLine=self.defLine,
+            defFile=self.defFile,
+            onClose=self.onClose,
+            onOpen=self.onOpen,
+            startReAction=self.startReAction)
         simpleMatcher.keep = self.keep
         return simpleMatcher
 
@@ -237,7 +255,8 @@ class SimpleMatcher(object):
         """calculates the flat index of this and each sub matcher"""
         flatIndex.append(self)
         if self.index != -1:
-            raise Exception("matcher already had index %d, now inserted at %d" % (self.index, len(flatIndex) - 1))
+            raise Exception("matcher already had index %d, now inserted at %d" % (
+                self.index, len(flatIndex) - 1))
         self.index = len(flatIndex) - 1
         for m in self.subMatchers:
             m.calcIndexes(flatIndex)
@@ -273,7 +292,7 @@ class SimpleMatcher(object):
             m.allMetaNames(metaNames)
         return metaNames
 
-    def allSections(self, sections = None):
+    def allSections(self, sections=None):
         """all the names of the sections that might be opened by this matcher"""
         if sections is None:
             sections = set()
@@ -292,14 +311,14 @@ class SimpleMatcher(object):
 
         # First print sections
         if sections:
-            logger.info((level)*"  " + "SECTIONS: " + str(sections))
+            logger.info((level) * "  " + "SECTIONS: " + str(sections))
 
         # Then print metanames. Indentation increases if a section has also been opened.
         metaname_level = level
         if sections and metanames:
             metaname_level += 1
         if metanames:
-            logger.info(metaname_level*"  " + "METAINFOS: " + str(metanames))
+            logger.info(metaname_level * "  " + "METAINFOS: " + str(metanames))
 
         # Recursion
         new_level = level
@@ -312,8 +331,8 @@ class SimpleMatcher(object):
 def closingParen(regex, i, nparen):
     """returns the position after the closing parethesis"""
     j = i
-    l = len(regex)
-    while j < l:
+    length = len(regex)
+    while j < length:
         c = regex[j]
         j += 1
         if c == '\\':
@@ -324,7 +343,7 @@ def closingParen(regex, i, nparen):
             nparen -= 1
             if nparen <= 0:
                 return j
-    return l
+    return length
 
 
 def disableGroups(regex):
@@ -333,9 +352,9 @@ def disableGroups(regex):
     j = 0
     res = ""
     flags = ""
-    l = len(regex)
+    length = len(regex)
     try:
-        while j < l:
+        while j < length:
             c = regex[j]
             j += 1
             if c == '\\':
@@ -344,23 +363,23 @@ def disableGroups(regex):
                     raise Exception('backreferneces not supported')
                 j += 1
             elif c == '(':
-                if j >= l:
+                if j >= length:
                     break
                 c = regex[j]
                 j += 1
                 if c == '?':
-                    if j >= l:
+                    if j >= length:
                         break
                     c = regex[j]
                     j += 1
-                    if c == ':' :
+                    if c == ':':
                         pass
                     elif c == 'P':
                         c = regex[j]
                         j += 1
                         if c == '<':
-                            res += regex[i : j - 2] + ":"
-                            k=regex.find('>',j)
+                            res += regex[i: j - 2] + ":"
+                            k = regex.find('>', j)
                             if k == -1:
                                 raise Exception("expected (P<xx>)")
                             i = k + 1
@@ -368,23 +387,26 @@ def disableGroups(regex):
                         elif c == '=':
                             raise Exception('backreferneces not supported')
                         else:
-                            raise Exception('unexpected character sequence "(P%s"' % c)
+                            raise Exception(
+                                'unexpected character sequence "(P%s"' % c)
                     elif c in 'im':
                         if c == 'i':
                             flags += 'i'
-                        res += regex[i : j - 3]
-                        while j < l:
+                        res += regex[i: j - 3]
+                        while j < length:
                             c = regex[j]
                             j += 1
                             if c == ')':
                                 break
-                            if c ==  'i':
+                            if c == 'i':
                                 flags += 'i'
                             elif not c == 'm':
-                                raise Exception('Regexp flag %s is not supported by disableGroups only i (ignore case) is supported, and m (multiline) is the default' % c)
+                                raise Exception(
+                                    'Regexp flag %s is not supported by disableGroups only i (ignore case) is supported, and m (multiline) is the default' % c)
                         i = j
                     elif c in 'Lsux':
-                        raise Exception('Regexp flag %s is not supported by disableGroups' % c)
+                        raise Exception(
+                            'Regexp flag %s is not supported by disableGroups' % c)
                     elif c == '#':
                         res += regex[i]
                         j = closingParen(regex, j, 1)
@@ -392,23 +414,27 @@ def disableGroups(regex):
                     elif c in '=!':
                         pass
                     elif c == '<':
-                        if j < l:
+                        if j < length:
                             c = regex[j]
                             j += 1
                             if c in '=!':
-                                raise Exception("Regexp sequence (?<%s...) unknwon" % c)
+                                raise Exception(
+                                    "Regexp sequence (?<%s...) unknwon" % c)
                     elif c == '(':
-                        raise Exception("boolean matching not supported by disableGroups")
+                        raise Exception(
+                            "boolean matching not supported by disableGroups")
                     else:
-                        raise Exception("unsupported character "+c+" after (?")
+                        raise Exception("unsupported character " + c + " after (?")
                 else:
-                    res += regex[i : j - 1] + "?:"
-                    i = j -1
+                    res += regex[i: j - 1] + "?:"
+                    i = j - 1
     except Exception as e:
-        raise Exception("disableGroups failure on '%s' at %d" % (regex, j - 1), e)
+        raise Exception("disableGroups failure on '%s' at %d" %
+                        (regex, j - 1), e)
     res += regex[i:]
     return res
 
+
 def decodeUnits(units):
     """decodes units: _ between letters is a *, _ before a number is a -, and ^ is inserted before numbers
     for example "m_s_1" becomes m*s^-1"""
@@ -420,10 +446,10 @@ def decodeUnits(units):
     underscoreAfterChar = 3
     underscoreAfterNumber = 4
     state = chars
-    l = len(units)
-    while j < l:
+    length = len(units)
+    while j < length:
         if state == chars:
-            while j < l:
+            while j < length:
                 c = units[j]
                 j += 1
                 if c.isalpha():
@@ -440,9 +466,10 @@ def decodeUnits(units):
                     state = underscoreAfterChar
                     break
                 else:
-                    raise Exception("invalid char %r at %d while decoding units from %r" % (c, j - 1, units))
+                    raise Exception(
+                        "invalid char %r at %d while decoding units from %r" % (c, j - 1, units))
         elif state == number:
-            while j < l:
+            while j < length:
                 c = units[j]
                 j += 1
                 if c.isalpha():
@@ -459,9 +486,10 @@ def decodeUnits(units):
                     state = underscoreAfterNumber
                     break
                 else:
-                    raise Exception("invalid char %r at %d while decoding units from %r" % (c, j - 1, units))
+                    raise Exception(
+                        "invalid char %r at %d while decoding units from %r" % (c, j - 1, units))
         elif state == underscoreAfterChar:
-            while j < l:
+            while j < length:
                 c = units[j]
                 j += 1
                 if c.isalpha():
@@ -475,9 +503,10 @@ def decodeUnits(units):
                     state = number
                     break
                 else:
-                    raise Exception("invalid char %r at %d while decoding units from %r" % (c, j - 1, units))
+                    raise Exception(
+                        "invalid char %r at %d while decoding units from %r" % (c, j - 1, units))
         elif state == underscoreAfterNumber:
-            while j < l:
+            while j < length:
                 c = units[j]
                 j += 1
                 if c.isalpha():
@@ -486,25 +515,28 @@ def decodeUnits(units):
                     state = chars
                     break
                 else:
-                    raise Exception("invalid char %r at %d while decoding units from %r" % (c, j - 1, units))
+                    raise Exception(
+                        "invalid char %r at %d while decoding units from %r" % (c, j - 1, units))
         else:
-            raise Exception("invalid state %s at %d while decoding units from %r" % (state, j - 1, units))
+            raise Exception(
+                "invalid state %s at %d while decoding units from %r" % (state, j - 1, units))
     if state == chars:
         res.append(units[i:])
     elif state == number:
         res.append(units[i:])
     else:
-        raise Exception("invalid state %s at end of decodeUnits(%r)" % (state, units))
+        raise Exception(
+            "invalid state %s at end of decodeUnits(%r)" % (state, units))
     return "".join(res)
 
+
 def extractGroupNames(regex):
     """extracts all group names from a regex"""
     j = 0
     groupNames = []
-    flags = ""
-    l = len(regex)
+    length = len(regex)
     try:
-        while j < l:
+        while j < length:
             c = regex[j]
             j += 1
             if c == '\\':
@@ -513,27 +545,28 @@ def extractGroupNames(regex):
                     raise Exception('backreferences not supported')
                 j += 1
             elif c == '(':
-                if j >= l:
+                if j >= length:
                     break
                 c = regex[j]
                 j += 1
                 if c == '?':
-                    if j >= l:
+                    if j >= length:
                         break
                     c = regex[j]
                     j += 1
-                    if c == ':' :
+                    if c == ':':
                         pass
                     elif c == 'P':
                         c = regex[j]
                         j += 1
                         if c == '<':
-                            k=regex.find('>',j)
+                            k = regex.find('>', j)
                             if k == -1:
                                 raise Exception("expected (P<xx>)")
-                            gs = regex[j : k].split("__")
+                            gs = regex[j: k].split("__")
                             if len(gs) > 2:
-                                raise Exception("only a __ is accepted in a group name (to add units), got %s" % regex[j : k])
+                                raise Exception(
+                                    "only a __ is accepted in a group name (to add units), got %s" % regex[j: k])
                             if len(gs) == 2:
                                 unit = decodeUnits(gs[1])
                             else:
@@ -542,39 +575,45 @@ def extractGroupNames(regex):
                             groupNames.append(gAndUnit)
                             j = k + 1
     except Exception as e:
-        raise Exception("extractGroupNames failure on '%s' at %d" % (regex, j - 1), e)
+        raise Exception("extractGroupNames failure on '%s' at %d" %
+                        (regex, j - 1), e)
     return groupNames
 
+
 class CompiledMatcher(object):
 
     def matchersToRe(self, possibleNexts):
         "builds a unique regular expression for the given possible matchers"
         res = []
         for startEnd, nextMatcher in possibleNexts:
-            baseRe = disableGroups(nextMatcher.startReStr if startEnd == 0 else nextMatcher.endReStr)
+            baseRe = disableGroups(
+                nextMatcher.startReStr if startEnd == 0 else nextMatcher.endReStr)
             newRe = "(?P<i%d>%s)" % (nextMatcher.index * 2 + startEnd, baseRe)
             res.append(newRe)
         return re.compile("|".join(res))
 
     def __init__(self, matcher, possibleNexts, possibleNextsEnd, parserBuilder):
         self.matcher = matcher
-        self.groupname = [[],[]]
+        self.groupname = [[], []]
         self.startRe = re.compile(matcher.startReStr)
-        self.groupname[0].extend(str(num) for num in range(1, self.startRe.groups+1))
+        self.groupname[0].extend(str(num)
+                                 for num in range(1, self.startRe.groups + 1))
         for name, num in self.startRe.groupindex.items():
-            self.groupname[0][num-1] = name
+            self.groupname[0][num - 1] = name
         self.endRe = None
         if matcher.endReStr:
             self.endRe = re.compile(matcher.endReStr)
-            self.groupname[1].extend(str(num) for num in range(1, self.endRe.groups+1))
+            self.groupname[1].extend(str(num)
+                                     for num in range(1, self.endRe.groups + 1))
             for name, num in self.endRe.groupindex.items():
-                self.groupname[1][num-1] = name
+                self.groupname[1][num - 1] = name
         self.floating = matcher.floating
         currentM = matcher.superMatcher
         while currentM:
             if currentM.floating:
                 if self.floating:
-                    raise Exception("nesting of floating in floating not supported (%s in %s)" % (currentM.desc(), matcher.desc()))
+                    raise Exception("nesting of floating in floating not supported (%s in %s)" % (
+                        currentM.desc(), matcher.desc()))
                 self.floating = True
             currentM = currentM.superMatcher
         self.possibleNexts = possibleNexts
@@ -602,7 +641,8 @@ class CompiledMatcher(object):
                 # If there is a metainfo-specific unit conversion, use it
                 metainfo_unit = None
                 if parserBuilder.metainfo_units is not None:
-                    metainfo_unit = parserBuilder.metainfo_units.get(metaInfo.name)
+                    metainfo_unit = parserBuilder.metainfo_units.get(
+                        metaInfo.name)
                     if metainfo_unit is not None:
                         target_unit = metainfo_unit
                 # If there is a dimension-specific unit conversion, use it
@@ -612,7 +652,8 @@ class CompiledMatcher(object):
                     if map_unit:
                         target_unit = map_unit
 
-                converters[gName] = unit_conversion.convert_unit_function(units, target_unit)
+                converters[gName] = unit_conversion.convert_unit_function(
+                    units, target_unit)
         self.converters = converters
         self.transformers = transformers
 
@@ -649,10 +690,12 @@ class CompiledMatcher(object):
 
     def handleStartMatch(self, parser):
         line = parser.fIn.readline()
-        logger.debug("handleStartMatch of %s on (%s)", self.matcher.desc(),line)
-        m = self.startRe.match(line) #, re.MULTILINE)
+        logger.debug("handleStartMatch of %s on (%s)",
+                     self.matcher.desc(), line)
+        m = self.startRe.match(line)  # , re.MULTILINE)
         if not m:
-            raise Exception("Expected to match %s on %r" % (self.startRe.pattern, line))
+            raise Exception("Expected to match %s on %r" %
+                            (self.startRe.pattern, line))
         self.handleMatchTelemetry(parser, m, line, 0)
         result_dict = {}
 
@@ -677,24 +720,25 @@ class CompiledMatcher(object):
         # If the match needs to be forwarded, push the read line back into the
         # file stream
         if self.matcher.forwardMatch:
-            logger.debug("handleStartMatch of %s on (%s) pushing back line", self.matcher.desc(),line)
+            logger.debug(
+                "handleStartMatch of %s on (%s) pushing back line", self.matcher.desc(), line)
             parser.fIn.pushbackLine(line)
 
         return result_dict
 
     def handleEndMatch(self, parser):
         line = parser.fIn.readline()
-        m = self.endRe.match(line) #, re.MULTILINE)
+        m = self.endRe.match(line)  # , re.MULTILINE)
         self.handleMatchTelemetry(parser, m, line, 1)
         result_dict = {}
-        for k,v in sorted(m.groupdict().items()):
+        for k, v in sorted(m.groupdict().items()):
             if v is None:
                 # a group may be optional (subexpression of ? or | in regex)
                 continue
             k_converted, v_converted = self.addStrValue(parser.backend, k, v)
             result_dict[k_converted] = v_converted
         if self.matcher.fixedEndValues:
-            for k,v in sorted(self.matcher.fixedEndValues.items()):
+            for k, v in sorted(self.matcher.fixedEndValues.items()):
                 if k not in result_dict:
                     v_converted = self.addValue(parser.backend, k, v)
                     result_dict[k] = v_converted
@@ -709,9 +753,7 @@ class CompiledMatcher(object):
             line = parser.fIn.readline()
             if not line:
                 return -1
-            m = regex.match(line) #, re.MULTILINE)
-            matchedFile=''
-            matchedLine=0
+            m = regex.match(line)  # , re.MULTILINE)
             if m:
                 # logger.debug("line %d has a match for %d: %s", parser.fIn.lineNr, self.matcher.index, m.groupdict())
                 groups = m.groups()
@@ -721,7 +763,8 @@ class CompiledMatcher(object):
                         targetStartEnd, targetMatcher = possible[i]
                         nextI = 2 * targetMatcher.index + targetStartEnd
                         if m.group("i%s" % nextI) is None:
-                            raise Exception("inconsistent match (group name vs group pos)")
+                            raise Exception(
+                                "inconsistent match (group name vs group pos)")
                         # The call to self.annotate needs to be in
                         # handleStartMatch/handleEndMatch, where the
                         # original RE is applied, including all capturing groups
@@ -753,12 +796,12 @@ class CompiledMatcher(object):
             'matcherName': self.matcher.name,
             'defFile': self.matcher.defFile,
             'defLine': self.matcher.defLine,
-            'matcher_does_nothing': self.matcher.does_nothing, # matcher without effect
+            'matcher_does_nothing': self.matcher.does_nothing,  # matcher without effect
             'which_re': 'end' if targetStartEnd else 'start',
             # classification of match
             'matchFlags': 0,
-            'match': 0, # 0 - no, 1 - partial, 3 - full
-            'coverageIgnore': 0, # 0 - no, 1 - local, 3 - global
+            'match': 0,  # 0 - no, 1 - partial, 3 - full
+            'coverageIgnore': 0,  # 0 - no, 1 - local, 3 - global
             # overall span of match, and spans of group captures
             'span': [],
             # capture group names
@@ -767,30 +810,32 @@ class CompiledMatcher(object):
         if match:
             span = match.span()
             if span[0] == 0 and span[1] == len(fInLine):
-                result['match'] = 3 # full match
+                result['match'] = 3  # full match
             else:
-                result['match'] = 1 # partial match
-            if self.matcher.coverageIgnore: # matcher is local coverageIgnore
+                result['match'] = 1  # partial match
+            if self.matcher.coverageIgnore:  # matcher is local coverageIgnore
                 result['coverageIgnore'] = 1
             result['matcherGroup'] = self.groupname[targetStartEnd]
             # Forward compatibility with 'regex' or 're2', which support
             # multiple captures per group:
             #   span: list of lists of tuples: [groups, captures, 2]
-            result['span'] = [[span]] + list([] for i in range(match.re.groups))
+            result['span'] = [[span]] + list([]
+                                             for i in range(match.re.groups))
             # capture groups
-            for groupi in range(1, match.re.groups+1):
+            for groupi in range(1, match.re.groups + 1):
                 if match.group(groupi) is not None:
                     result['span'][groupi].append(match.span(groupi))
         else:
-            m_ci = parser.coverageIgnore.match(fInLine) # check global coverageIgnore
+            m_ci = parser.coverageIgnore.match(
+                fInLine)  # check global coverageIgnore
             if m_ci:
                 result['coverageIgnore'] = 3
-                span =  m_ci.span()
+                span = m_ci.span()
                 result['span'] = [[span]]
                 if span[0] == 0 and span[1] == len(fInLine):
-                    result['match'] = 3 # full match
+                    result['match'] = 3  # full match
                 else:
-                    result['match'] = 1 # partial match
+                    result['match'] = 1  # partial match
                 result['matcher_does_nothing'] = True
                 result['matcherName'] = 'coverageIgnore'
         # flatten span:
@@ -810,26 +855,32 @@ class CompiledMatcher(object):
             for s in g:
                 span_flat.extend(s)
         result['matchSpansFlat'] = span_flat
-        result['matchFlags'] = (
-            result['match'] | (result['coverageIgnore'] << 2) |
-            targetStartEnd << 5 | int(result['matcher_does_nothing'] and result['match']) << 6)
+        result['matchFlags'] = \
+            result['match'] | \
+            (result['coverageIgnore'] << 2) | \
+            targetStartEnd << 5 | \
+            int(result['matcher_does_nothing'] and result['match']) << 6
         return result
 
     def handleMatchTelemetry(self, parser, match, line, targetStartEnd):
-        match_telemetry = self.getMatchTelemetry(parser, match, line, targetStartEnd)
+        match_telemetry = self.getMatchTelemetry(
+            parser, match, line, targetStartEnd)
         parser.updateParsingStats(match_telemetry)
         parser.backend.addMatchTelemetry(match_telemetry)
         parser.annotator.annotate(match_telemetry)
 
-class ParsingContext(object): # use slots?
+
+class ParsingContext(object):  # use slots?
     Start = 0
     End = 1
+
     def __init__(self, index, sections, compiledMatcher, startEnd):
         self.index = index
         self.startEnd = startEnd
         self.sections = sections
         self.compiledMatcher = compiledMatcher
 
+
 def extractOnCloseTriggers(obj):
     """extracts all triggers from the object obj
 
@@ -843,6 +894,7 @@ def extractOnCloseTriggers(obj):
             triggers[name] = getattr(obj, attr)
     return triggers
 
+
 def extractOnOpenTriggers(obj):
     """extracts all triggers from the object obj
 
@@ -856,6 +908,7 @@ def extractOnOpenTriggers(obj):
             triggers[name] = getattr(obj, attr)
     return triggers
 
+
 def extractStrValueTransforms(obj):
     """extracts all string-to-value transformations from obj
 
@@ -880,6 +933,7 @@ def extractStrValueTransforms(obj):
             transform[name] = [callback, units]
     return transform
 
+
 class SimpleParserBuilder(object):
     def __init__(self, rootMatcher, metaInfoEnv, metaInfoToKeep=None, default_units=None, metainfo_units=None, strValueTransform=None):
         """
@@ -909,21 +963,22 @@ class SimpleParserBuilder(object):
         missingSections = {}
         for matcher in self.flatIndex:
             for metaName in matcher.directMetaNames():
-                if not metaName in self.metaInfoEnv:
+                if metaName not in self.metaInfoEnv:
                     ms = missingMeta.get(metaName, None)
                     if ms:
                         ms.append(matcher)
                     else:
                         missingMeta[metaName] = [matcher]
             for sectionName in matcher.sections:
-                if not sectionName in self.metaInfoEnv:
+                if sectionName not in self.metaInfoEnv:
                     ms = missingSections.get(sectionName, None)
                     if ms:
                         ms.append(matcher)
                     else:
                         missingSections[sectionName] = [matcher]
         for metaName, matchers in missingMeta.items():
-            outF.write("Meta info %s is not in the meta info, but is used in the following matchers:\n" % metaName)
+            outF.write(
+                "Meta info %s is not in the meta info, but is used in the following matchers:\n" % metaName)
             for m in matchers:
                 outF.write("  ")
                 outF.write(m.desc())
@@ -937,7 +992,8 @@ class SimpleParserBuilder(object):
   }
 """ % (metaName))
         for sectionName, matchers in missingSections.items():
-            outF.write("A section named %s is not defined in the meta info, but is used in the following matchers:\n" % sectionName)
+            outF.write(
+                "A section named %s is not defined in the meta info, but is used in the following matchers:\n" % sectionName)
             for m in matchers:
                 outF.write("  ")
                 outF.write(m.desc())
@@ -963,19 +1019,19 @@ class SimpleParserBuilder(object):
             if first:
                 first = False
                 if m.floating:
-                    floatingMatchers.append((ParsingContext.Start,m))
+                    floatingMatchers.append((ParsingContext.Start, m))
                 else:
-                    firstMatchers.append((ParsingContext.Start,m))
+                    firstMatchers.append((ParsingContext.Start, m))
             else:
                 if m.floating:
-                    floatingMatchers.append((ParsingContext.Start,m))
+                    floatingMatchers.append((ParsingContext.Start, m))
                 elif not m.weak:
-                    firstMatchers.append((ParsingContext.Start,m))
-        if matcher.endReStr: # move before the sub matchers?
+                    firstMatchers.append((ParsingContext.Start, m))
+        if matcher.endReStr:  # move before the sub matchers?
             firstMatchers.append((ParsingContext.End, matcher))
         if matcher.repeats:
-            firstMatchers.append((ParsingContext.Start,matcher))
-            firstMatchersEnd.append((ParsingContext.Start,matcher))
+            firstMatchers.append((ParsingContext.Start, matcher))
+            firstMatchersEnd.append((ParsingContext.Start, matcher))
         if superMatcher is not None:
             subFlags = superMatcher.subFlags
             if subFlags == SimpleMatcher.SubFlags.Sequenced:
@@ -984,47 +1040,55 @@ class SimpleParserBuilder(object):
                     if m == matcher:
                         found = 1
                     elif m.floating:
-                        floatingMatchers.append((ParsingContext.Start,m))
-                        floatingMatchersEnd.append((ParsingContext.Start,m))
+                        floatingMatchers.append((ParsingContext.Start, m))
+                        floatingMatchersEnd.append((ParsingContext.Start, m))
                         if found == 1:
                             found += 1
                     elif found == 1:
                         if not m.weak or not matcher.subMatchers:
-                            firstMatchers.append((ParsingContext.Start,m))
-                        firstMatchersEnd.append((ParsingContext.Start,m))
+                            firstMatchers.append((ParsingContext.Start, m))
+                        firstMatchersEnd.append((ParsingContext.Start, m))
                         found += 1
                     elif found > 0:
                         if not m.weak:
-                            firstMatchers.append((ParsingContext.Start,m))
-                            firstMatchersEnd.append((ParsingContext.Start,m))
+                            firstMatchers.append((ParsingContext.Start, m))
+                            firstMatchersEnd.append((ParsingContext.Start, m))
             elif subFlags == SimpleMatcher.SubFlags.Unordered:
                 for m in superMatcher.subMatchers:
                     if m == matcher:
                         continue
                     if m.weak:
-                        raise Exception("Weak not supported in unordered superMatcher for " + m.desc())
+                        raise Exception(
+                            "Weak not supported in unordered superMatcher for " + m.desc())
                     if m.floating:
-                        floatingMatchers.append((ParsingContext.Start,m))
-                        floatingMatchersEnd.append((ParsingContext.Start,m))
+                        floatingMatchers.append((ParsingContext.Start, m))
+                        floatingMatchersEnd.append((ParsingContext.Start, m))
                     else:
-                        firstMatchers.append((ParsingContext.Start,m))
-                        firstMatchersEnd.append((ParsingContext.Start,m))
+                        firstMatchers.append((ParsingContext.Start, m))
+                        firstMatchersEnd.append((ParsingContext.Start, m))
             else:
                 raise Exception("unknown subFlags %d" % subFlags)
-        logger.debug("firstMatchers: %s", [(x[0],x[1].index) for x in firstMatchers])
-        logger.debug("firstMatchersEnd %s", [(x[0],x[1].index) for x in firstMatchersEnd])
-        logger.debug("inheritedStealingMatchers\n %s", [(x[0],x[1].index) for x in inheritedStealingMatchers])
-        logger.debug("floatingMatchers %s", [(x[0],x[1].index) for x in floatingMatchers])
-        logger.debug("floatingMatchersEnd %s", [(x[0],x[1].index) for x in floatingMatchersEnd])
-        logger.debug("inheritedFloatingMatchers %s", [(x[0],x[1].index) for x in inheritedFloatingMatchers])
+        logger.debug("firstMatchers: %s", [
+                     (x[0], x[1].index) for x in firstMatchers])
+        logger.debug("firstMatchersEnd %s", [
+                     (x[0], x[1].index) for x in firstMatchersEnd])
+        logger.debug("inheritedStealingMatchers\n %s", [
+                     (x[0], x[1].index) for x in inheritedStealingMatchers])
+        logger.debug("floatingMatchers %s", [
+                     (x[0], x[1].index) for x in floatingMatchers])
+        logger.debug("floatingMatchersEnd %s", [
+                     (x[0], x[1].index) for x in floatingMatchersEnd])
+        logger.debug("inheritedFloatingMatchers %s", [
+                     (x[0], x[1].index) for x in inheritedFloatingMatchers])
         self.compiledMatchers[matcher.index] = CompiledMatcher(
-            matcher = matcher,
-            possibleNexts = firstMatchers + inheritedStealingMatchers + floatingMatchers + inheritedFloatingMatchers,
-            possibleNextsEnd = firstMatchersEnd + inheritedStealingMatchers + floatingMatchersEnd + inheritedFloatingMatchers,
-            parserBuilder = self)
+            matcher=matcher,
+            possibleNexts=firstMatchers + inheritedStealingMatchers + floatingMatchers + inheritedFloatingMatchers,
+            possibleNextsEnd=firstMatchersEnd + inheritedStealingMatchers + floatingMatchersEnd + inheritedFloatingMatchers,
+            parserBuilder=self)
 
     def scanMatcher(self, matcher, inheritedStealingMatchers, inheritedFloatingMatchers):
-        self.handleMatcher(matcher, inheritedStealingMatchers, inheritedFloatingMatchers)
+        self.handleMatcher(matcher, inheritedStealingMatchers,
+                           inheritedFloatingMatchers)
         superMatcher = matcher.superMatcher
         firstMatchers = []
         floatingMatchers = []
@@ -1048,7 +1112,8 @@ class SimpleParserBuilder(object):
                     if m == matcher:
                         continue
                     if m.weak:
-                        raise Exception("Weak not supported in unordered superMatcher for " + m.desc())
+                        raise Exception(
+                            "Weak not supported in unordered superMatcher for " + m.desc())
                     if m.floating:
                         floatingMatchers.append((ParsingContext.Start, m))
                     else:
@@ -1075,7 +1140,8 @@ class SimpleParserBuilder(object):
         if self.metaInfoToKeep:
             logger.info("Optimizing parsing tree...")
             optimizer = ParserOptimizer()
-            optimizer.optimizeParsingTree(self.rootMatcher, self.metaInfoToKeep)
+            optimizer.optimizeParsingTree(
+                self.rootMatcher, self.metaInfoToKeep)
             # self.rootMatcher.pruneParsingTree(self.metaInfoToKeep)
 
         # If logging is activated, print out the pruned tree for debugging
@@ -1087,17 +1153,18 @@ class SimpleParserBuilder(object):
     def buildParser(self, fIn, backend, superContext, baseUri, basePath):
         return SimpleParser(self, fIn, backend, superContext, baseUri, basePath)
 
-    def writeMatchers(self, outF, extraIndent = 0):
+    def writeMatchers(self, outF, extraIndent=0):
         outF.write("[")
         for i in range(len(self.flatIndex)):
             if i != 0:
                 outF.write(", ")
-            jsonIndentF(self.flatIndex[i].toDict(), outF, extraIndent = extraIndent + 2)
+            jsonIndentF(self.flatIndex[i].toDict(),
+                        outF, extraIndent=extraIndent + 2)
         outF.write("\n")
         outF.write(" " * extraIndent)
         outF.write("]")
 
-    def writeCompiledMatchers(self, outF, extraIndent = 0):
+    def writeCompiledMatchers(self, outF, extraIndent=0):
         outF.write("{")
         keys = list(self.compiledMatchers.keys())
         keys.sort()
@@ -1109,13 +1176,15 @@ class SimpleParserBuilder(object):
                 outF.write(", ")
             jsonIndentF(str(i), outF)
             outF.write(":")
-            jsonIndentF(self.compiledMatchers[i].toDict(), outF, extraIndent = extraIndent + 2)
+            jsonIndentF(self.compiledMatchers[i].toDict(
+            ), outF, extraIndent=extraIndent + 2)
         outF.write("\n")
         outF.write(" " * extraIndent)
         outF.write("}")
 
+
 class SimpleParser(object):
-    def __init__(self, parserBuilder, fIn, backend, superContext = None, baseUri = None, basePath = None):
+    def __init__(self, parserBuilder, fIn, backend, superContext=None, baseUri=None, basePath=None):
         self.parserBuilder = parserBuilder
         self.fIn = fIn
         self.backend = backend
@@ -1126,7 +1195,7 @@ class SimpleParser(object):
         self.baseUri = baseUri
         self.basePath = basePath
         if backend.sectionManagers is not None:
-            r=backend.sectionManagers.get("section_run")
+            r = backend.sectionManagers.get("section_run")
             if r:
                 r.onOpen.append(self.emitBaseUri)
 
@@ -1138,14 +1207,14 @@ class SimpleParser(object):
         for counter in ['total', 'ignore', 'match', 'partial', 'unmatched']:
             self.parsingStats[counter] = 0
         if annotate:
-            annofilename=fIn.fIn.name + ".annotate"
+            annofilename = fIn.fIn.name + ".annotate"
         else:
             logger.info("no annotations requested for " + fIn.fIn.name)
         self.annotator = Annotator(annotateFilename=annofilename)
         self.enterInState(0)
         compiledRootMatcher = self.parserBuilder.compiledMatchers[0]
         if compiledRootMatcher.matcher.fixedStartValues:
-            for k,v in sorted(compiledRootMatcher.matcher.fixedStartValues.items()):
+            for k, v in sorted(compiledRootMatcher.matcher.fixedStartValues.items()):
                 compiledRootMatcher.addValue(backend, k, v)
 
     def uriForPath(self, path):
@@ -1181,7 +1250,8 @@ class SimpleParser(object):
             if callback:
                 callback(self.backend, None, None)
 
-        self.context.append(ParsingContext(stateIndex, sects, compiledMatcher, ParsingContext.Start))
+        self.context.append(ParsingContext(
+            stateIndex, sects, compiledMatcher, ParsingContext.Start))
 
     def contextClose(self, cNow):
 
@@ -1218,15 +1288,16 @@ class SimpleParser(object):
         return str(["#%d %s" % (c.index, ["start", "end"][c.startEnd]) for c in self.context])
 
     def goToMatcher(self, targetMatcher, startEnd):
-        logger.debug("goToMatcher(#%d, %s)", targetMatcher.index, ["start", "end"][startEnd])
+        logger.debug("goToMatcher(#%d, %s)", targetMatcher.index,
+                     ["start", "end"][startEnd])
         states = set(c.index for c in self.context)
         targetStack = []
         mNow = targetMatcher
-        while (mNow and not mNow.index in states):
+        while (mNow and mNow.index not in states):
             targetStack.append(mNow)
             mNow = mNow.superMatcher
         if not mNow:
-            raise Exception("No common matcher between the target %s and the current context %s" %(
+            raise Exception("No common matcher between the target %s and the current context %s" % (
                 targetMatcher.desc(), self.contextDesc()))
         states = list(c.index for c in self.context)
         commonIndex = mNow.index
@@ -1254,7 +1325,8 @@ class SimpleParser(object):
             return False
         currentContext = self.context[len(self.context) - 1]
         if logger.isEnabledFor(logging.DEBUG):
-            logger.debug("lineNr: %d context: %s", self.fIn.lineNr,self.contextDesc())
+            logger.debug("lineNr: %d context: %s",
+                         self.fIn.lineNr, self.contextDesc())
         if currentContext.startEnd == ParsingContext.Start:
             nextI = currentContext.compiledMatcher.findNextMatch(self)
         else:
@@ -1266,7 +1338,8 @@ class SimpleParser(object):
             while self.context:
                 self.contextPop()
             if nextI != -1 and nextI != -3:
-                raise Exception("finished with error with parsing context %s" % (cNames))
+                raise Exception(
+                    "finished with error with parsing context %s" % (cNames))
         else:
             index = nextI // 2
             startEnd = nextI % 2
@@ -1275,10 +1348,12 @@ class SimpleParser(object):
             logger.debug("new context: %s\n", self.contextDesc())
             currentCtx = self.context[len(self.context) - 1]
             if startEnd == ParsingContext.End:
-                self.lastMatch = currentCtx.compiledMatcher.handleEndMatch(self)
+                self.lastMatch = currentCtx.compiledMatcher.handleEndMatch(
+                    self)
                 self.contextClose(currentCtx)
             else:
-                self.lastMatch = currentCtx.compiledMatcher.handleStartMatch(self)
+                self.lastMatch = currentCtx.compiledMatcher.handleStartMatch(
+                    self)
         return len(self.context) > 0
 
     def parse(self):
@@ -1300,7 +1375,8 @@ class SimpleParser(object):
 
 def compileParser(simpleParser, metaInfo, metaInfoToKeep, default_units=None, metainfo_units=None, strValueTransform=None):
     """compiles the given simple parser"""
-    parserBuilder = SimpleParserBuilder(simpleParser, metaInfo, metaInfoToKeep, default_units, metainfo_units, strValueTransform)
+    parserBuilder = SimpleParserBuilder(
+        simpleParser, metaInfo, metaInfoToKeep, default_units, metainfo_units, strValueTransform)
     if logger.isEnabledFor(logging.DEBUG):
         s = io.StringIO()
         s.write("matchers:")
@@ -1316,9 +1392,11 @@ def compileParser(simpleParser, metaInfo, metaInfoToKeep, default_units=None, me
         logger.debug(s.getvalue())
     return parserBuilder
 
+
 def runParser(compiledParser, backend, superContext, fIn, uri, path):
     """parses the open file fIn with the given compiledParser into the backend using superContext as parser SuperContext"""
-    parser = compiledParser.buildParser(PushbackLineFile(fIn), backend, superContext = superContext, baseUri = uri, basePath = path)
+    parser = compiledParser.buildParser(PushbackLineFile(
+        fIn), backend, superContext=superContext, baseUri=uri, basePath=path)
     try:
         superContext.startedParsing(fIn.name, parser)
     except AttributeError:
@@ -1327,12 +1405,14 @@ def runParser(compiledParser, backend, superContext, fIn, uri, path):
     parser.parse()
     return parser.parsingStats
 
+
 def defaultParseFile(parserInfo):
     def parseF(parserBuilder, uri, path, backend, superContext):
         with open(path, "r") as fIn:
             backend.startedParsingSession(uri, parserInfo)
             try:
-                parsingStats = runParser(parserBuilder, backend, superContext, fIn, uri, path)
+                parsingStats = runParser(
+                    parserBuilder, backend, superContext, fIn, uri, path)
                 backend.finishedParsingSession(
                     "ParseSuccess", None,
                     parsingStats=parsingStats)
@@ -1343,6 +1423,7 @@ def defaultParseFile(parserInfo):
                     parsingStats={})
     return parseF
 
+
 def mainFunction(mainFileDescription,
                  metaInfoEnv,
                  parserInfo,
@@ -1385,7 +1466,7 @@ def mainFunction(mainFileDescription,
     --verbose writes metainfo to stderr and detailed debug info of parsing process to file detailed.log
 
     If a path to a file is given this is parsed
-""".format(exeName = os.path.basename(sys.argv[0] if len(sys.argv) > 0 else "simple_parser"))
+""".format(exeName=os.path.basename(sys.argv[0] if len(sys.argv) > 0 else "simple_parser"))
     if parseFile is None:
         parseFile = defaultParseFile(parserInfo)
     metaInfo = False
@@ -1455,15 +1536,15 @@ def mainFunction(mainFileDescription,
         else:
             writeComma = True
         metaInfoEnv.embedDeps()
-        metaInfoEnv.serialize(outF.write, subGids = True, selfGid = True)
+        metaInfoEnv.serialize(outF.write, subGids=True, selfGid=True)
         outF.flush()
     dictReader = ParseStreamedDicts(sys.stdin)
-    toOuput = list(metaInfoEnv.infoKinds.keys())
 
     # Initialize the backend where the final results are printed after they are
     # possibly filtered by the caching ActiveBackend.
     if superBackend is None:
-        jsonBackend = JsonParseEventsWriterBackend(metaInfoEnv, outF, writeMatchTelemetry=writeMatchTelemetry)
+        jsonBackend = JsonParseEventsWriterBackend(
+            metaInfoEnv, outF, writeMatchTelemetry=writeMatchTelemetry)
     else:
         jsonBackend = superBackend
 
@@ -1493,19 +1574,20 @@ def mainFunction(mainFileDescription,
             else:
                 onOpen[attr] = [callback]
     backend = ActiveBackend.activeBackend(
-        metaInfoEnv = metaInfoEnv,
-        cachingLevelForMetaName = cachingLevelForMetaName,
-        defaultDataCachingLevel = defaultDataCachingLevel,
-        defaultSectionCachingLevel = defaultSectionCachingLevel,
-        onClose = onClose,
-        onOpen = onOpen,
-        superBackend = jsonBackend,
+        metaInfoEnv=metaInfoEnv,
+        cachingLevelForMetaName=cachingLevelForMetaName,
+        defaultDataCachingLevel=defaultDataCachingLevel,
+        defaultSectionCachingLevel=defaultSectionCachingLevel,
+        onClose=onClose,
+        onOpen=onOpen,
+        superBackend=jsonBackend,
         default_units=default_units,
         metainfo_units=metainfo_units)
     if specialize:
         specializationInfo = dictReader.readNextDict()
         if specializationInfo is None or specializationInfo.get("type", "") != "nomad_parser_specialization_1_0":
-            raise Exception("expected a nomad_parser_specialization_1_0 as first dictionary, got " + json.dumps(specializationInfo))
+            raise Exception(
+                "expected a nomad_parser_specialization_1_0 as first dictionary, got " + json.dumps(specializationInfo))
         metaInfoToKeep = specializationInfo.get("metaInfoToKeep")
 
     if fileToParse:
@@ -1529,13 +1611,15 @@ def mainFunction(mainFileDescription,
                 # create file-specific json backend
                 outFile = open(outFileName, 'w')
                 outFile.write("[")
-                outFilejsonBackend = JsonParseEventsWriterBackend(metaInfoEnv, outFile, writeMatchTelemetry=writeMatchTelemetry)
+                outFilejsonBackend = JsonParseEventsWriterBackend(
+                    metaInfoEnv, outFile, writeMatchTelemetry=writeMatchTelemetry)
                 # override superBackend for this parseFile
                 backend.superBackend = outFilejsonBackend
             logger.warning("# parsing: %s", toRead['mainFile'])
             try:
-                parseFile(parserBuilder, toRead['mainFileUri'], toRead['mainFile'], backend, superContext)
-            except Exception as e:
+                parseFile(
+                    parserBuilder, toRead['mainFileUri'], toRead['mainFile'], backend, superContext)
+            except Exception:
                 logger.error(traceback.format_exc())
             if outFile is not None:
                 # we had the comma already from last round
@@ -1549,6 +1633,7 @@ def mainFunction(mainFileDescription,
                 backend.superBackend = jsonBackend
     outF.write("]\n")
 
+
 class AncillaryParser(object):
     """This is an ancillary parser that can be used inside a running parser to parse external files.
 
@@ -1560,6 +1645,7 @@ class AncillaryParser(object):
     that are already opened by the main parser. You can use cachingLevelForMetaName to set a section to Ignore or Cache.
     Thus, you can still use sections and their onClose triggers, but no opening events are written.
     """
+
     def __init__(self, fileDescription, parser, cachingLevelForMetaName, superContext):
         """Compiles parser and set up backend.
 
@@ -1573,7 +1659,8 @@ class AncillaryParser(object):
         default_units = parser.parserBuilder.default_units
         metainfo_units = parser.parserBuilder.metainfo_units
         # compile parser
-        self.compiledParser = compileParser(fileDescription, parser.parserBuilder.metaInfoEnv, parser.parserBuilder.metaInfoToKeep, default_units, metainfo_units)
+        self.compiledParser = compileParser(fileDescription, parser.parserBuilder.metaInfoEnv,
+                                            parser.parserBuilder.metaInfoToKeep, default_units, metainfo_units)
         # save superContext
         self.superContext = superContext
         # extract onClose functions from superContext
@@ -1585,12 +1672,13 @@ class AncillaryParser(object):
             onOpen[attr] = [callback]
         # create backend for parser
         self.backend = ActiveBackend.activeBackend(
-            metaInfoEnv = parser.parserBuilder.metaInfoEnv,
-            cachingLevelForMetaName = cachingLevelForMetaName,
-            onClose = onClose,
-            onOpen = onOpen,
-            superBackend = parser.backend.superBackend,
-            propagateStartFinishParsing = False,  # write no parser info upon start and end of parsing
+            metaInfoEnv=parser.parserBuilder.metaInfoEnv,
+            cachingLevelForMetaName=cachingLevelForMetaName,
+            onClose=onClose,
+            onOpen=onOpen,
+            superBackend=parser.backend.superBackend,
+            # write no parser info upon start and end of parsing
+            propagateStartFinishParsing=False,
             default_units=default_units,
             metainfo_units=metainfo_units)
 
@@ -1602,14 +1690,17 @@ class AncillaryParser(object):
         """
         currentUri = self.mainParser.uriForPath(fIn.name)
         if currentUri:
-            self.backend.superBackend.addValue("calculation_file_uri", currentUri)
-        runParser(self.compiledParser, self.backend, self.superContext, PushbackLineFile(fIn), self.mainParser.baseUri, self.mainParser.basePath)
+            self.backend.superBackend.addValue(
+                "calculation_file_uri", currentUri)
+        runParser(self.compiledParser, self.backend, self.superContext, PushbackLineFile(
+            fIn), self.mainParser.baseUri, self.mainParser.basePath)
 
 
 class ParserOptimizer(object):
     """For optimizing a hierarchy of SimpleMatchers based on a list of metainfo
     names that should be included in the parsing.
     """
+
     def optimizeParsingTree(self, rootMatcher, metaInfoToKeep=None):
         """This function will remove any parsing unnecessary parsing actions
         from the parsing tree based on the given list of metainfo names to keep
@@ -1634,7 +1725,8 @@ class ParserOptimizer(object):
             parseList = []
             leaves = []
             matchDict = {}
-            self.searchMatchers(rootMatcher, metaInfoToKeep, parseList, leaves, matchDict)
+            self.searchMatchers(rootMatcher, metaInfoToKeep,
+                                parseList, leaves, matchDict)
 
             # If something was not matched, prompt the user
             for name, found in matchDict.items():
@@ -1675,12 +1767,13 @@ class ParserOptimizer(object):
         # See if this matcher has metainfos of interest and add any possible
         # dependencies to the list of metainfos to keep
         found = False
-        for i_name, name in enumerate(metaNames):
+        for _, name in enumerate(metaNames):
             if name in myMetanames:
                 found = True
                 matchDict[name] = True
                 dependencies = simpleMatcher.dependencies.get(name)
-                logger.debug("Found matcher: {}".format(simpleMatcher.startReStr))
+                logger.debug("Found matcher: {}".format(
+                    simpleMatcher.startReStr))
                 simpleMatcher.keep = True
                 parseList.append(simpleMatcher)
 
@@ -1695,7 +1788,8 @@ class ParserOptimizer(object):
 
         # Check the children
         for submatcher in simpleMatcher.subMatchers:
-            self.searchMatchers(submatcher, metaNames, parseList, unParsedleaves, matchDict)
+            self.searchMatchers(submatcher, metaNames,
+                                parseList, unParsedleaves, matchDict)
 
         # Save non-parsed leaves
         if not simpleMatcher.subMatchers and not found:
@@ -1740,8 +1834,8 @@ class ParserOptimizer(object):
             simpleMatcher: The simplematcher that is the target of the operation
         """
         if simpleMatcher.keep:
-            raise Exception("Trying to remove a matcher that should not be removed from tree.")
-            return
+            raise Exception(
+                "Trying to remove a matcher that should not be removed from tree.")
 
         parent = simpleMatcher.superMatcher
         if not parent:
@@ -1754,7 +1848,8 @@ class ParserOptimizer(object):
             children = parent.subMatchers
             for i_child, child in enumerate(children):
                 if child == simpleMatcher:
-                    logger.debug("REMOVED: {}".format(simpleMatcher.startReStr))
+                    logger.debug("REMOVED: {}".format(
+                        simpleMatcher.startReStr))
                     del children[i_child]
                     return
         else: