Commit 2490668f authored by Markus Scheidgen

Fully removed normalizer dependence on backend.

parent 60d2111b
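
In practice, the change is to the normalizer constructor signature: callers now pass the `EntryArchive` root section directly instead of a legacy `Backend`. A minimal sketch of the new calling convention, assuming the `normalizers` list exported by `nomad.normalizing` (import paths inferred from the hunks below and may differ slightly from the repository layout):

```python
from nomad.datamodel import EntryArchive
from nomad.normalizing import normalizers  # assumed re-export of the normalizer classes


def normalize_all(entry_archive: EntryArchive) -> EntryArchive:
    # After this commit, each normalizer is constructed from the archive
    # root section itself; no Backend wrapper is involved anymore.
    for normalizer_class in normalizers:
        normalizer_class(entry_archive).normalize()
    return entry_archive
```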
@@ -22,7 +22,6 @@ from nomad.datamodel.encyclopedia import (
     Properties,
     Calculation,
 )
-from nomad.parsing.legacy import Backend
 from nomad.normalizing.encyclopedia.context import Context
 from nomad.normalizing.encyclopedia.material import MaterialBulkNormalizer, Material2DNormalizer, Material1DNormalizer
 from nomad.normalizing.encyclopedia.method import MethodDFTNormalizer, MethodGWNormalizer
@@ -37,10 +36,6 @@ class EncyclopediaNormalizer(Normalizer):
     within a new section called "encyclopedia". In the future these separate
     metainfos could be absorbed into the existing metainfo hierarchy.
     """
-    def __init__(self, backend: Backend):
-        super().__init__(backend)
-        self.backend: Backend = backend
-
     def calc_type(self, calc: Calculation) -> str:
         """Decides what type of calculation this is: single_point, md,
         geometry_optimization, etc.
...
@@ -15,27 +15,27 @@
 from abc import ABCMeta, abstractmethod
 from typing import List

-from nomad.parsing import Backend
 from nomad.utils import get_logger
 from nomad.metainfo import MSection
+from nomad.datamodel import EntryArchive


 class Normalizer(metaclass=ABCMeta):
     '''
-    A base class for normalizers. Normalizers work on a :class:`Backend` instance
+    A base class for normalizers. Normalizers work on a :class:`EntryArchive` section
     for read and write. Normalizer instances are reused.

     Arguments:
-        backend: The backend used to read and write data from and to.
+        entry_archive: The entry_archive root section of the archive to normalize.
     '''
     domain = 'dft'
     ''' The domain this normalizer should be used in. Default for all normalizers is 'DFT'. '''

-    def __init__(self, backend: Backend) -> None:
-        self.entry_archive = backend.entry_archive
+    def __init__(self, entry_archive: EntryArchive) -> None:
+        self.entry_archive = entry_archive
         try:
-            self.section_run = backend.entry_archive.section_run[0]
+            self.section_run = entry_archive.section_run[0]
         except (AttributeError, IndexError):
             self.section_run = None
         self.logger = get_logger(__name__)
@@ -57,8 +57,8 @@ class SystemBasedNormalizer(Normalizer, metaclass=ABCMeta):
     Args:
         only_representatives: Will only normalize the `representative` systems.
     '''
-    def __init__(self, backend: Backend, only_representatives: bool = False):
-        super().__init__(backend)
+    def __init__(self, entry_archive: EntryArchive, only_representatives: bool = False):
+        super().__init__(entry_archive)
         self.only_representatives = only_representatives

     @property
...
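
With the new base constructor, a subclass only forwards the archive to `super().__init__` and then reads `self.entry_archive` and `self.section_run`. A hypothetical subclass as a sketch (the class name and its `normalize` body are illustrative only, not part of this commit; the module path of `Normalizer` is assumed):

```python
from nomad.datamodel import EntryArchive
from nomad.normalizing.normalizer import Normalizer  # module path assumed


class ExampleNormalizer(Normalizer):
    # Hypothetical normalizer used only to illustrate the new constructor.
    def __init__(self, entry_archive: EntryArchive) -> None:
        super().__init__(entry_archive)

    def normalize(self, logger=None) -> None:
        if self.section_run is None:
            # The base class already resolved section_run from the archive
            # root, or set it to None if the archive has no runs.
            return
        self.logger.info('normalizing an example quantity')
```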
@@ -528,7 +528,7 @@ class Calc(Proc):
                    logger, 'normalizer executed', input_size=self.mainfile_file.size):
                with self.use_parser_backend(normalizer_name) as backend:
                    try:
-                        normalizer(backend).normalize(logger=logger)
+                        normalizer(backend.entry_archive).normalize(logger=logger)
                    except Exception as e:
                        self._parser_backend.finishedParsingSession('ParseFailure', [str(e)])
                        logger.error(
...
@@ -32,7 +32,7 @@ def run_normalize(backend: Backend) -> Backend:
     assert status == 'ParseSuccess'

     for normalizer_class in normalizers:
-        normalizer = normalizer_class(backend)
+        normalizer = normalizer_class(backend.entry_archive)
         normalizer.normalize()

     return backend
...
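
Existing test call sites keep working, since the `Backend`-to-`EntryArchive` adaptation happens inside `run_normalize`. A sketch of such a call (the `parse_file` helper is hypothetical and stands in for whatever fixture produces a parsed `Backend` in the real test suite):

```python
def test_run_normalize():
    backend = parse_file('tests/data/example.out')  # hypothetical parse helper
    backend = run_normalize(backend)
    # The normalized data is reachable through the archive on the backend.
    assert backend.entry_archive.section_run is not None
```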