 import logging
 # create console handler with a higher log level
-ch = logging.StreamHandler()
-ch.setLevel(logging.INFO)
+# ch = logging.StreamHandler()
+# ch.setLevel(logging.INFO)
 logger=logging.getLogger("nomadcore")
 logger.setLevel(logging.WARNING)
-logger.addHandler(ch)
+# logger.addHandler(ch)
 logger2=logging.getLogger("nomad")
 logger2.setLevel(logging.WARNING)
-logger2.addHandler(ch)
+# logger2.addHandler(ch)
 def debugToFile():
     "makes a full log to a file named detailed.log"
......
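The change above stops nomadcore from wiring stream handlers at import time; configuring log output becomes the application's job. A minimal sketch (assuming only the "nomadcore" and "nomad" logger names visible above) of what a consuming application would now do:

    import logging

    # Configure handlers once, in the application, never at library import time.
    logging.basicConfig(level=logging.WARNING)

    # Opt in to more detail from the nomad loggers when debugging a parser.
    logging.getLogger("nomadcore").setLevel(logging.DEBUG)
    logging.getLogger("nomad").setLevel(logging.DEBUG)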
@@ -9,7 +9,9 @@ import os
 import copy
 import numpy as np
 import logging
+from future.utils import with_metaclass
+from abc import ABCMeta, abstractmethod
 from nomadcore.unit_conversion import unit_conversion
 from nomadcore.simple_parser import mainFunction
 from nomadcore.local_backend import LocalBackend
@@ -17,9 +19,8 @@ from nomadcore.local_meta_info import load_metainfo
 from nomadcore.caching_backend import CachingLevel
 from nomadcore.simple_parser import extractOnCloseTriggers, extractOnOpenTriggers
 from nomadcore.caching_backend import ActiveBackend
 import nomadcore.ActivateLogging
-from future.utils import with_metaclass
-logger = logging.getLogger("nomad")
+logger = logging.getLogger(__file__)

 class ParserInterface(with_metaclass(ABCMeta, object)):
@@ -43,22 +44,34 @@ class ParserInterface(with_metaclass(ABCMeta, object)):
     """
     metainfo_env = None

-    def __init__(self, metainfo_to_keep=None, backend=None, default_units=None, metainfo_units=None, debug=False, log_level=logging.ERROR, store=True):
+    def __init__(
+            self, metainfo_to_keep=None, backend=None, default_units=None,
+            metainfo_units=None, debug=False, log_level=logging.ERROR, store=True):
         """
-        Args:
-            main_file: A special file that can be considered the main file of the
-                calculation.
-            metainfo_to_keep: A list of metainfo names. This list is used to
-                optimize the parsing process as optimally only the information
-                relevant to these metainfos will be parsed.
-            backend: An object to which the parser will give all the parsed data.
-                The backend will then determine where and when to output that data.
+        Arguments:
+            metainfo_to_keep: A list of metainfo names. This list is used to
+                optimize the parsing process, as ideally only the information
+                relevant to these metainfos will be parsed.
+            backend: An object to which the parser will give all the parsed data.
+                The backend will then determine where and when to output that data.
         """
-        self.debug = debug
-        logger.setLevel(log_level)
+        try:
+            logger.setLevel(log_level)
+        except Exception:
+            # might fail on custom loggers
+            pass
+        self.store = store
+        self.debug = debug
         self.initialize(metainfo_to_keep, backend, default_units, metainfo_units)

+    def setup_logger(self, new_logger):
+        global logger
+        logger = new_logger
+        # tell the tests about the received logger
+        new_logger.debug('received logger')
+
     def initialize(self, metainfo_to_keep, backend, default_units, metainfo_units):
         """Initialize the parser with the given environment.
         """
......
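The try/except around logger.setLevel and the new setup_logger hook make the module-level logger replaceable by a host application. A standalone sketch of that injection pattern (ParserSketch and the "my_app.parsing" logger name are illustrative, not the real parser class):

    import logging

    logger = logging.getLogger(__file__)  # module-level default, as in the diff

    class ParserSketch(object):
        """Illustrates the logger handling above, not the real ParserInterface."""

        def __init__(self, log_level=logging.ERROR):
            try:
                logger.setLevel(log_level)
            except Exception:
                # custom logger objects may not implement setLevel
                pass

        def setup_logger(self, new_logger):
            # swap the module-level logger for one supplied by the host application
            global logger
            logger = new_logger
            new_logger.debug('received logger')

    parser = ParserSketch(log_level=logging.INFO)
    parser.setup_logger(logging.getLogger("my_app.parsing"))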
@@ -10,17 +10,18 @@ import json
 import os, re
 from nomadcore.json_support import jsonCompactS, jsonCompactD, jsonIndentD
 from io import open
+import nomad_meta_info

 """objects to handle local InfoKinds with unique names (think self-written json)"""

 class InfoKindEl(object):
     """Info kind (typically from a file, without shas but with locally unique names)"""
-    __slots__ = ["name","description","kindStr","units","superNames","dtypeStr", "repeats", "shape", "extra_args"]
+    __slots__ = ["name","description","kindStr","units","superNames","dtypeStr", "repeats", "shape", "extra_args", "package"]

     IGNORE_EXTRA_ARGS = 1
     ADD_EXTRA_ARGS = 2
     RAISE_IF_EXTRA_ARGS = 3

     def __init__(self, name, description, kindStr = "type_document_content", units = None, superNames = None,
-            dtypeStr = None, shape = None, extraArgsHandling = ADD_EXTRA_ARGS, repeats = None, **extra_args):
+            dtypeStr = None, shape = None, package = None, extraArgsHandling = ADD_EXTRA_ARGS, repeats = None, **extra_args):
         if superNames is None:
             superNames = []
         self.name = name
@@ -29,6 +30,7 @@ class InfoKindEl(object):
         self.superNames = superNames
         self.units = units
         self.dtypeStr = dtypeStr
+        self.package = package
         if dtypeStr in ["None", "null"]:
             self.dtypeStr = None
         self.shape = shape
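The new package slot simply records which metainfo environment a kind came from (it is set to self.name when an InfoKindEnv loads it, see further below). A construction sketch with illustrative values:

    from nomadcore.local_meta_info import InfoKindEl

    kind = InfoKindEl(
        name="energy_total",
        description="Total energy of the system.",
        dtypeStr="f",
        superNames=["section_single_configuration_calculation"],
        package="common.nomadmetainfo.json")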
@@ -183,14 +185,20 @@ class RelativeDependencySolver(object):
         self.deps = {}

     def __call__(self, infoKindEnv, source, dep):
-        if "relativePath" not in dep:
-            raise Exception('Invalid dependency for relativeDependencySolver there must be a relativePath')
         basePath = source.get('path')
+        if "metainfoPath" in dep:
+            basePath = nomad_meta_info.__file__
+            path = dep["metainfoPath"]
+        elif "relativePath" in dep:
+            basePath = source.get('path')
+            path = dep["relativePath"]
+        else:
+            raise Exception('Invalid dependency for relativeDependencySolver: there must be a relativePath or metainfoPath')
         if basePath:
             baseDir = os.path.dirname(os.path.abspath(basePath))
         else:
             baseDir = os.getcwd()
-        dPath = os.path.realpath(os.path.join(baseDir, dep['relativePath']))
+        dPath = os.path.realpath(os.path.join(baseDir, path))
         if dPath in self.deps:
             return self.deps[dPath]
         depInfo = None
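The solver now accepts two dependency forms; a small illustration (file names are examples, not taken from the diff):

    # resolved against the directory of the installed nomad_meta_info package
    dep_packaged = {"metainfoPath": "common.nomadmetainfo.json"}

    # resolved against the path of the declaring file, as before
    dep_relative = {"relativePath": "../common.nomadmetainfo.json"}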
@@ -456,7 +464,7 @@ class InfoKindEnv(object):
     def toJsonList(self, withGids):
         infoKinds = list(self.infoKinds.keys())
-        infoKinds.sort(lambda x, y: self.compareKeys(x.name, y.name))
+        # infoKinds.sort(lambda x, y: self.compareKeys(x.name, y.name))
         return [self.infoKinds[x].toDict(self,
             self if withGids else None) for x in infoKinds]
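The commented-out call used a Python 2 cmp-style sort argument, which list.sort no longer accepts under Python 3 (and x.name fails on plain name strings anyway). Should the deterministic ordering be wanted again inside toJsonList, a Python 3 compatible sketch, assuming compareKeys(a, b) returns a negative/zero/positive number for name strings:

    import functools

    # wrap the cmp-style comparator; infoKinds holds the metainfo names (strings)
    infoKinds.sort(key=functools.cmp_to_key(self.compareKeys))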
@@ -522,6 +530,7 @@ class InfoKindEnv(object):
                 gidToCheck[ii["name"]] = toCheck
             del val['superGids']
         val['extraArgsHandling'] = extraArgsHandling
+        val['package'] = self.name
         ikEl = InfoKindEl(**val)
         if not oldVal is None and ikEl != oldVal:
             overwritten.append((oldVal, ikEl))
@@ -668,7 +677,11 @@ def load_metainfo(filename, dependencyLoader=None, extraArgsHandling=InfoKindEl.
         Tuple containing the metainfo environment, and any possible warnings
         that were encountered in the loading.
     """
-    path = os.path.normpath(os.path.join(os.path.dirname(os.path.abspath(__file__)), "../../../../nomad-meta-info/meta_info/nomad_meta_info/{}".format(filename)))
+    import nomad_meta_info
+    if os.path.isfile(filename):
+        path = filename
+    else:
+        path = os.path.normpath(os.path.join(os.path.dirname(os.path.abspath(nomad_meta_info.__file__)), "{}".format(filename)))
     return loadJsonFile(path, dependencyLoader, extraArgsHandling, uri)

 def loadJsonStream(fileStream, name = None, dependencyLoader = None, extraArgsHandling = InfoKindEl.ADD_EXTRA_ARGS, filePath = None, uri = None):
......
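load_metainfo thus no longer assumes a nomad-meta-info checkout four directories up: bare file names are resolved next to the installed nomad_meta_info package, while existing paths are used as-is. A usage sketch (file names illustrative):

    from nomadcore.local_meta_info import load_metainfo

    # bare name: looked up in the installed nomad_meta_info package directory
    env, warnings = load_metainfo("common.nomadmetainfo.json")

    # an existing file path is taken verbatim
    env, warnings = load_metainfo("/path/to/local.nomadmetainfo.json")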
@@ -4,9 +4,9 @@ from sqlalchemy.orm import sessionmaker

 import logging, sys
 logger = logging.getLogger(__name__)
-handler = logging.StreamHandler(stream=sys.stdout)
-logger.setLevel(logging.INFO)
-logger.addHandler(handler)
+# handler = logging.StreamHandler(stream=sys.stdout)
+# logger.setLevel(logging.INFO)
+# logger.addHandler(handler)

 Base = declarative_base()
 useNested = False
......
@@ -15,7 +15,18 @@ from pint import UnitRegistry

 logger = logging.getLogger(__name__)

 # disable warnings from pint
 logging.getLogger("pint").setLevel(logging.ERROR)

 ureg = UnitRegistry(os.path.join(os.path.dirname(__file__), "units.txt"))

+_ureg_cache = dict()
+
+def ureg_cached(unit):
+    if unit in _ureg_cache:
+        return _ureg_cache[unit]
+    else:
+        unit_def = ureg(unit)
+        _ureg_cache[unit] = unit_def
+        return unit_def

 #===============================================================================
@@ -51,7 +62,7 @@ def convert_unit(value, unit, target_unit=None):
     """
     # Check that the unit is valid
-    unit_def = ureg(unit)
+    unit_def = ureg_cached(unit)
     if not unit_def:
         logger.error("Undefined unit given. Cannot do the conversion")
         return
@@ -64,7 +75,7 @@ def convert_unit(value, unit, target_unit=None):
         return converted_value.magnitude
     else:
         # Check that the given target unit is valid
-        target_unit_def = ureg(target_unit)
+        target_unit_def = ureg_cached(target_unit)
         if not target_unit_def:
             logger.error("Undefined target unit given. Cannot do the conversion")
             return
@@ -95,9 +106,9 @@ def convert_unit_function_immediate(unit, target_unit=None):
     """
     # Check that the dimensionality of the source and target units match.
     if target_unit is not None:
-        source = ureg(target_unit)
+        source = ureg_cached(target_unit)
         source_dim = source.dimensionality
-        target = ureg(unit)
+        target = ureg_cached(unit)
         target_dim = target.dimensionality
         if source_dim != target_dim:
             raise Exception("The dimensionality of unit '{}' does not match the dimensionality of unit '{}'. Cannot do the unit conversion.".format(unit, target_unit))
......
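ureg_cached memoizes pint's comparatively expensive unit parsing, so repeated conversions with the same unit strings reuse a single parsed object. The same pattern in isolation (a sketch using pint's default registry instead of the project's units.txt):

    from pint import UnitRegistry

    ureg = UnitRegistry()  # default registry; the project loads its own units.txt
    _cache = {}

    def parse_unit_cached(unit):
        # parse each distinct unit string once, then reuse the parsed object
        if unit not in _cache:
            _cache[unit] = ureg(unit)
        return _cache[unit]

    # both calls share one parse of "angstrom"
    assert parse_unit_cached("angstrom") is parse_unit_cached("angstrom")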
@@ -6,12 +6,11 @@ ase==3.15.0
 setuptools
 scipy
-mdtraj==1.9.1
-panedr==0.2
 mdanalysis==0.16.2
+panedr==0.2
 parmed==3.0.0
 pandas
 pyyaml
 h5py
 hjson
 enum34
 systax==0.1.2
@@ -18,7 +18,29 @@ def main():
             'nomadcore.unit_conversion': ['*.txt'],
             'nomadcore.md_data_access': ['test/*'],
             'nomadcore.metainfo_storage': ['*.txt'],
-        }
+        },
+        install_requires=[
+            "future",
+            "numpy",
+            "cython",
+            "Pint==0.7.2",
+            "ase==3.15.0",
+            "setuptools",
+            "scipy",
+            "panedr==0.2",
+            "parmed==3.0.0",
+            "pandas",
+            "pyyaml",
+            "h5py",
+            "hjson",
+            "enum34"],
+        extras_require={
+            "md": [
+                "mdtraj==1.9.1",
+                "mdanalysis==0.16.2"
+            ]
+        },
     )
# Run main function by default
......
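With the dependencies declared in setup.py, the library becomes installable with plain pip, and the heavyweight MD packages are only pulled in on demand (commands assume they are run from the project root):

    pip install .          # core dependencies only
    pip install ".[md]"    # additionally installs mdtraj==1.9.1 and mdanalysis==0.16.2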