Commit 0c5adafd authored by Mohamed, Fawzi Roberto (fawzi)

initial commit
import os, logging
from nomadcore.local_meta_info import InfoKindEl, InfoKindEnv, loadJsonFile
logger = logging.getLogger(__name__)
baseDir = os.path.dirname(os.path.dirname(os.path.abspath(__file__)))
metaInfoDir = os.path.normpath(os.path.join(baseDir, "../nomad_meta_info"))
allMetaInfo = {}
for dirpath, dirnames, filenames in os.walk(metaInfoDir):
for filename in filenames:
if not filename.endswith(".nomadmetainfo.json"):
continue
filepath = os.path.join(dirpath, filename)
try:
newEnv, warnings = loadJsonFile(filepath, dependencyLoader = None, extraArgsHandling = InfoKindEl.ADD_EXTRA_ARGS, uri = None)
            if warnings.get("hasWarnings", False):
                logger.warning("loading %s had the following warnings: %s",
                    filepath, warnings)
allMetaInfo[filepath] = newEnv
        except Exception:
logger.exception("nomadcore.basic_meta_info could not load %s", filepath)
baseMetaInfo = allMetaInfo[os.path.join(metaInfoDir, "nomad_base.nomadmetainfo.json")]
metaMetaInfo = allMetaInfo[os.path.join(metaInfoDir, "meta_types.nomadmetainfo.json")]
import hashlib
import base64
class CompactHash(object):
def __init__(self, proto):
self._proto = proto
    def b64digest(self):
        # URL-safe base64 of the digest with the "=" padding stripped
        # (a fixed [:-2] would drop a significant character for e.g. sha512)
        return base64.b64encode(self.digest(), "-_").rstrip("=")
    def b32digest(self):
        # strip the base32 "=" padding; index('=') would raise when there is none
        return base64.b32encode(self.digest()).rstrip("=")
def update(self, data):
        if isinstance(data, unicode):
data=data.encode("utf-8")
return self._proto.update(data)
def __getattr__(self, name):
return getattr(self._proto, name)
def sha224(*args, **kwargs):
return CompactHash(hashlib.sha224(*args,**kwargs))
def sha512(*args, **kwargs):
return CompactHash(hashlib.sha512(*args,**kwargs))
def md5(*args, **kwargs):
return CompactHash(hashlib.md5(*args,**kwargs))
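
# Usage sketch (illustrative, not part of the original module): the wrappers
# behave like hashlib objects but add compact, URL-safe digests.
#
#   >>> h = sha224()
#   >>> h.update(u"some text")  # unicode is utf-8 encoded before hashing
#   >>> h.b64digest()           # URL-safe base64, padding stripped
#   >>> h.hexdigest()           # plain hashlib methods still work via __getattr__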
"""Various functions to simplify and standardize dumping objects to json"""
import sys
import json
from nomadcore.compact_sha import sha224
class ExtraIndenter(object):
"""Helper class to add extra indent at the beginning of every line"""
def __init__(self, fStream, extraIndent):
self.fStream = fStream
self.indent = " " * extraIndent if extraIndent else ""
    def write(self, val):
        i = 0
        while True:
            j = val.find("\n", i)
            if j == -1:
                self.fStream.write(val[i:])
                return
            j += 1
            self.fStream.write(val[i:j])
            self.fStream.write(self.indent)
            i = j
def jsonCompactF(obj, fOut, check_circular = False):
"""Dumps the object obj with a compact json representation using the utf_8 encoding
to the file stream fOut"""
json.dump(obj, fOut, sort_keys = True, indent = None, separators = (',', ':'),
ensure_ascii = False, check_circular = check_circular)
def jsonIndentF(obj, fOut, check_circular = False, extraIndent = None):
"""Dumps the object obj with an indented json representation using the utf_8 encoding
to the file stream fOut"""
fStream = fOut
if extraIndent:
fStream = ExtraIndenter(fOut, extraIndent = extraIndent)
json.dump(obj, fStream, sort_keys = True, indent = 2, separators = (',', ': '),
ensure_ascii = False, check_circular = check_circular)
class DumpToStream(object):
"""transform a dump function in a stream"""
def __init__(self, dumpF, extraIndent = None):
self.baseDumpF = dumpF
self.extraIndent = extraIndent
self.indent = " " * extraIndent if extraIndent else ""
self.dumpF = self.dumpIndented if extraIndent else dumpF
def dumpIndented(self, val):
if type(val) == unicode:
val = val.encode("utf_8")
i = 0
while True:
j = val.find("\n", i)
if j == -1:
self.baseDumpF(val[i:])
return
j += 1
self.baseDumpF(val[i:j])
self.baseDumpF(self.indent)
i = j
def write(self, val):
self.dumpF(val)
def jsonCompactD(obj, dumpF, check_circular = False):
    """Dumps the object obj with a compact json representation using the utf_8 encoding
    to the function dumpF"""
    json.dump(obj, DumpToStream(dumpF), sort_keys = True, indent = None, separators = (',', ':'),
        ensure_ascii = False, check_circular = check_circular)
def jsonIndentD(obj, dumpF, check_circular = False, extraIndent = None):
"""Dumps the object obj with an indented json representation using the utf_8 encoding
to the function dumpF"""
    json.dump(obj, DumpToStream(dumpF, extraIndent = extraIndent), sort_keys = True, indent = 2, separators = (',', ': '),
        ensure_ascii = False, check_circular = check_circular)
def jsonCompactS(obj, check_circular = False):
    """returns a compact json representation of the object obj as a string"""
    return json.dumps(obj, sort_keys = True, indent = None, separators = (',', ':'),
        ensure_ascii = False, check_circular = check_circular)
def jsonIndentS(obj, check_circular = False, extraIndent = None):
    """returns an indented json representation of the object obj as a string"""
    res = json.dumps(obj, sort_keys = True, indent = 2, separators = (',', ': '),
        ensure_ascii = False, check_circular = check_circular)
if extraIndent:
indent = " " * extraIndent
res = res.replace("\n", "\n" + indent)
return res
def jsonDump(obj, path):
"""Dumps the object obj to an newly created utf_8 file at path"""
kwds = dict()
if sys.version_info.major > 2:
kwds["encoding"] = "utf_8"
with open(path, "w", **kwds) as f:
jsonIndentF(obj, f)
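
# Usage sketch (illustrative): all dump helpers sort keys, so the output is
# deterministic and suitable for hashing, e.g.
#
#   >>> jsonCompactS({"b": 1, "a": [1, 2]})
#   '{"a":[1,2],"b":1}'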
class ShaStreamer(object):
"""a file like object that calculates one or more shas"""
    def __init__(self, shas = None):
        self.shas = shas
        if shas is None:
            self.shas = (sha224(),)
    def write(self, val):
        for sha in self.shas:
            sha.update(val)
    def b64digests(self):
        return [sha.b64digest() for sha in self.shas]
def addShasOfJson(obj, shas = None):
"""adds the jsonDump of obj to the shas"""
streamer = ShaStreamer(shas)
jsonCompactF(obj, streamer)
return streamer
def normalizedJsonGid(obj, shas = None):
    """returns the gid of the standard formatted jsonDump of obj"""
    return map(lambda x: 'j' + x, addShasOfJson(obj, shas).b64digests())
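
# Usage sketch (illustrative): a gid is the prefixed compact digest of the
# canonical (compact, key-sorted) json dump:
#
#   >>> normalizedJsonGid({"name": "x"})
#   ['j...']   # 'j' + base64 digest of '{"name":"x"}'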
from sqlalchemy.ext.declarative import declarative_base
from sqlalchemy import create_engine, exc
from sqlalchemy.orm import sessionmaker
import logging, sys
logger = logging.getLogger(__name__)
handler = logging.StreamHandler(stream=sys.stdout)
logger.setLevel(logging.INFO)
logger.addHandler(handler)
Base = declarative_base()
useNested = False
useJson = False
if useJson:
from sqlalchemy.dialects.postgresql import JSON, JSONB
def compareSorted(oldValues, newValues):
toAdd, toRemove = [], []
iNew, iOld = 0, 0
while iNew < len(newValues) and iOld < len(oldValues):
if newValues[iNew] < oldValues[iOld]:
toAdd.append(newValues[iNew])
iNew += 1
elif newValues[iNew] > oldValues[iOld]:
toRemove.append(oldValues[iOld])
iOld += 1
else:
iNew += 1
iOld += 1
toAdd += newValues[iNew:]
toRemove += oldValues[iOld:]
return (toRemove, toAdd)
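
# Usage sketch (illustrative): given two sorted sequences, compareSorted
# returns the elements to remove from the old one and those to add to reach
# the new one:
#
#   >>> compareSorted([1, 2, 4], [2, 3, 4, 5])
#   ([1], [3, 5])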
def createEngine(engineStr='sqlite:///:memory:', echo = True):
return create_engine(engineStr, echo = echo)
def createDB(engine):
Base.metadata.create_all(engine)
def createSession(engine):
return sessionmaker(bind=engine)()
def get_or_create(cls, session, defaults=None, **kwds):
result = session.query(cls).filter_by(**kwds).first()
if result:
return result, True
    # copy the defaults so that the caller's dict is not mutated
    newVals = dict(defaults) if defaults else {}
    newVals.update(kwds)
    result = cls(**newVals)
    if useNested:
        session.begin_nested()
        session.add(result)
        try:
            session.commit() # commits just the nested transaction (savepoint)
        except exc.IntegrityError:
            session.rollback()
            result = session.query(cls).filter_by(**kwds).one()
    else:
        session.add(result)
        session.flush()
    return result, False # False: the object was newly created, not found
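
# Usage sketch (illustrative; the Sample model is hypothetical):
#
#   from sqlalchemy import Column, Integer, String
#
#   class Sample(Base):
#       __tablename__ = "samples"
#       id = Column(Integer, primary_key = True)
#       name = Column(String, unique = True)
#
#   engine = createEngine("sqlite:///:memory:", echo = False)
#   createDB(engine)
#   session = createSession(engine)
#   obj, existed = get_or_create(Sample, session, name = "a") # existed == False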
import readline # optional, will allow Up/Down/History in the console
import code
def goInteractive(locals):
"""Debugging function that when called stops execution and drops in an interactive loop.
Exiting the interpreter will continue execution.
call as follows:
goInteractive(locals())
"""
vars = globals().copy()
vars.update(locals)
shell = code.InteractiveConsole(vars)
shell.interact()
#!/usr/bin/env python
import sys, os, os.path, datetime, logging, json
basePath = os.path.realpath(os.path.dirname(os.path.dirname(os.path.abspath(__file__))))
if not basePath in sys.path:
sys.path.append(basePath)
from nomadcore.local_meta_info import InfoKindEnv, InfoKindEl, loadJsonFile, loadJsonStream
from nomadcore.json_support import jsonIndentF, jsonCompactF
import git
class GitTreeDependencySolver:
def __init__(self, tree, basePath, context):
self.tree = tree
self.basePath = basePath
self.context = context
self.deps = {}
    def __call__(self, infoKindEnv, source, dep):
        if "relativePath" not in dep:
            raise Exception('Invalid dependency for relativeDependencySolver, there must be a relativePath')
        dPath = os.path.normpath(os.path.join(self.basePath, dep.get('relativePath')))
        if dPath in self.deps:
            return self.deps[dPath]
        depInfo = None
        depIKEnv = InfoKindEnv(name = os.path.basename(dPath), path = dPath, dependencyLoader=infoKindEnv.dependencyLoader)
        self.deps[dPath] = depIKEnv
        try:
            f = self.tree[dPath].data_stream
        except Exception:
            sys.stderr.write("{0}, t.path: {1}, bpath: {2}, dPath: {3}\n".format(
                self.tree.hexsha, self.tree.path, self.basePath, dPath))
            raise Exception("Failed to resolve dependency {0!r} in {context}, due to exception {1}".format(dep, sys.exc_info()[1], context = self.context))
depInfo = json.load(f)
if depInfo:
depIKEnv.fromJsonList(depInfo, source = { 'path': dPath }, dependencyLoad = True)
return depIKEnv
def loadPath(path, loadOverrides = False):
"""loads all nomadmetainfo.json files within the given path.
If loadOverrides is true then also nomadmetainfooverrides.json files are loaded"""
allEnvs = {}
allOverrides = {}
hasWarnings = False
    if os.path.isdir(path):
        for dirpath, dirnames, filenames in os.walk(path):
            for filename in filenames:
                filepath = os.path.join(dirpath, filename)
                if filename.endswith(".nomadmetainfo.json"):
                    env, warnings = loadJsonFile(filepath, extraArgsHandling = InfoKindEl.ADD_EXTRA_ARGS)
                    if warnings.get("duplicates", False) or warnings.get("overrides", False):
                        hasWarnings = True
                        logging.warning("{0!r}: warnings in loadJsonFile:{1}\n".format(filepath, warnings))
                    allEnvs[os.path.relpath(filepath, path)] = env
                elif loadOverrides and filename.endswith(".nomadmetainfooverrides.json"):
                    try:
                        with open(filepath) as f:
                            overrides = json.load(f)
                    except Exception:
                        hasWarnings = True
                        logging.exception("Error loading %r", filepath)
                    else:
                        allOverrides[os.path.relpath(filepath, path)] = overrides
return {"hasWarnings": hasWarnings, "envs": allEnvs, "overrides": allOverrides}
def loadRef(rev, path, loadOverrides = False):
"""loads all nomadmetainfo.json files within path from repo reference"""
allEnvs = {}
allOverrides = {}
hasWarnings = False
if path == ".":
path = ""
if path:
tree = rev.tree[path]
else:
tree = rev.tree
def loadBlobHasWarnings(obj):
hasW = False
if obj.type != 'blob':
return False
rPath = os.path.relpath(obj.path, path)
if obj.name.endswith(".nomadmetainfo.json"):
env, warnings = loadJsonStream(obj.data_stream, GitTreeDependencySolver(rev.tree, os.path.dirname(obj.path), obj.path),
filePath = obj.path)
allEnvs[rPath] = env
dup = warnings.get("duplicates")
hid = warnings.get("hidden")
            if dup or hid:
                hasW = True
                logging.warning("loading {path} of revision {rev}: {warnings}\n".format(
                    path = obj.path, rev = rev.name_rev, warnings = warnings))
elif obj.name.endswith(".nomadmetainfooverrides.json"):
try:
overrides = json.load(obj.data_stream)
            except Exception:
                hasW = True
                logging.exception("Error loading %r in revision %s", obj.path, rev.hexsha[:10])
else:
allOverrides[rPath] = overrides
return hasW
if tree.type == 'blob':
if loadBlobHasWarnings(tree):
hasWarnings = True
else:
for obj in tree.traverse(lambda i, d: i.type == 'blob' and (i.path.endswith(".nomadmetainfo.json") or
(loadOverrides and i.path.endswith(".nomadmetainfooverrides.json")))):
if loadBlobHasWarnings(obj):
hasWarnings = True
return {"hasWarnings": hasWarnings, "envs": allEnvs, "overrides": allOverrides}
def insertDicList(d, key, value):
    # append value to the list at d[key], creating the list if needed
    d.setdefault(key, []).append(value)
def prepareValues(values, name):
    "prepares the values computing gids"
    allNames = {}
    hasWarnings = False
    for path, env in values.items():
        try:
            env.calcGids()
        except Exception:
            hasWarnings = True
            logging.warning("Error calculating gids of {path!r} {name}: {exc}\n".format(path = path, name = name, exc = sys.exc_info()[1]))
        noDepNames = env.noDepNames()
        for kindName, gid in env.gids.items():
            if kindName in noDepNames:
                insertDicList(allNames, kindName, (path, gid))
    return {"allNames": allNames, "envs": values, "hasWarnings": hasWarnings}
def cmpOverrides(o1,o2):
c = cmp(o1["name"], o2["name"])
if c != 0:
return c
return cmp(o1, o2)
def buildOverrides(oldValues, newValues):
oldV = prepareValues(oldValues, "(old)")
newV = prepareValues(newValues, "(new)")
oldNames = oldV["allNames"]
newNames = newV["allNames"]
overrides = []
complexOverrides = []
additions = []
removals = []
hasWarnings = oldV["hasWarnings"] or newV["hasWarnings"]
nMatched = 0
for name, oldUses in oldNames.items():
if not name in newNames:
removals.append({"name": name, "oldGids": map(lambda x: x[1], oldUses), "newGids": []})
continue
newUses = newNames[name]
newNoMatch = list(newUses)
oldNoMatch = []
for oldPath, oldGid in oldUses:
found = -1
for i in range(len(newNoMatch)):
newPath, newGid = newNoMatch[i]
if newGid == oldGid:
del newNoMatch[i]
found = i
break
if found != -1:
nMatched += 1
else:
oldNoMatch.append((oldPath, oldGid))
if not oldNoMatch and not newNoMatch:
continue
if len(oldNoMatch) == 1 and len(newNoMatch) == 1:
overrides.append({"name": name, "oldGid": oldNoMatch[0][1], "newGid": newNoMatch[0][1]})
continue
# try path matching
iOld = 0
while iOld < len(oldNoMatch):
oldPath, oldGid = oldNoMatch[iOld]
found = -1
for iNew in range(len(newNoMatch)):
newPath, newGid = newNoMatch[iNew]
                if newPath == oldPath:
                    overrides.append({"name": name, "oldGid": oldGid, "newGid": newGid})
                    del newNoMatch[iNew]
                    found = iNew
                    break
if found != -1:
del oldNoMatch[iOld]
else:
iOld += 1
        if len(oldNoMatch) == 1 and len(newNoMatch) == 1:
            overrides.append({"name": name, "oldGid": oldNoMatch[0][1], "newGid": newNoMatch[0][1]})
            continue
        if not oldNoMatch and not newNoMatch:
            continue
elif oldNoMatch and not newNoMatch:
removals.append({"name": name, "oldGids": map(lambda x: x[1], oldNoMatch), "newGids": []})
elif not oldNoMatch and newNoMatch:
additions.append({"name": name, "oldGids": [], "newGids": map(lambda x: x[1], newNoMatch)})
        else: # oldNoMatch and newNoMatch
            complexOverrides.append({"name": name, "oldGids": map(lambda x: x[1], oldNoMatch), "newGids": map(lambda x: x[1], newNoMatch)})
for name in newNames.keys():
if not name in oldNames:
additions.append({"name": name, "oldGids": [], "newGids": map(lambda x: x[1], newNames[name])})
overrides.sort(cmpOverrides)
complexOverrides.sort(cmpOverrides)
additions.sort(cmpOverrides)
removals.sort(cmpOverrides)
return {
"oldNames": oldNames,
"newNames": newNames,
"overrides": overrides,
"complexOverrides": complexOverrides,
"additions": additions,
"removals": removals,
"hasWarnings": hasWarnings,
"nMatched": nMatched
}
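
# Illustrative shape of the result (names and gids are made up):
#
#   {
#     "overrides": [{"name": "some_meta_name", "oldGid": "j...", "newGid": "j..."}],
#     "additions": [{"name": "new_meta_name", "oldGids": [], "newGids": ["j..."]}],
#     "removals": [...], "complexOverrides": [...], ...
#   }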
if __name__ == "__main__":
overridesDir = os.path.normpath(os.path.join(basePath,"../nomad_meta_info_overrides"))
defaultPath = os.path.normpath(os.path.join(basePath,"../nomad_meta_info"))
usage = """usage: {command} [--check-only] [--old-ref <ref1=HEAD>] [--old-path <path=None>] [--help]
[--new-ref <ref2=None>] [--new-path <path=basePath>] [--repo-path <repoPath=None>]
[--overrides-dir <overridesDir>] [--no-clobber] [--verbose] [<basePath>]
Generates the overrides for the kind info with the same name but different sha from the old version
to the new version.
Old version can be given as a revision in git (defaults to HEAD) with --old-ref, or a path with
--old-path, likewise the new version can be given as a revision with --new-ref or new-path.
basePath (which defaults to {defaultPath}) gives the path for which the revisions should be compared (it might be a
subdirectory or file of the repo); if no --new-ref is given, the version checked out there is
directly the new version.
The repository path is by default found looking from path upward for a .git directory, but it can
also be given explicitly with --repo-path.
overridesDir defaults to {overridesDir}
Progress information goes to stdout; the generated overrides are written to a file in overridesDir.
""".format(command = os.path.basename(sys.argv[0]), overridesDir = overridesDir, defaultPath = defaultPath)
oldRef = None
oldPath = None
newRef = None
newPath = None
repoPath = None
path = None
checkOnly = False
noClobber = False
verbose = False
iArg = 1
while iArg < len(sys.argv):
arg = sys.argv[iArg]
if arg == "--help":
sys.stderr.write(usage)
sys.exit(0)
elif arg == "--check-only":
checkOnly = True
elif arg == "--no-clobber":
noClobber = True
elif arg == "--verbose":
verbose = True
elif arg == "--old-ref":
iArg += 1
if iArg < len(sys.argv):
oldRef = sys.argv[iArg]
else:
sys.stderr.write("Error: missing reference after --old-ref\n\n")
sys.stderr.write(usage)
sys.exit(1)
elif arg == "--old-path":
iArg += 1
if iArg < len(sys.argv):
oldPath = sys.argv[iArg]
else:
sys.stderr.write("Error: missing path after --old-path\n\n")
sys.stderr.write(usage)
sys.exit(2)
elif arg == "--new-ref":
iArg += 1
if iArg < len(sys.argv):
newRef = sys.argv[iArg]
else:
sys.stderr.write("Error: missing reference after --new-ref\n\n")
sys.stderr.write(usage)
sys.exit(3)
elif arg == "--new-path":
iArg += 1
if iArg < len(sys.argv):
newPath = sys.argv[iArg]
else:
sys.stderr.write("Error: missing path after --new-path\n\n")
sys.stderr.write(usage)
sys.exit(4)
elif arg == "--repo-path":
iArg += 1
if iArg < len(sys.argv):
repoPath = sys.argv[iArg]
else:
sys.stderr.write("Error: missing path after --repo-path\n\n")
sys.stderr.write(usage)
sys.exit(5)
elif arg == "--overrides-dir":
iArg += 1
if iArg < len(sys.argv):
overridesDir = sys.argv[iArg]
else:
sys.stderr.write("Error: missing reference after --overrides-dir\n\n")
sys.stderr.write(usage)
sys.exit(7)
elif path is None:
path = arg
else:
sys.stderr.write("Error: unexpected extra argument {0!r}\n\n".format(arg))
sys.stderr.write(usage)
sys.exit(8)
iArg += 1
if path is None:
path = defaultPath
requiresRepo = True
if oldPath is None and oldRef is None:
oldRef = "HEAD"
if not newRef and not newPath:
newPath = path
if oldPath and not newRef:
requiresRepo = False
if oldRef:
sys.stderr.write("Error: both --old-path and --old-ref were given this is not supported\n\n")
sys.stderr.write(usage)
sys.exit(9)
if requiresRepo and repoPath is None:
repoPath = os.path.normpath(os.path.abspath(path))
        while not os.path.exists(os.path.join(repoPath, ".git")):
            if repoPath == '/':
                sys.stderr.write("Error: path {0!r} is not within a git repo.\nCould not find a .git directory; for bare repositories use the --repo-path option\n\n".format(path))
                sys.stderr.write(usage)
                sys.exit(10)
            repoPath = os.path.split(repoPath)[0]
if repoPath:
repo = git.Repo(repoPath)
if os.path.abspath(path).startswith(repoPath) and os.path.exists(os.path.abspath(path)):
relPath = os.path.relpath(os.path.normpath(os.path.abspath(path)), repoPath)
else:
relPath = path
if requiresRepo and relPath.startswith("../"):
sys.stderr.write("Error: basePath should be in the repo, i.e. within {0!r}\n\n".format(repoPath))
sys.stderr.write(usage)
sys.exit(11)
oldName = ""
if oldRef:
oldRev = repo.rev_parse(oldRef)
oldName = oldRev.hexsha[:10]
oldValues = loadRef(oldRev, relPath)
sys.stdout.write("<old>: {relPath} in {oldRef}({oldName}) of repo at {repoPath}\n".format(
relPath = relPath, oldRef = oldRef, oldName = oldName, repoPath = repoPath))
else:
sys.stdout.write("<old>: {oldPath}\n".format(oldPath = oldPath))
oldValues = loadPath(oldPath)
newName = ""
if newRef:
newRev = repo.rev_parse(newRef)
newName = newRev.hexsha[:10]
newValues = loadRef(newRev, relPath)
sys.stdout.write("<new>: {relPath} in {newRef}({newName}) of repo at {repoPath}\n".format(
relPath = relPath, newRef = newRef, newName = newName, repoPath = repoPath))
else:
sys.stdout.write("<new>: {newPath}\n".format(newPath = newPath))
newValues = loadPath(newPath)
res = buildOverrides(oldValues["envs"], newValues["envs"])
if verbose:
jsonIndentF(res, sys.stdout, check_circular = False)
sys.stdout.write("\n")
sys.stdout.flush()
overrides = res["overrides"]
if overrides and not checkOnly:
overridesName = os.path.join(overridesDir, "{old}_{new}_{date}.nomadmetainfooverrides.json"
.format(old = oldName, new = newName, date = datetime.date.today().isoformat()))
n = 1
        while noClobber and os.path.exists(overridesName):
            overridesName = os.path.join(overridesDir, "{old}_{new}_{date}_{n}.nomadmetainfooverrides.json"
                .format(old = oldName, new = newName, date = datetime.date.today().isoformat(), n = n))
            n += 1
if os.path.exists(overridesName):
overwriteStr = "Overwriting"
else:
overwriteStr = "Writing"
        with open(overridesName, "w") as f: # using os.open to guarantee no clashes seems like overkill
jsonIndentF(overrides, f, check_circular = False)
sys.stdout.write("{0} overrides in {1!r}\n".format(overwriteStr, os.path.abspath(overridesName)))
hasWarnings = oldValues["hasWarnings"] or newValues["hasWarnings"] or res["hasWarnings"]
removals = res["removals"]
if removals:
sys.stdout.write("removals: ")
jsonCompactF(map(lambda x: x["name"], removals), sys.stdout)
sys.stdout.write("\n")
additions = res["additions"]
if additions:
sys.stdout.write("additions: ")
jsonCompactF(map(lambda x: x["name"], additions), sys.stdout)
sys.stdout.write("\n")
complexOverrides = res["complexOverrides"]
if complexOverrides:
hasWarnings = True
sys.stdout.write("Warning: complexOverrides: ")
jsonCompactF(map(lambda x: x["name"], complexOverrides), sys.stdout)
sys.stdout.write("\n")
sys.stdout.write("#overrides:{nov:3} #matches:{nm:3} #complexOverrides:{ncov:3} #additions:{nadd:3} #removals:{nrm}\n"
.format(nm = res["nMatched"], nov = len(overrides), ncov = len(complexOverrides), nadd = len(additions), nrm = len(removals)))
if not hasWarnings:
sys.exit(0)
else:
sys.stderr("Had warnings\n")
sys.exit(1)
#!/usr/bin/env python
"""Loads the given info in the specified db"""
import sys, os, os.path, logging, re
import git
basePath = os.path.realpath(os.path.dirname(os.path.dirname(os.path.abspath(__file__))))
if not basePath in sys.path:
sys.path.append(basePath)
from nomadcore.local_meta_info import loadJsonFile
from nomadcore.model_meta_info import InfoKind
from nomadcore.model_base import createEngine, createDB, createSession
from nomadcore.basic_meta_info import metaMetaInfo
from nomadscripts.calculate_meta_info_overrides import loadPath, loadRef
import sqlite3
def addToDb(parsed, session):
"""loads a nomadmetainfo from the given file"""
for filepath, env in parsed['envs'].items():
try:
InfoKind.ensureInfoKindEnv(env, session)
        except Exception:
            logging.exception("Error adding %s", filepath)
for filepath, overrides in parsed['overrides'].items():
pass # to do
if __name__ == "__main__":
defaultPath = os.path.normpath(os.path.join(basePath, "../nomad_meta_info"))
defaultDb = "sqlite:///" + os.path.normpath(os.path.join(basePath, "../default.sqlite"))
usage = """{programName} [--db <dbString>] [--[no-]create-db] [--echo-sql] [--ref <repoRef> | --tags-matching <tagRegexp>]
[--[no-]overrides] [--set-as-default] [--repo-path path] [path1 ...]
loads the nomadmetainfo.json files from basePath (defaults to {defaultPath})
into the database reached by the given dbString (defaults to {defaultDb}).
If repoRef is given then basePath has to be in a git repository, and the files
are taken from the repoRef revision.
If tagRegexp is given all tags matching the given regexp are added to the db.
By default overrides are not added to the db, but one can add them with --overrides.
With --set-as-default it is ensured that the given version is set as the latest official version.
--create-db creates the db, and --echo-sql echoes all sql commands.
For sqlite connections, if the database file does not exist then --create-db is automatically implied.
""".format(programName = os.path.basename(sys.argv[0]),
defaultDb = defaultDb, defaultPath = defaultPath)
db = defaultDb
iArg = 0
nArgs = len(sys.argv)
pathToLoad = []
echoSql = False
createDb = None
overrides = False
makeDefault = False
refName = None
tagRegExp = None
repoPath = None
while iArg + 1 < nArgs:
iArg += 1
arg = sys.argv[iArg]
if arg == "--db":
iArg += 1
if iArg < nArgs:
db = sys.argv[iArg]
elif arg == "--create-db":
createDb = True
elif arg == "--no-create-db":
createDb = False
elif arg == "--echo-sql":
echoSql = True
elif arg == "--help":
sys.stdout.write(usage)
sys.exit(0)
elif arg == "--ref":
if tagRegExp:
sys.stderr.write("\nError: defined both --ref and --tags-matching\n\n")
sys.sterr.write(usage)
sys.exit(1)
elif ref:
sys.stderr.write("\nError: multiple definitions of --ref\n\n")
sys.sterr.write(usage)
sys.exit(2)
iArg += 1
if iArg < nArgs:
refName = sys.argv[iArg]
else:
sys.stderr.write("\
sys.stderr.wError: missing reference after --ref\n\n")
sys.sterr.write(usage)
sys.exit(2)
elif arg == "--tags-matching":
if ref:
sys.stderr.write("\nError: defined both --ref and --tags-matching\n\n")
sys.sterr.write(usage)
sys.exit(3)
elif tagRegExp:
sys.stderr.write("\nError: multiple definitions of --tags-matching\n\n")
sys.sterr.write(usage)
sys.exit(4)
iArg += 1
if iArg < nArgs:
refName = sys.argv[iArg]
else:
sys.stderr.write("\nError: missing reference after --tags-matching\n\n")
sys.sterr.write(usage)
sys.exit(5)
elif arg == '--overrides':
overrides = True
elif arg == '--no-overrides':
overrides = False
elif arg == '--set-as-default':
makeDefault = True
elif arg == "--repo-path":
iArg += 1
if iArg < len(sys.argv):
repoPath = sys.argv[iArg]
else:
sys.stderr.write("\nError: missing path after --repo-path\n\n")
sys.stderr.write(usage)
sys.exit(5)
elif os.path.exists(arg) or refName or tagRegExp:
pathToLoad.append(arg)
else:
sys.stderr.write("\nError: expected a valid path\n\n")
sys.stderr.write(usage)
sys.exit(6)
if not pathToLoad:
pathToLoad.append(defaultPath)
    if db.startswith('sqlite:///') and createDb is None:
        dbPath = db[len('sqlite:///'):]
        if not os.path.exists(dbPath) and os.path.exists(os.path.dirname(dbPath)):
            createDb = True
engine = createEngine(db, echo = echoSql)
if createDb:
createDB(engine)
session = createSession(engine)
if refName or tagRegExp:
requireRepo = True
else:
requireRepo = False
InfoKind.ensureInfoKindEnv(metaMetaInfo, session)
if requireRepo and repoPath is None:
repoPath = os.path.normpath(os.path.abspath(pathToLoad[0]))
while not os.path.exists(os.path.join(repoPath, ".git")):
if repoPath == '/':
sys.stderr.write("Error: path {0!r} from is not within a git repo.\nCould not find .git directory, for bare repositories use the --repo-path option\n\n".format(pathToLoad[0]))
sys.stderr.write(usage)
sys.exit(10)
repoPath = os.path.split(repoPath)[0]
    if refName:
        repo = git.Repo(repoPath)
        rev = repo.rev_parse(refName)
        revSha = rev.hexsha[:10]
for path in pathToLoad:
rPath = os.path.relpath(path, repoPath)
            try:
                env = loadRef(rev, rPath, loadOverrides = overrides)
                addToDb(env, session)
            except Exception:
                logging.exception("for path %s in revision %s (%s)", rPath, refName, revSha)
elif tagRegExp:
tagRe = re.compile(tagRegExp)
repo = git.Repo(repoPath)
for tag in repo.tags:
if tagRe.match(tag.name):
                try:
                    ref = tag.commit # resolve the tag reference to the tagged commit
                except Exception:
                    logging.exception("failed to resolve tag %s", tag.name)
else:
for path in pathToLoad:
rPath = os.path.relpath(path, repoPath)
try:
env = loadRef(ref, rPath, loadOverrides = overrides)
addToDb(env, session)
except:
logging.exception("for path %s in tag %s (%s)", rPath, tag.name, ref.hexsha[:10])
else:
for path in pathToLoad:
env = loadPath(path, loadOverrides = overrides)
addToDb(env, session)
session.commit()
#!/usr/bin/env python
"""
Normalizes (i.e. checks consistency, completeness, absence of duplicates) and reformats meta_info data
"""
import sys, os, os.path, datetime
basePath = os.path.realpath(os.path.dirname(os.path.dirname(os.path.abspath(__file__))))
if not basePath in sys.path:
sys.path.append(basePath)
from nomadcore.local_meta_info import InfoKindEnv, loadJsonFile, InfoKindEl
from nomadcore.json_support import jsonIndentD, jsonCompactD
from nomadcore.compact_sha import sha512
def normalizeFile(path, checkOnly, force, addGid, addSubGids, extraArgsHandling, s = sys.stderr.write):
"""Normalizes the meta info file at the given path"""
env, warnings = loadJsonFile(path, extraArgsHandling = extraArgsHandling)
hasErrors = False
ovr = warnings.get("overrides")
relpath = os.path.relpath(path)
if ovr:
hasErrors = True
s("Error: loading {0!r}, found duplicates:\n".format(relpath))
for old, new in ovr:
jsonIndentD(old, s, extraIndent = 4)
s(" => ")
jsonIndentD(new, s, extraIndent = 4)
s("\n")
dup = warnings.get("duplicates")
if dup:
hasErrors = True
s("Error: loading {0!r},\n the depenendencies already define kinds with the following names:\n ".format(relpath))
s(str(dup))
s("\n")
hid = warnings.get("hidden")
if hid:
hasErrors = True
s("Error: loading {0!r},\n the depenendencies already define different kinds with the same names:\n ".format(
relpath))
s(str(map(lambda x: x.toDict(), hid)))
s("\n")
res = {"path":path, "env": env, "hasErrors": hasErrors, "hasChanges": False}
try:
env1 = InfoKindEnv(name = env.name, description = env.description, infoKinds=env.infoKinds.values(), path=env.path, uri=env.uri, deps=env.deps)
env1.calcGids()
validGids = True
    except Exception:
validGids = False
hasErrors = True
s("Error: parsing {0!r} failed to re-generate gids: {1}\n".format(relpath, sys.exc_info()[1]))
if (hasErrors and not force):
return res
if env.gids and validGids:
changes = []
noDepNames = env.noDepNames()
for k, v in env.gids.items():
if k in noDepNames and k in env1.gids and env1.gids.get(k) != v:
changes.append(k)
if changes:
s("Info: {0!r} regenerating gids changed for: ".format(relpath))
jsonCompactD(changes, s)
s("\n")
if validGids:
env = env1
oldSha = None
with open(path) as f:
sha = sha512()
while True:
block = f.read(8*1024)
if not block:
break
sha.update(block)
oldSha = sha.b64digest()
newS = sha512()
try:
env.serialize(newS.update, subGids = addSubGids, selfGid = addGid)
newSha = newS.b64digest()
    except Exception:
hasErrors = True
res["hasErrors"] = hasErrors
s("Error: serialization of {0!r} failed: {1}".format(relpath, sys.exc_info()[1]))
return res
backup = None
if (oldSha == newSha):
s(repr(path))
s(": OK, no change\n")
else:
backupBase = path + "-" + datetime.date.today().isoformat()
extra = ""
n = 0
while os.path.exists(backupBase + extra + ".bk"):
n += 1
extra = "-" + str(n)
backup = backupBase + extra + ".bk"
if not checkOnly:
os.rename(path, backup)
with open(path, "wb") as f:
env.serialize(f.write, subGids = addSubGids, selfGid = addGid)
s(repr(path))
s(": %s, has changes, backup in " % ("OK" if not hasErrors else "tying to fix ERRORS"))
s(os.path.basename(backup))
s("\n")
res["hasChanges"] = True
res["backup"] = backup
return res
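
# Usage sketch (illustrative; the file path is hypothetical):
#
#   res = normalizeFile("some/file.nomadmetainfo.json", checkOnly = True, force = False,
#       addGid = False, addSubGids = False, extraArgsHandling = InfoKindEl.ADD_EXTRA_ARGS)
#   res["hasChanges"]  # True if a rewrite (with a .bk backup) would occur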
def normalizePath(path, checkOnly, force, addGid, addSubGids, extraArgsHandling, s = sys.stderr.write):
"""Normalizes all the nomadmetainfo.json files at the given path"""
if (os.path.isfile(path)):
return normalizeFile(path, checkOnly=checkOnly, force=force, addGid=addGid, addSubGids=addSubGids, extraArgsHandling=extraArgsHandling, s=s)
envNames = {}
allNames = set()
hasErrors = False
hasChanges = False
clashes = set()
for dirpath, dirnames, filenames in os.walk(path):
for filename in filenames:
filepath = os.path.join(dirpath, filename)
if not filename.endswith(".nomadmetainfo.json"):
if not filename.endswith(".bk") and not filename.startswith("."):
s("skipping ")
s(filepath)
s("\n")
continue
fileRes = normalizeFile(filepath, checkOnly=checkOnly, force=force,
addGid=addGid, addSubGids=addSubGids, extraArgsHandling=extraArgsHandling, s=s)
hasChanges = hasChanges or fileRes.get("hasChanges", False)
hasErrors = hasErrors or fileRes.get("hasErrors", False)
newNames = fileRes.get("env").noDepNames()
envNames[filepath] = newNames
newClashes = allNames.intersection(newNames)
allNames.update(newNames)
clashes.update(newClashes)
detailedClashes = {}
for filePath, names in envNames.items():
currentClashes = names.intersection(clashes)
if currentClashes:
detailedClashes[filePath]=currentClashes
if clashes:
hasErrors = True
s(repr(path))
s(", detected clashes:\n")
for filePath, fileClashes in detailedClashes.items():
s(" ")
s(filePath)
s(":\n ")
jsonCompactD(list(fileClashes), s)
s("\n")
return {"path": path, "hasErrors": hasErrors, "hasChanges": hasChanges}
def normalizePaths(paths, checkOnly, force, addGid, addSubGids, extraArgsHandling, s = sys.stderr.write):
"""Normalizes all the given paths"""
hasErrors = False
hasChanges = False
messages = []
for path in paths:
messages.append(path)
res = normalizePath(path, checkOnly=checkOnly, force=force,
addGid=addGid, addSubGids=addSubGids, extraArgsHandling=extraArgsHandling, s=s)
errors = res.get("hasErrors", True)
hasErrors = hasErrors or errors
if not errors:
messages.append(": OK")
else:
messages.append(": ERROR")
changes = res.get("hasChanges", False)
hasChanges = hasChanges or changes
if changes:
messages.append(", with changes\n")
else:
messages.append(", no changes\n")
s("\n")
for m in messages:
s(m)
s("\n")
if not hasErrors:
s("OK")
else:
s("ERROR")
if hasChanges:
s(", with changes\n")
if checkOnly:
s(" (run performed with --check-only, no changes were committed to the filesystem)\n")
else:
s(", no changes\n")
return {"paths": paths, "hasErrors": hasErrors, "hasChanges": hasChanges}
if __name__ == "__main__":
defaultDir = os.path.normpath(os.path.join(basePath, "../nomad-meta-info/nomad_meta_info"))
usage = """usage: {command} [--check-only] [--force] [--add-gid] [--add-sub-gids] [--keep-extra-args] [--remove-extra-args] [--error-if-extra-args] [file/or/dir ...]
normalizes the InfoKinds file/or/dir (that defalts to {dir})
""".format(command=os.path.basename(sys.argv[0]), dir=defaultDir)
checkOnly = False
force = False
addGid = False
addSubGids = False
paths = []
extraArgsHandling = InfoKindEl.ADD_EXTRA_ARGS
for arg in sys.argv[1:]:
if arg == "--check-only":
checkOnly = True
elif arg == "--force":
force = True
elif arg == "--add-gid":
addGid = True
elif arg == "--add-sub-gids":
addSubGids = True
elif arg == "--add-extra-args":
extraArgsHandling = InfoKindEl.ADD_EXTRA_ARGS
elif arg == "--remove-extra-args":
extraArgsHandling = InfoKindEl.IGNORE_EXTRA_ARGS
elif arg == "--error-if-extra-args":
extraArgsHandling = InfoKindEl.RAISE_IF_EXTRA_ARGS
elif arg == "--help":
sys.stdout.write(usage)
sys.exit(0)
elif os.path.exists(arg):
paths.append(arg)
else:
sys.stdout.write("Error: invalid argument ")
sys.stdout.write(repr(arg))
sys.stdout.write("\n")
sys.stdout.write(usage)
sys.exit(1)
if not paths:
paths.append(defaultDir)
normalizePaths(paths, checkOnly=checkOnly, force=force, addGid=addGid, addSubGids=addSubGids, extraArgsHandling=extraArgsHandling, s = sys.stderr.write)