from __future__ import print_function
from __future__ import division

import sys
import osc
import osc.conf
import osc.core
import osc.oscerr

from functools import partial
from xml.etree import ElementTree

if sys.version_info[0] < 3:
    def decode_it(arg):
        return arg
else:
    from osc.util.helper import decode_it

known_microarchs = {"sandybridge", "haswell", "skylake"}

package_attributes = ["MPCDF:enable_repositories"]
config_attributes = ["MPCDF:compiler_modules", "MPCDF:cuda_modules", "MPCDF:mpi_modules", "MPCDF:pgi_modules"]
default_attributes = ["MPCDF:default_compiler", "MPCDF:default_cuda", "MPCDF:default_mpi",
                      "MPCDF:default_python2", "MPCDF:default_python3"]

intel_parallel_studio = {
    "mpcdf_intel_parallel_studio_2017_7": {"compiler": "intel_17_0_7", "impi": "impi_2017_4", "mkl": "mkl_2017_4-module"},
    "mpcdf_intel_parallel_studio_2018_1": {"compiler": "intel_18_0_1", "impi": "impi_2018_1", "mkl": "mkl_2018_1-module"},
    "mpcdf_intel_parallel_studio_2018_2": {"compiler": "intel_18_0_2", "impi": "impi_2018_2", "mkl": "mkl_2018_2-module"},
    "mpcdf_intel_parallel_studio_2018_3": {"compiler": "intel_18_0_3", "impi": "impi_2018_3", "mkl": "mkl_2018_3-module"},
    "mpcdf_intel_parallel_studio_2018_4": {"compiler": "intel_18_0_5", "impi": "impi_2018_4", "mkl": "mkl_2018_4-module"},
    "mpcdf_intel_parallel_studio_2019_0": {"compiler": "intel_19_0_0", "impi": "impi_2019_0", "mkl": "mkl_2019_0-module"},
    "mpcdf_intel_parallel_studio_2019_1": {"compiler": "intel_19_0_1", "impi": "impi_2019_1", "mkl": "mkl_2019_1-module"},
    "mpcdf_intel_parallel_studio_2019_3": {"compiler": "intel_19_0_3", "impi": "impi_2019_3", "mkl": "mkl_2019_3-module"},
    "mpcdf_intel_parallel_studio_2019_4": {"compiler": "intel_19_0_4", "impi": "impi_2019_4", "mkl": "mkl_2019_4-module"},
    "mpcdf_intel_parallel_studio_2019_5": {"compiler": "intel_19_0_5", "impi": "impi_2019_5", "mkl": "mkl_2019_5-module"},
}

all_mkls = {ic["mkl"] for ic in intel_parallel_studio.values()}

# Some rotations
mpi_parallel_studio = {value["impi"]: dict({"ps": key}, **value)
                       for key, value in intel_parallel_studio.items()}
compiler_parallel_studio = {value["compiler"]: dict({"ps": key}, **value)
                            for key, value in intel_parallel_studio.items()}

# For the autogenerated software/software:*:* project 'prjconf' sections
prjconf_start_marker = "# Autogenerated by osc mpcdf_setup_repos, do not edit till end of section\n"
prjconf_end_marker = "# End of autogenerated section\n"


def compiler_module(compiler_repo):
    return compiler_repo.replace("_", "/", 1).replace("_", ".")


def mpi_module(mpi_repo):
    return mpi_repo.replace("_", "/", 1).replace("_", ".")


def valid_pgi_mpi(pgi, mpi):
    if "impi" not in mpi:
        return False
    if "2017" in mpi:
        return False
    return True
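
# Illustrative note (not part of the original module): compiler_module() and
# mpi_module() above map an OBS repository name to the corresponding
# environment-module name by replacing the first underscore with a slash and
# all remaining underscores with dots, e.g.
#
#     compiler_module("intel_18_0_5")  ->  "intel/18.0.5"
#     mpi_module("impi_2018_4")        ->  "impi/2018.4"
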

def valid_mpi(compiler, mpi):
    """
    It might be possible to use Intel MPI libraries and compilers from
    different Parallel Studio packages, but I currently do not want to
    support that.

    Take care to keep this in sync with the file 'macros.obs_cluster' of
    the package software:dist / mpcdf_cluster_macros
    """
    if compiler.startswith("intel") and mpi.startswith("impi"):
        return mpi == compiler_parallel_studio[compiler]["impi"]
    if compiler.startswith("pgi"):
        return valid_pgi_mpi(compiler, mpi)
    else:
        return True


def prefers(reponame):
    prefer_ps = None
    if reponame in compiler_parallel_studio:
        prefer_ps = compiler_parallel_studio[reponame]["ps"]
    else:
        for mpi in mpi_parallel_studio:
            if reponame.startswith(mpi):
                prefer_ps = mpi_parallel_studio[mpi]["ps"]
    if prefer_ps:
        preferred_mkl = intel_parallel_studio[prefer_ps]["mkl"]
        unprefer_other_mkls = ("!" + mkl for mkl in all_mkls if mkl != preferred_mkl)
        return (prefer_ps,) + tuple(unprefer_other_mkls) + (preferred_mkl,)
    else:
        return ()


def valid_cuda(cuda, compiler):
    """
    The CUDA distribution only works with certain gcc versions; this little
    function takes care that only supported combinations are allowed.

    Take care to keep this in sync with the file 'macros.obs_cluster' of
    the package software:dist / mpcdf_cluster_macros
    """
    if cuda == "cuda_8_0":
        return compiler == "gcc_5"
    if cuda == "cuda_9_1":
        return compiler == "gcc_6_3_0"
    if cuda == "cuda_9_2":
        return compiler == "gcc_6"
    if cuda == "cuda_10_0":
        return compiler == "gcc_6"
    if cuda == "cuda_10_1":
        return compiler == "gcc_8"
    return False


def project_meta(api_url, project):
    return ElementTree.fromstringlist(osc.core.show_project_meta(api_url, project))


def package_meta(api_url, project, package):
    return ElementTree.fromstringlist(osc.core.show_package_meta(api_url, project, package))


def maintainers(api_url, project, package):
    root = package_meta(api_url, project, package)
    return {e.get("userid") for e in root.findall("./person[@role='maintainer']")}


class UnsetAttributeException(Exception):
    pass


def chunked(l, chunksize):
    n = len(l)
    i = 0
    while i * chunksize < n:
        yield l[i * chunksize: (i + 1) * chunksize]
        i += 1


def get_attribute(api_url, project, package, attribute, with_project=False):
    attribute_meta = osc.core.show_attribute_meta(api_url, project, package, None, attribute, False, with_project)
    if attribute_meta is None:
        raise osc.oscerr.APIError("Cannot fetch value for attribute '{0}' from {1}".format(attribute, (project, package)))
    root = ElementTree.fromstringlist(attribute_meta)
    attribute = root.find("./attribute")
    if attribute is not None:
        return root
    else:
        raise UnsetAttributeException("Attribute not set")


def get_attribute_values(api_url, project, package, attribute, with_project=False):
    root = get_attribute(api_url, project, package, attribute, with_project)
    attribute = root.find("./attribute")
    return list(value.text for value in attribute.findall("./value"))


def get_attribute_value(api_url, project, package, attribute, with_project=False):
    value, = get_attribute_values(api_url, project, package, attribute, with_project=with_project)
    return value


def set_attribute(api_url, project, package, attribute):
    path = ["source", project]
    if package:
        path.append(package)
    path.append("_attribute")
    attr_meta = ElementTree.tostring(attribute, encoding=osc.core.ET_ENCODING)
    url = osc.core.makeurl(api_url, path)
    resp = ElementTree.fromstringlist(osc.core.streamfile(url, osc.core.http_POST, data=attr_meta))
    if resp.find("./summary").text != "Ok":
        raise osc.oscerr.APIError("Could not store attribute")
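
# Illustrative note (not part of the original module): set_attribute_values()
# below builds and posts an _attribute XML document of the following shape,
# shown here for the hypothetical call
# set_attribute_values(api_url, "software", None, "MPCDF:compiler_modules", ["gcc_6", "intel_18_0_5"]):
#
#   <attributes>
#     <attribute namespace="MPCDF" name="compiler_modules">
#       <value>gcc_6</value>
#       <value>intel_18_0_5</value>
#     </attribute>
#   </attributes>
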

def set_attribute_values(api_url, project, package, attribute, values):
    root = ElementTree.Element("attributes")
    attr = ElementTree.SubElement(root, "attribute")
    namespace, name = attribute.split(":")
    attr.set("namespace", namespace)
    attr.set("name", name)
    for value in values:
        v = ElementTree.SubElement(attr, "value")
        v.text = value
    set_attribute(api_url, project, package, root)


def has_attribute(api_url, project, package, attribute):
    path = ["source", project]
    if package:
        path.append(package)
    path.append("_attribute")
    path.append(attribute)
    url = osc.core.makeurl(api_url, path)
    resp = ElementTree.fromstringlist(osc.core.streamfile(url, osc.core.http_GET))
    namespace, name = attribute.split(":")
    for a in resp.findall("./attribute"):
        if a.get("namespace") == namespace and a.get("name") == name:
            return True
    return False


def remove_attribute(api_url, project, package, attribute_name):
    path = ["source", project]
    if package:
        path.append(package)
    path.append("_attribute")
    path.append(attribute_name)
    url = osc.core.makeurl(api_url, path)
    resp = ElementTree.fromstringlist(osc.core.streamfile(url, osc.core.http_DELETE))
    if resp.find("./summary").text != "Ok":
        raise osc.oscerr.APIError("Could not remove attribute")


def get_microarchitecture(project):
    if project == "software":
        # Stupid special case
        microarch = "sandybridge"
    elif project.startswith("software:"):
        microarch = project.split(":")[2]
    elif project.startswith("home:"):
        microarch = "sandybridge"
    else:
        raise Exception("Cannot determine micro-architecture for project '{0}'".format(project))
    if microarch in known_microarchs:
        return microarch
    else:
        raise Exception("Unknown micro-architecture '{0}'".format(microarch))
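
# Illustrative note (not part of the original module): get_microarchitecture()
# above expects the "software:<release>:<microarchitecture>" naming scheme; the
# project names below are hypothetical examples of that pattern:
#
#     get_microarchitecture("software:some_release:skylake")  ->  "skylake"
#     get_microarchitecture("software")                       ->  "sandybridge"
#     get_microarchitecture("home:someuser")                  ->  "sandybridge"
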

def mpcdf_enable_repositories(api_url, project, package, verbose=False, filter_repos=None):
    from itertools import product
    import sys

    if filter_repos is None:
        filter_repos = ()

    pkg_meta = osc.core.show_package_meta(api_url, project, package)
    root = ElementTree.fromstringlist(pkg_meta)
    build = root.find("./build")
    if build is None:
        build = ElementTree.SubElement(root, "build")
    pkg_meta = ElementTree.tostring(root, encoding=osc.core.ET_ENCODING)

    for enable in build.findall("./enable"):
        build.remove(enable)

    try:
        enable_repos = get_attribute_values(api_url, project, package, "MPCDF:enable_repositories")
    except Exception:
        if verbose:
            print("Warning: Could not get attribute MPCDF:enable_repositories for package {0}, skipping".format(package))
        return False

    def try_get_attribute(package, attribute, with_project=False):
        try:
            return get_attribute_values(api_url, project, package, "MPCDF:" + attribute, with_project=with_project)
        except UnsetAttributeException:
            print("ERROR: Attribute MPCDF:" + attribute + " is not set for "
                  + ("package '{0}'".format(package) if package else "project '{0}'".format(project))
                  + ", aborting here", file=sys.stderr)
            raise SystemExit(1)

    compilers = try_get_attribute(package, "compiler_modules", with_project=True)
    mpis = try_get_attribute(package, "mpi_modules", with_project=True)
    cudas = try_get_attribute(package, "cuda_modules", with_project=True)
    pgis = try_get_attribute(package, "pgi_modules", with_project=True)

    all_compilers = try_get_attribute(None, "compiler_modules")
    all_mpis = try_get_attribute(None, "mpi_modules")
    all_cudas = try_get_attribute(None, "cuda_modules")
    all_pgis = try_get_attribute(None, "pgi_modules")

    default_compilers = try_get_attribute(None, "default_compiler")
    default_mpis = try_get_attribute(None, "default_mpi")
    default_cudas = try_get_attribute(None, "default_cuda")

    def sort_key(string):
        name, *version = string.split("_")
        return (name,) + tuple(map(int, version))

    latest_intel = sorted((c for c in all_compilers if c.startswith("intel")), key=sort_key)[-1]
    latest_gcc = sorted((c for c in all_compilers if c.startswith("gcc")), key=sort_key)[-1]

    def enable(name):
        if any(filtered_repo in name for filtered_repo in filter_repos):
            return
        node = ElementTree.Element("enable")
        node.set("repository", name)
        node.tail = "\n "
        build.insert(0, node)
        if verbose:
            print("Enabling", name)

    def actual_compilers():
        for compiler in (c for c in compilers if c in all_compilers + ["default_compiler", "intel", "gcc", "latest_intel", "latest_gcc"]):
            if compiler == "intel":
                for intel_compiler in [cc for cc in all_compilers if cc.startswith("intel")]:
                    yield intel_compiler
            elif compiler == "gcc":
                for gcc_compiler in [cc for cc in all_compilers if cc.startswith("gcc")]:
                    yield gcc_compiler
            elif compiler == "default_compiler":
                for default_compiler in default_compilers:
                    yield default_compiler
            elif compiler == "latest_intel":
                yield latest_intel
            elif compiler == "latest_gcc":
                yield latest_gcc
            else:
                yield compiler

    def actual_mpis():
        for mpi in (m for m in mpis if m in all_mpis + ["default_mpi", "impi"]):
            if mpi == "impi":
                for impi in [m for m in mpis if m.startswith("impi")]:
                    yield impi
            elif mpi == "default_mpi":
                for default_mpi in default_mpis:
                    yield default_mpi
            else:
                yield mpi

    def actual_cudas():
        for cuda in (c for c in cudas if c in all_cudas + ["default_cuda"]):
            if cuda == "default_cuda":
                for default_cuda in default_cudas:
                    yield default_cuda
            else:
                yield cuda

    def actual_pgis():
        for pgi in (p for p in pgis if p in all_pgis):
            yield pgi

    for flag in enable_repos:
        if flag == "system":
            enable("System")
        if flag == "compilers":
            for compiler in actual_compilers():
                enable(compiler)
        if flag == "mpi":
            for mpi, compiler in product(actual_mpis(), actual_compilers()):
                if valid_mpi(compiler, mpi):
                    enable(mpi + "_" + compiler)
        if flag == "cuda":
            for cuda, compiler in product(actual_cudas(), all_compilers):
                if valid_cuda(cuda, compiler):
                    enable(cuda + "_" + compiler)
        if flag == "cuda_mpi":
            for cuda, mpi, compiler in product(actual_cudas(), actual_mpis(), all_compilers):
                if valid_cuda(cuda, compiler) and valid_mpi(compiler, mpi):
                    enable(cuda + "_" + mpi + "_" + compiler)
        if flag == "pgi":
            for pgi in actual_pgis():
                enable(pgi)
        if flag == "pgi_mpi":
            for mpi, pgi in product(actual_mpis(), actual_pgis()):
                if valid_pgi_mpi(pgi, mpi):
                    enable(mpi + "_" + pgi)

    children = list(build)
    if len(children) > 0:
        children[-1].tail = "\n "

    new_pkg_meta = ElementTree.tostring(root, encoding=osc.core.ET_ENCODING)
    if pkg_meta != new_pkg_meta:
        print("Updating repositories for", package)
        osc.core.edit_meta("pkg", (project, package), data=new_pkg_meta)

    return True
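
# Illustrative sketch (not part of the original module): with hypothetical
# attribute values such as
#
#     MPCDF:enable_repositories  = ["compilers", "mpi"]
#     MPCDF:compiler_modules     = ["latest_intel"]
#     MPCDF:mpi_modules          = ["impi_2018_4"]
#
# and assuming the project-wide compiler list ends at intel_18_0_5,
# mpcdf_enable_repositories() above would insert <enable/> elements for the
# repositories "intel_18_0_5" and "impi_2018_4_intel_18_0_5" into the package
# meta, since valid_mpi() pairs impi_2018_4 with intel_18_0_5.
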

def mpcdf_setup_repositories(api_url, project, microarchitecture=None, distribution=None, parent=None,
                             packages=None, dry_run=False, filter_repos=None, only_project=False, remove_old=False):
    import threading

    if parent:
        for attribute in config_attributes + default_attributes:
            print("Copying attribute '{0}' from parent project".format(attribute))
            set_attribute(api_url, project, None, get_attribute(api_url, parent, None, attribute))

    compilers = get_attribute_values(api_url, project, None, "MPCDF:compiler_modules")
    mpis = get_attribute_values(api_url, project, None, "MPCDF:mpi_modules")
    cudas = get_attribute_values(api_url, project, None, "MPCDF:cuda_modules")
    pgis = get_attribute_values(api_url, project, None, "MPCDF:pgi_modules")

    default_python2 = get_attribute_value(api_url, project, None, "MPCDF:default_python2")
    default_python3 = get_attribute_value(api_url, project, None, "MPCDF:default_python3")

    if distribution is None:
        # Get the existing value from the project meta
        dist_repo = project_meta(api_url, project if parent is None else parent).find(
            "./repository[@name='System']/path[@project='distributions']")
        if dist_repo is not None:
            distribution = dist_repo.get("repository")
        else:
            raise osc.oscerr.WrongArgs("ERROR: Specify distribution or parent project")
        print("Using '{0}' as base distribution".format(distribution))

    distributions = project_meta(api_url, "distributions")
    dist_repo = distributions.find('./repository[@name="{0}"]'.format(distribution))
    if dist_repo is None:
        raise osc.oscerr.WrongArgs("No repository '{0}' is defined in project 'distributions' on the server".format(distribution))
    architectures = list(arch.text for arch in dist_repo.findall("./arch"))

    if microarchitecture is None:
        microarchitecture = get_microarchitecture(project)

    root = project_meta(api_url, project)

    prjconf = list(map(decode_it, osc.core.show_project_conf(api_url, project)))
    try:
        start = prjconf.index(prjconf_start_marker)
        end = prjconf.index(prjconf_end_marker)
    except ValueError:
        start = None
        end = len(prjconf)

    prjconf_head = "".join(prjconf[:start])
    prjconf_tail = "".join(prjconf[end + 1:])

    prjconf = [prjconf_start_marker]
    prjconf.append("Constraint: hostlabel {0}".format(microarchitecture))
    prjconf.append("""
%if %_repository != System
Macros:
%microarchitecture {0}
:Macros
%endif""".format(microarchitecture))
    prjconf.append("")
    prjconf.append("Prefer: mpcdf_python2_" + default_python2)
    prjconf.append("Prefer: mpcdf_python3_" + default_python3)
    prjconf.append("")

    # Remove existing repositories
    if remove_old:
        for oldrepo in root.findall("./repository"):
            root.remove(oldrepo)

    def repo(name, *dependencies, **kwargs):
        is_compiler = kwargs.pop("compiler", False)
        is_mpi = kwargs.pop("mpi", False)
        is_cuda = kwargs.pop("cuda", False)
        is_cuda_mpi = kwargs.pop("cuda_mpi", False)
        cuda_repo = kwargs.pop("cuda_repo", "")

        have_compiler = is_compiler or is_mpi or is_cuda or is_cuda_mpi
        have_mpi = is_mpi or is_cuda_mpi
        have_cuda = is_cuda or is_cuda_mpi

        existing_repo = root.find("./repository[@name='{0}']".format(name))
        if existing_repo is not None:
            root.remove(existing_repo)
        else:
            print("New repository", name)

        r = ElementTree.SubElement(root, "repository")
        r.set("name", name)
        r.text = "\n "

        def path(project, repo):
            p = ElementTree.SubElement(r, "path")
            p.set("project", project)
            p.set("repository", repo)
            p.tail = "\n "

        if parent:
            path(parent, name)
        for dep_project, dep_repo in dependencies:
            path(dep_project, dep_repo)

        for arch in architectures:
            a = ElementTree.SubElement(r, "arch")
            a.text = arch
            a.tail = "\n "
        a.tail = "\n "
        r.tail = "\n "

        # In order to be able to figure out the matching MKL version for a given
        # compiler/MPI repository we emit a new macro '%matching_mkl_version' in
        # the cases where this makes sense
        matching_mkl = []

        prjconf.append("%if %_repository == {0}".format(name))
        for prefer in prefers(name):
            if prefer.startswith("mkl_"):
                matching_mkl.append(prefer)
            prjconf.append("Prefer: " + prefer)
        prjconf.append("Macros:")
        prjconf.append("%is_compiler_repository {0}".format(1 if is_compiler else 0))
        prjconf.append("%is_mpi_repository {0}".format(1 if is_mpi else 0))
        prjconf.append("%is_cuda_repository {0}".format(1 if is_cuda else 0))
        prjconf.append("%is_cuda_mpi_repository {0}".format(1 if is_cuda_mpi else 0))
        prjconf.append("%have_mpcdf_compiler {0}".format(1 if have_compiler else 0))
        prjconf.append("%have_mpcdf_mpi {0}".format(1 if have_mpi else 0))
        prjconf.append("%have_mpcdf_cuda {0}".format(1 if have_cuda else 0))
        if is_cuda:
            prjconf.append("%cuda_repository {0}".format(cuda_repo))
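
        # Illustrative note (not part of the original code): for a repository
        # named e.g. "impi_2018_4_intel_18_0_5", prefers() returns the matching
        # Parallel Studio package plus "mkl_2018_4-module" (and "!..." entries
        # for all other MKLs), so the single "mkl_*" entry collected above is
        # turned into the macro value "2018.4" below.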
        if matching_mkl:
            matching_mkl, = matching_mkl
            matching_mkl, _ = matching_mkl[len("mkl_"):].split("-module")
            matching_mkl = matching_mkl.replace("_", ".")
            prjconf.append("%matching_mkl_version {0}".format(matching_mkl))
        for macro, value in kwargs.items():
            prjconf.append("%{0} {1}".format(macro, value))
        prjconf.append(":Macros")
        prjconf.append("%endif")
        prjconf.append("")

    if parent:
        repo("System")
    else:
        repo("System", ("distributions", distribution))

    for compiler in compilers + pgis:
        repo(compiler, (project, "System"), compiler=True,
             compiler_repository=compiler, compiler_module=compiler_module(compiler))
        for mpi in filter(partial(valid_mpi, compiler), mpis):
            repo(mpi + "_" + compiler, (project, compiler), mpi=True,
                 mpi_repository=mpi, mpi_module=mpi_module(mpi))

    for cuda in cudas:
        for compiler in filter(partial(valid_cuda, cuda), compilers):
            repo(cuda + "_" + compiler, (project, compiler), cuda=True, cuda_repo=cuda)
            for mpi in filter(partial(valid_mpi, compiler), mpis):
                repo(cuda + "_" + mpi + "_" + compiler,
                     (project, cuda + "_" + compiler),
                     (project, mpi + "_" + compiler),
                     cuda_mpi=True)

    # Make sure there is a project-level <build> section that disables building by default
    build = root.find("./build")
    if build is None:
        build = ElementTree.Element("build")
        build.text = "\n "
        disable = ElementTree.SubElement(build, "disable")
        disable.tail = "\n "
        build.tail = "\n "
        root.insert(list(root).index(root.find("./repository")), build)

    list(root)[-1].tail = "\n"
    prj = ElementTree.tostring(root, encoding=osc.core.ET_ENCODING)

    prjconf.append(prjconf_end_marker)
    prjconf = prjconf_head + "\n".join(prjconf) + prjconf_tail

    if dry_run:
        # NOTE: the dry-run branch is garbled in the recovered source; printing the
        # generated project meta and prjconf and returning early is an assumption.
        print("osc meta prj {0} -F - <".format(project))
        print(prj)
        print("osc meta prjconf {0} -F - <".format(project))
        print(prjconf)
        return

    # Set the per-package build flags first, that way no spurious builds are launched
    if packages is None:
        packages = osc.core.meta_get_packagelist(api_url, project)

    if len(packages) > 40:
        chunksize = len(packages) // 20
    else:
        chunksize = len(packages)

    def work(packagelist):
        for package in packagelist:
            if not mpcdf_enable_repositories(api_url, project, package, filter_repos=filter_repos):
                print("ATTENTION: Not changing unmanaged package {0}".format(package))

    threads = []
    for packagelist in chunked(packages, chunksize):
        t = threading.Thread(target=work, args=(packagelist,))
        threads.append(t)
        t.start()
    for t in threads:
        t.join()

    # Update repositories
    print("Updating prj meta")
    osc.core.edit_meta("prj", project, data=prj, force=True)
    print("Updating prjconf meta")
    osc.core.edit_meta("prjconf", project, data=prjconf)
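
# Illustrative note (not part of the original module): set_as_branch() below
# writes a plain OBS _link file; with hypothetical arguments it would look like
#
#   <link project="software" package="somepkg" baserev="...srcmd5 of somepkg...">
#     <patches>
#       <branch />
#     </patches>
#   </link>
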

def set_as_branch(api_url, my_project, my_package, main_project, main_package):
    import os

    def dirmeta(project, package):
        url = osc.core.makeurl(api_url, ["source", project, package])
        return ElementTree.fromstringlist(osc.core.streamfile(url, osc.core.http_GET))

    srcmeta = dirmeta(my_project, my_package)
    dstmeta = dirmeta(main_project, main_package)

    linkinfo = srcmeta.find('./linkinfo')
    if linkinfo is not None:
        print("ERROR: package {0} is already linked to package {1} in project {2}".format(
            my_package, linkinfo.get("package"), linkinfo.get("project")))
        return False

    linkfile = ElementTree.Element("link")
    linkfile.set("project", main_project)
    linkfile.set("package", main_package)
    linkfile.set("baserev", dstmeta.get("srcmd5"))
    patches = ElementTree.SubElement(linkfile, "patches")
    ElementTree.SubElement(patches, "branch")

    url = osc.core.makeurl(api_url, ["source", my_project, my_package, "_link"])
    resp = ElementTree.fromstringlist(osc.core.streamfile(url, osc.core.http_PUT, data=ElementTree.tostring(linkfile)))
    rev = resp.get("rev")
    if rev is None:
        print("ERROR: Could not commit _link file")
        return False
    else:
        print("Committed as revision", rev)

    this_package = osc.core.store_read_package(os.curdir)
    this_project = osc.core.store_read_project(os.curdir)
    if this_package == my_package and this_project == my_project:
        pac = osc.core.filedir_to_pac(os.curdir)
        rev = pac.latest_rev(expand=True)
        pac.update(rev)

    return True
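
# Illustrative note (not part of the original module): by default,
# sync_projects() below targets every "software:*" project on the server except
# "software:dist" and "software:images", branches packages that exist only in
# the source project, copies the MPCDF:* attributes over, and optionally adds
# missing maintainers.
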

def sync_projects(api_url, package=None, from_project="software", to_projects=None, redo_all=False, add_to_maintainers=True):
    if package is not None and redo_all:
        raise osc.oscerr.WrongArgs('Cannot specify `redo_all` and package')

    if to_projects is None:
        to_projects = [p for p in osc.core.meta_get_project_list(api_url)
                       if p.startswith("software:") and not (p == "software:dist" or p == "software:images")]

    for to_project in to_projects:
        print("Syncing {0} with {1}".format(to_project, from_project))

        to_packages = osc.core.meta_get_packagelist(api_url, to_project)
        if package is None:
            from_packages = osc.core.meta_get_packagelist(api_url, from_project)
            if not redo_all:
                from_packages = list(sorted(set(from_packages) - set(to_packages)))
        else:
            from_packages = [package]

        for attribute in config_attributes:
            try:
                get_attribute(api_url, to_project, None, attribute)
            except Exception:
                attr = get_attribute(api_url, from_project, None, attribute)
                print("Setting attribute", attribute)
                set_attribute(api_url, to_project, None, attr)

        for orig_package in from_packages:
            if orig_package not in to_packages:
                filelist = osc.core.meta_get_filelist(api_url, from_project, orig_package)
                if "_link" in filelist:
                    print("Not branching package {0}, is a link".format(orig_package))
                else:
                    print("Branching package {0}".format(orig_package))
                    exists, targetprj, targetpkg, srcprj, srcpkg = \
                        osc.core.branch_pkg(api_url, from_project, orig_package,
                                            target_project=to_project, nodevelproject=True)
            else:
                print("Not branching package {0}, already present in target".format(orig_package))

            for attribute in package_attributes + config_attributes:
                try:
                    attr = get_attribute(api_url, from_project, orig_package, attribute)
                except UnsetAttributeException:
                    if has_attribute(api_url, to_project, orig_package, attribute):
                        remove_attribute(api_url, to_project, orig_package, attribute)
                    continue
                set_attribute(api_url, to_project, orig_package, attr)

            if add_to_maintainers:
                from_maintainers = maintainers(api_url, from_project, orig_package)
                if from_maintainers:
                    to_maintainers = maintainers(api_url, to_project, orig_package)
                    if from_maintainers - to_maintainers:
                        new_maintainers = from_maintainers - to_maintainers
                        to_meta = package_meta(api_url, to_project, orig_package)
                        for userid in sorted(new_maintainers):
                            person = ElementTree.Element("person")
                            person.set("userid", userid)
                            person.set("role", "maintainer")
                            to_meta.insert(2, person)
                        osc.core.edit_meta("pkg", (to_project, orig_package), data=ElementTree.tostring(to_meta))

        if package is None and redo_all:
            # Check if distribution is already set in to_project
            to_prj_meta = project_meta(api_url, to_project)
            sys_repo = to_prj_meta.find('./repository[@name="System"]')
            if sys_repo is None:
                distribution = project_meta(api_url, from_project).find(
                    './repository[@name="System"]/path[@project="distributions"]').get("repository")
            else:
                distribution = sys_repo.find('./path[@project="distributions"]').get("repository")
            print("Creating repository configuration")
            mpcdf_setup_repositories(api_url, to_project, distribution=distribution)
        else:
            for orig_package in from_packages:
                if not mpcdf_enable_repositories(api_url, to_project, orig_package):
                    print("ATTENTION: Not changing unmanaged package {0}".format(orig_package))
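

# Illustrative usage sketch (not part of the original module): these helpers are
# meant to be driven from an osc plugin command.  Assuming a configured osc and
# purely hypothetical project/distribution names, a dry run could look like:
#
#     import osc.conf
#     osc.conf.get_config()
#     api_url = osc.conf.config["apiurl"]
#     mpcdf_setup_repositories(api_url, "software:some_release:skylake",
#                              distribution="some_release", dry_run=True)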