diff --git a/mpcdf_common.py b/mpcdf_common.py
index ca774127d40a0c0ee3cbeaaf3f68c0ffb242c0f3..45210ba4f4e95d7b0d22f4beab02485b0ddac2af 100644
--- a/mpcdf_common.py
+++ b/mpcdf_common.py
@@ -20,7 +20,7 @@ known_microarchs = {"sandybridge", "haswell", "skylake"}
 
 package_attributes = ["MPCDF:enable_repositories"]
 config_attributes = ["MPCDF:compiler_modules", "MPCDF:cuda_modules", "MPCDF:mpi_modules", "MPCDF:pgi_modules"]
-default_attributes = ["MPCDF:default_compiler", "MPCDF:default_cuda", "MPCDF:default_mpi", "MPCDF:default_python2", "MPCDF:default_python3"]
+default_attributes = ["MPCDF:default_compiler", "MPCDF:default_cuda", "MPCDF:default_mpi"]
 
 intel_parallel_studio = {
     "mpcdf_intel_parallel_studio_2017_7": {"compiler": "intel_17_0_7", "impi": "impi_2017_4", "mkl": "mkl_2017_4-module", },
@@ -161,7 +161,7 @@ def prefers(reponame):
 
     if prefer_ps:
         preferred_mkl = intel_parallel_studio[prefer_ps]["mkl"]
-        unprefer_other_mkls = ("!" + mkl for mkl in all_mkls if mkl != preferred_mkl)
+        unprefer_other_mkls = sorted("!" + mkl for mkl in all_mkls if mkl != preferred_mkl)
         return (prefer_ps,) + tuple(unprefer_other_mkls) + (preferred_mkl,)
     else:
         return ()
@@ -208,6 +208,10 @@ class UnsetAttributeException(Exception):
     pass
 
 
+class UnmanagedPackageException(Exception):
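+    """Raised when the MPCDF:enable_repositories attribute cannot be read for a package."""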
+    pass
+
+
 def chunked(l, chunksize):
     n = len(l)
     i = 0
@@ -229,6 +233,40 @@ def get_attribute(api_url, project, package, attribute, with_project=False):
         raise UnsetAttributeException("Attribute not set")
 
 
+def overloaded_project_attribute(api_url, project, attribute):
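+    # Fall back to the top-level "software" project when the attribute is not set on the project itself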
+    try:
+        return get_attribute_values(api_url, project, None, attribute)
+    except UnsetAttributeException:
+        return get_attribute_values(api_url, "software", None, attribute)
+
+
+def overloaded_package_attribute(api_url, project, package, attribute):
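+    # Look up the attribute on the package first, then on the project, then on the top-level "software" project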
+    try:
+        return get_attribute_values(api_url, project, package, attribute)
+    except UnsetAttributeException:
+        pass
+
+    try:
+        return get_attribute_values(api_url, project, None, attribute)
+    except UnsetAttributeException:
+        pass
+
+    return get_attribute_values(api_url, "software", None, attribute)
+
+
+def get_allowed_attribute_values(api_url, attribute):
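+    # Fetch the attribute definition (its _meta) and return the values listed under <allowed>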
+    path = ["attribute"] + attribute.split(":") + ["_meta"]
+    url = osc.core.makeurl(api_url, path, [])
+    try:
+        f = osc.core.http_GET(url)
+    except osc.core.HTTPError as e:
+        e.osc_msg = 'Error getting meta for attribute "{0}"'.format(attribute)
+        raise
+
+    root = ElementTree.fromstringlist(f.readlines())
+    return list(value.text for value in root.findall("./allowed/value"))
+
+
 def get_attribute_values(api_url, project, package, attribute, with_project=False):
     root = get_attribute(api_url, project, package, attribute, with_project)
     attribute = root.find("./attribute")
@@ -301,10 +339,7 @@ def remove_attribute(api_url, project, package, attribute_name):
 
 
 def get_microarchitecture(project):
-    if project == "software":
-        # Stupid special case
-        microarch = "sandybridge"
-    elif project.startswith("software:"):
+    if project.startswith("software:"):
         microarch = project.split(":")[2]
     elif project.startswith("home:"):
         microarch = "sandybridge"
@@ -323,13 +358,10 @@ def package_sort_key(string):
     return (name,) + tuple(map(int, version))
 
 
-def mpcdf_enable_repositories(api_url, project, package, verbose=False, filter_repos=None):
+def mpcdf_enable_repositories(api_url, project, package, verbose=False, ignore_repos=()):
     from itertools import product
     import sys
 
-    if filter_repos is None:
-        filter_repos = ()
-
     pkg_meta = osc.core.show_package_meta(api_url, project, package)
     root = ElementTree.fromstringlist(pkg_meta)
 
@@ -347,36 +379,31 @@ def mpcdf_enable_repositories(api_url, project, package, verbose=False, filter_r
     except Exception:
         if verbose:
             print("Warning: Could not get attribute MPCDF:enable_repositories for package {0}, skipping".format(package))
-        return False
+        raise UnmanagedPackageException()
 
-    def try_get_attribute(package, attribute, with_project=False):
-        try:
-            return get_attribute_values(api_url, project, package, "MPCDF:" + attribute, with_project=with_project)
-        except UnsetAttributeException:
-            print("ERROR: Attribute MPCDF:" + attribute, "is not set for",
-                  "package '{0}'".format(package) if package else "project '{0}'".format(project),
-                  "- aborting here", file=sys.stderr)
-            raise SystemExit(1)
+    if project == "software":
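+        # For the top-level "software" project the repositories are the plain distribution repositories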
+        distributions = sorted(repo.name for repo in osc.core.get_repos_of_project(api_url, "software"))
 
-    compilers = try_get_attribute(package, "compiler_modules", with_project=True)
-    mpis = try_get_attribute(package, "mpi_modules", with_project=True)
-    cudas = try_get_attribute(package, "cuda_modules", with_project=True)
-    pgis = try_get_attribute(package, "pgi_modules", with_project=True)
+    else:
+        compilers = overloaded_package_attribute(api_url, project, package, "MPCDF:compiler_modules")
+        mpis = overloaded_package_attribute(api_url, project, package, "MPCDF:mpi_modules")
+        cudas = overloaded_package_attribute(api_url, project, package, "MPCDF:cuda_modules")
+        pgis = overloaded_package_attribute(api_url, project, package, "MPCDF:pgi_modules")
 
-    all_compilers = try_get_attribute(None, "compiler_modules")
-    all_mpis = try_get_attribute(None, "mpi_modules")
-    all_cudas = try_get_attribute(None, "cuda_modules")
-    all_pgis = try_get_attribute(None, "pgi_modules")
+        all_compilers = overloaded_project_attribute(api_url, project, "MPCDF:compiler_modules")
+        all_mpis = overloaded_project_attribute(api_url, project, "MPCDF:mpi_modules")
+        all_cudas = overloaded_project_attribute(api_url, project, "MPCDF:cuda_modules")
+        all_pgis = overloaded_project_attribute(api_url, project, "MPCDF:pgi_modules")
 
-    default_compilers = try_get_attribute(None, "default_compiler")
-    default_mpis = try_get_attribute(None, "default_mpi")
-    default_cudas = try_get_attribute(None, "default_cuda")
+        default_compilers = overloaded_project_attribute(api_url, project, "MPCDF:default_compiler")
+        default_mpis = overloaded_project_attribute(api_url, project, "MPCDF:default_mpi")
+        default_cudas = overloaded_project_attribute(api_url, project, "MPCDF:default_cuda")
 
-    latest_intel = sorted((c for c in all_compilers if c.startswith("intel")), key=package_sort_key)[-1]
-    latest_gcc = sorted((c for c in all_compilers if c.startswith("gcc")), key=package_sort_key)[-1]
+        latest_intel = sorted((c for c in all_compilers if c.startswith("intel")), key=package_sort_key)[-1]
+        latest_gcc = sorted((c for c in all_compilers if c.startswith("gcc")), key=package_sort_key)[-1]
 
     def enable(name):
-        if any(filtered_repo in name for filtered_repo in filter_repos):
+        if name in ignore_repos:
             return
         node = ElementTree.Element("enable")
         node.set("repository", name)
@@ -427,37 +454,39 @@ def mpcdf_enable_repositories(api_url, project, package, verbose=False, filter_r
             yield pgi
 
     for flag in enable_repos:
+        if project == "software":
+            if flag == "system":
+                for distribution in distributions:
+                    enable(distribution)
 
-        if flag == "system":
-            enable("System")
-
-        if flag == "compilers":
-            for compiler in actual_compilers():
-                enable(compiler)
-
-        if flag == "mpi":
-            for mpi, compiler in product(actual_mpis(), actual_compilers()):
-                if valid_mpi(compiler, mpi):
-                    enable(mpi + "_" + compiler)
-
-        if flag == "cuda":
-            for cuda, compiler in product(actual_cudas(), all_compilers):
-                if valid_cuda(cuda, compiler):
-                    enable(cuda + "_" + compiler)
-
-        if flag == "cuda_mpi":
-            for cuda, mpi, compiler in product(actual_cudas(), actual_mpis(), all_compilers):
-                if valid_cuda(cuda, compiler) and valid_mpi(compiler, mpi):
-                    enable(cuda + "_" + mpi + "_" + compiler)
-
-        if flag == "pgi":
-            for pgi in actual_pgis():
-                enable(pgi)
-
-        if flag == "pgi_mpi":
-            for mpi, pgi in product(actual_mpis(), actual_pgis()):
-                if valid_pgi_mpi(pgi, mpi):
-                    enable(mpi + "_" + pgi)
+        else:
+            if flag == "compilers":
+                for compiler in actual_compilers():
+                    enable(compiler)
+
+            if flag == "mpi":
+                for mpi, compiler in product(actual_mpis(), actual_compilers()):
+                    if valid_mpi(compiler, mpi):
+                        enable(mpi + "_" + compiler)
+
+            if flag == "cuda":
+                for cuda, compiler in product(actual_cudas(), all_compilers):
+                    if valid_cuda(cuda, compiler):
+                        enable(cuda + "_" + compiler)
+
+            if flag == "cuda_mpi":
+                for cuda, mpi, compiler in product(actual_cudas(), actual_mpis(), all_compilers):
+                    if valid_cuda(cuda, compiler) and valid_mpi(compiler, mpi):
+                        enable(cuda + "_" + mpi + "_" + compiler)
+
+            if flag == "pgi":
+                for pgi in actual_pgis():
+                    enable(pgi)
+
+            if flag == "pgi_mpi":
+                for mpi, pgi in product(actual_mpis(), actual_pgis()):
+                    if valid_pgi_mpi(pgi, mpi):
+                        enable(mpi + "_" + pgi)
 
     if len(build.getchildren()) > 0:
         build.getchildren()[-1].tail = "\n  "
@@ -470,90 +499,79 @@ def mpcdf_enable_repositories(api_url, project, package, verbose=False, filter_r
     return True
 
 
-def mpcdf_setup_repositories(api_url, project, microarchitecture=None, distribution=None, parent=None,
-                             packages=None, dry_run=False, filter_repos=None, only_project=False, remove_old=False):
-    import threading
+def parse_prjconf(api_url, project):
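+    # Split the prjconf into the lines before our generated block, the generated block itself, and the lines after it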
+    orig_prjconf = list(map(decode_it, osc.core.show_project_conf(api_url, project)))
+    try:
+        start = orig_prjconf.index(prjconf_start_marker)
+        end = orig_prjconf.index(prjconf_end_marker)
+    except ValueError:
+        start = None
+        end = len(orig_prjconf)
 
-    if parent:
+    prjconf_head = orig_prjconf[:start]
+    prjconf_ours = orig_prjconf[start:end]
+    prjconf_tail = orig_prjconf[end + 1:]
+    return orig_prjconf, prjconf_head, prjconf_ours, prjconf_tail
+
+
+def mpcdf_setup_subproject(api_url, project, distribution, microarchitecture,
+                           parent=None, dry_run=False, remove_old=False, all_possible=False):
+
+    if parent and not dry_run:
         for attribute in config_attributes + default_attributes:
             print("Copying attribute '{0}' from parent project".format(attribute))
             set_attribute(api_url, project, None, get_attribute(api_url, parent, None, attribute))
 
-    compilers = get_attribute_values(api_url, project, None, "MPCDF:compiler_modules")
-    mpis = get_attribute_values(api_url, project, None, "MPCDF:mpi_modules")
-    cudas = get_attribute_values(api_url, project, None, "MPCDF:cuda_modules")
-    pgis = get_attribute_values(api_url, project, None, "MPCDF:pgi_modules")
-    default_python2 = get_attribute_value(api_url, project, None, "MPCDF:default_python2")
-    default_python3 = get_attribute_value(api_url, project, None, "MPCDF:default_python3")
-
-    if distribution is None:
-        # Get existing value from project meta
-        dist_repo = project_meta(api_url, project if parent is None else parent).find(
-            "./repository[@name='System']/path[@project='distributions']")
-        if dist_repo is not None:
-            distribution = dist_repo.get("repository")
-        else:
-            raise osc.oscerr.WrongArgs("ERROR: Specify distribution or parent project")
-        print("Using '{0}' as base distribution".format(distribution))
-
-    distributions = project_meta(api_url, "distributions")
-    dist_repo = distributions.find('./repository[@name="{0}"]'.format(distribution))
+    # Check distribution
+    software_meta = project_meta(api_url, "software")
+    dist_repo = software_meta.find('./repository[@name="{0}"]'.format(distribution))
     if dist_repo is None:
-        raise osc.oscerr.WrongArgs("No repository '{0}' is defined in project 'distributions' on the server".format(distribution))
+        raise osc.oscerr.WrongArgs("Invalid distribution '{0}': No matching repository is defined in project 'software' on the server".format(distribution))
     architectures = list(arch.text for arch in dist_repo.findall("./arch"))
 
-    if microarchitecture is None:
-        microarchitecture = get_microarchitecture(project)
-
     root = project_meta(api_url, project)
 
-    prjconf = list(map(decode_it, osc.core.show_project_conf(api_url, project)))
-
-    try:
-        start = prjconf.index(prjconf_start_marker)
-        end = prjconf.index(prjconf_end_marker)
-    except ValueError:
-        start = None
-        end = len(prjconf)
-
-    prjconf_head = "".join(prjconf[:start])
-    prjconf_tail = "".join(prjconf[end + 1:])
-    prjconf = [prjconf_start_marker]
-
-    prjconf.append("Constraint: hostlabel {0}".format(microarchitecture))
-    prjconf.append(
-        """
-%if %_repository != System
+    orig_prjconf, prjconf_head, prjconf_ours, prjconf_tail = parse_prjconf(api_url, project)
+
+    prjconf_repos = {}
+    cur_repo = None
+    old_repos = set()
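+    # Collect the per-repository "%if %_repository == NAME ... %endif" sections of the previous run;
+    # repositories that are not regenerated below remain in old_repos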
+    for line in prjconf_ours:
+        line = line.rstrip("\n")
+        if cur_repo is None and line.startswith("%if %_repository =="):
+            cur_repo = line.split()[-1]
+            old_repos.add(cur_repo)
+            prjconf_repos[cur_repo] = []
+        if cur_repo is not None:
+            prjconf_repos[cur_repo].append(line)
+            if line == "%endif":
+                cur_repo = None
+
+    prjconf_ours = [prjconf_start_marker]
+
+    prjconf_ours.append("Constraint: hostlabel {0}".format(microarchitecture))
+    prjconf_ours.append("PublishFilter: ^mpcdf_.*$")
+    prjconf_ours.append("""
 Macros:
 %microarchitecture {0}
-:Macros
-%endif""".format(microarchitecture))
-    prjconf.append("")
-    prjconf.append("Prefer: mpcdf_python2_" + default_python2)
-    prjconf.append("Prefer: mpcdf_python3_" + default_python3)
-    prjconf.append("")
+:Macros""".format(microarchitecture))
+    prjconf_ours.append("")
 
     # Remove existing repositories
     if remove_old:
         for oldrepo in root.findall("./repository"):
             root.remove(oldrepo)
 
-    def repo(name, *dependencies, **kwargs):
-        is_compiler = kwargs.pop("compiler", False)
-        is_mpi = kwargs.pop("mpi", False)
-        is_cuda = kwargs.pop("cuda", False)
-        is_cuda_mpi = kwargs.pop("cuda_mpi", False)
-
-        cuda_repo = kwargs.pop("cuda_repo", "")
-
-        have_compiler = is_compiler or is_mpi or is_cuda or is_cuda_mpi
-        have_mpi = is_mpi or is_cuda_mpi
-        have_cuda = is_cuda or is_cuda_mpi
+    def repo(name, *dependencies, compiler=False, mpi=False, cuda=False, cuda_mpi=False, additional_prefers=(), **macros):
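+        # Additional keyword arguments are emitted as "%name value" macro definitions in this repository's prjconf section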
+        old_repos.discard(name)
+        have_compiler = compiler or mpi or cuda or cuda_mpi
+        have_mpi = mpi or cuda_mpi
+        have_cuda = cuda or cuda_mpi
 
         existing_repo = root.find("./repository[@name='{0}']".format(name))
         if existing_repo is not None:
             root.remove(existing_repo)
-        else:
+        elif not dry_run:
             print("New repository", name)
 
         r = ElementTree.SubElement(root, "repository")
@@ -566,7 +584,7 @@ Macros:
             p.set("repository", repo)
             p.tail = "\n    "
 
-        if parent:
+        if parent and name != "System":
             path(parent, name)
         for dep_project, dep_repo in dependencies:
             path(dep_project, dep_repo)
@@ -582,47 +600,82 @@ Macros:
         # compiler/MPI repository we emit a new macro '%matching_mkl_version' in
         # the cases this makes sense
         matching_mkl = []
-        prjconf.append("%if %_repository == {0}".format(name))
-        for prefer in prefers(name):
+        repoconf = []
+        repoconf.append("%if %_repository == {0}".format(name))
+        for prefer in prefers(name) + additional_prefers:
             if prefer.startswith("mkl_"):
                 matching_mkl.append(prefer)
-            prjconf.append("Prefer: " + prefer)
+            repoconf.append("Prefer: " + prefer)
 
-        prjconf.append("Macros:")
+        repoconf.append("Macros:")
 
-        prjconf.append("%is_compiler_repository {0}".format(1 if is_compiler else 0))
-        prjconf.append("%is_mpi_repository {0}".format(1 if is_mpi else 0))
-        prjconf.append("%is_cuda_repository {0}".format(1 if is_cuda else 0))
-        prjconf.append("%is_cuda_mpi_repository {0}".format(1 if is_cuda_mpi else 0))
+        repoconf.append("%is_compiler_repository {0}".format(1 if compiler else 0))
+        repoconf.append("%is_mpi_repository {0}".format(1 if mpi else 0))
+        repoconf.append("%is_cuda_repository {0}".format(1 if cuda else 0))
+        repoconf.append("%is_cuda_mpi_repository {0}".format(1 if cuda_mpi else 0))
 
-        prjconf.append("%have_mpcdf_compiler {0}".format(1 if have_compiler else 0))
-        prjconf.append("%have_mpcdf_mpi {0}".format(1 if have_mpi else 0))
-        prjconf.append("%have_mpcdf_cuda {0}".format(1 if have_cuda else 0))
-
-        if is_cuda:
-            prjconf.append("%cuda_repository {0}".format(cuda_repo))
+        repoconf.append("%have_mpcdf_compiler {0}".format(1 if have_compiler else 0))
+        repoconf.append("%have_mpcdf_mpi {0}".format(1 if have_mpi else 0))
+        repoconf.append("%have_mpcdf_cuda {0}".format(1 if have_cuda else 0))
 
         if matching_mkl:
             matching_mkl, = matching_mkl
             matching_mkl, _ = matching_mkl[len("mkl_"):].split("-module")
             matching_mkl = matching_mkl.replace("_", ".")
-            prjconf.append("%matching_mkl_version {0}".format(matching_mkl))
+            repoconf.append("%matching_mkl_version {0}".format(matching_mkl))
 
-        for macro, value in kwargs.items():
-            prjconf.append("%{0} {1}".format(macro, value))
+        for macro, value in macros.items():
+            repoconf.append("%{0} {1}".format(macro, value))
 
-        prjconf.append(":Macros")
-        prjconf.append("%endif")
-        prjconf.append("")
+        repoconf.append(":Macros")
+        repoconf.append("%endif")
+        prjconf_repos[name] = repoconf
 
-    if parent:
-        repo("System")
+    def actual_compiler(c):
+        if c.startswith("latest"):
+            return False
+        if c == "default_compiler":
+            return False
+        if c == "intel":
+            return False
+        if c == "gcc":
+            return False
+        return True
+
+    def actual_mpi(m):
+        if m == "default_mpi":
+            return False
+        if m == "impi":
+            return False
+        return True
+
+    def actual_cuda(c):
+        if c == "default_cuda":
+            return False
+        return True
+
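+    # With all_possible, create repositories for every allowed module value; otherwise use the project's configured attributes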
+    if all_possible:
+        compilers = list(filter(actual_compiler, get_allowed_attribute_values(api_url, "MPCDF:compiler_modules")))
+        mpis = list(filter(actual_mpi, get_allowed_attribute_values(api_url, "MPCDF:mpi_modules")))
+        cudas = list(filter(actual_cuda, get_allowed_attribute_values(api_url, "MPCDF:cuda_modules")))
+        pgis = get_allowed_attribute_values(api_url, "MPCDF:pgi_modules")
     else:
-        repo("System", ("distributions", distribution))
+        compilers = overloaded_project_attribute(api_url, project, "MPCDF:compiler_modules")
+        mpis = overloaded_project_attribute(api_url, project, "MPCDF:mpi_modules")
+        cudas = overloaded_project_attribute(api_url, project, "MPCDF:cuda_modules")
+        pgis = overloaded_project_attribute(api_url, project, "MPCDF:pgi_modules")
+
+    if parent:
+        repo("System", ("software", distribution))
 
     for compiler in compilers + pgis:
-        repo(compiler, (project, "System"), compiler=True,
-             compiler_repository=compiler, compiler_module=compiler_module(compiler))
+        if project != "software":
+            dependencies = (("software", distribution),)
+        else:
+            dependencies = ()
+        repo(compiler, *dependencies, compiler=True,
+             compiler_repository=compiler, compiler_module=compiler_module(compiler),
+             additional_prefers=("mpcdf_compiler_" + compiler,))
 
         for mpi in filter(partial(valid_mpi, compiler), mpis):
             repo(mpi + "_" + compiler, (project, compiler), mpi=True,
@@ -630,13 +683,22 @@ Macros:
 
     for cuda in cudas:
         for compiler in filter(partial(valid_cuda, cuda), compilers):
-            repo(cuda + "_" + compiler, (project, compiler), cuda=True, cuda_repo=cuda)
+            repo(cuda + "_" + compiler, (project, compiler), cuda=True, cuda_repository=cuda)
             for mpi in filter(partial(valid_mpi, compiler), mpis):
                 repo(cuda + "_" + mpi + "_" + compiler,
                      (project, cuda + "_" + compiler),
                      (project, mpi + "_" + compiler),
                      cuda_mpi=True)
 
+    if old_repos and not remove_old:
+        print("Warning: Keeping the prjconf sections for the following obsolete repositories:")
+        for name in sorted(old_repos):
+            print(" -", name)
+        print()
+    else:
+        for old_repo in old_repos:
+            del prjconf_repos[old_repo]
+
     # Remove build configuration
     build = root.find("./build")
     if build is None:
@@ -648,45 +710,60 @@ Macros:
         root.insert(list(root).index(root.find("./repository")), build)
 
     root.getchildren()[-1].tail = "\n"
-    prj = ElementTree.tostring(root, encoding=osc.core.ET_ENCODING)
+    new_prj = ElementTree.tostring(root, encoding=osc.core.ET_ENCODING)
 
-    prjconf.append(prjconf_end_marker)
-    prjconf = prjconf_head + "\n".join(prjconf) + prjconf_tail
+    for name in sorted(prjconf_repos.keys()):
+        prjconf_ours.extend(prjconf_repos[name])
+        prjconf_ours.append("")
 
-    if dry_run:
-        print("osc meta prj {0} -F - <<EOF\n{1}\nEOF\n".format(project, prj))
-        print("osc meta prjconf {0} -F - <<EOF\n{1}\nEOF\n".format(project, prjconf))
-    else:
-        if not only_project:
-            # First set-up the <enable/> flags, that way no
-            # spurious builds are launched
-            if packages is None:
-                packages = osc.core.meta_get_packagelist(api_url, project)
-
-            if len(packages) > 40:
-                chunksize = len(packages) // 20
-            else:
-                chunksize = len(packages)
+    prjconf_ours.append(prjconf_end_marker)
 
-            def work(packagelist):
-                for package in packagelist:
-                    if not mpcdf_enable_repositories(api_url, project, package, filter_repos=filter_repos):
-                        print("ATTENTION: Not changing unmanaged package {0}".format(package))
+    new_prjconf = "".join(prjconf_head) + "\n".join(prjconf_ours) + "".join(prjconf_tail)
 
-            threads = []
-            for packagelist in chunked(packages, chunksize):
-                t = threading.Thread(target=work, args=(packagelist,))
-                threads.append(t)
-                t.start()
+    if dry_run:
+        print("osc meta prjconf {0} -F - <<EOF\n{1}\nEOF\n".format(project, new_prjconf))
+        print("osc meta prj {0} -F - <<EOF\n{1}\nEOF\n".format(project, new_prj))
+    else:
+        if new_prjconf == "".join(orig_prjconf):
+            print("prjconf unchanged")
+        else:
+            print("Updating prjconf meta")
+            osc.core.edit_meta("prjconf", project, data=new_prjconf)
 
-            for t in threads:
-                t.join()
+        # Create and remove the <enable/> flags before adding the new repositories,
+        # that way no spurious builds are launched
+        print("Updating enabled repositories for all packages")
+        mpcdf_enable_repositories_for_all_packages(api_url, project)
 
         # Update repositories
         print("Updating prj meta")
-        osc.core.edit_meta("prj", project, data=prj, force=True)
-        print("Updating prjconf meta")
-        osc.core.edit_meta("prjconf", project, data=prjconf)
+        osc.core.edit_meta("prj", project, data=new_prj, force=True)
+
+
+def mpcdf_enable_repositories_for_all_packages(api_url, project, ignore_repos=()):
+    import threading
+    packages = osc.core.meta_get_packagelist(api_url, project)
+
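+    # For larger projects split the package list into roughly 20 chunks and update them from parallel worker threads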
+    if len(packages) > 40:
+        chunksize = len(packages) // 20
+    else:
+        chunksize = len(packages)
+
+    def work(packagelist):
+        for package in packagelist:
+            try:
+                mpcdf_enable_repositories(api_url, project, package, ignore_repos=ignore_repos)
+            except UnmanagedPackageException:
+                print("ATTENTION: Not changing unmanaged package {0}".format(package))
+
+    threads = []
+    for packagelist in chunked(packages, chunksize):
+        t = threading.Thread(target=work, args=(packagelist,))
+        threads.append(t)
+        t.start()
+
+    for t in threads:
+        t.join()
 
 
 def set_as_branch(api_url, my_project, my_package, main_project, main_package):
@@ -729,7 +806,7 @@ def set_as_branch(api_url, my_project, my_package, main_project, main_package):
     return True
 
 
-def sync_projects(api_url, package=None, from_project="software", to_projects=None, redo_all=False, add_to_maintainers=True):
+def sync_projects(api_url, package=None, from_project="software", to_projects=None, redo_all=False, add_to_maintainers=True, verbose=False):
     if package is not None and redo_all:
         raise osc.oscerr.WrongArgs('Cannot specify `redo_all` and package')
 
@@ -749,13 +826,15 @@ def sync_projects(api_url, package=None, from_project="software", to_projects=No
         else:
             from_packages = [package]
 
-        for attribute in config_attributes:
-            try:
-                get_attribute(api_url, to_project, None, attribute)
-            except Exception:
-                attr = get_attribute(api_url, from_project, None, attribute)
-                print("Setting attribute", attribute)
-                set_attribute(api_url, to_project, None, attr)
+        def non_system_package(package):
+            enable_repos = get_attribute_values(api_url, from_project, package, "MPCDF:enable_repositories")
+            if enable_repos == ["system"]:
+                print("Not branching package {0}, is only enabled for 'System'".format(package))
+                return False
+            else:
+                return True
+
+        from_packages = list(filter(non_system_package, from_packages))
 
         for orig_package in from_packages:
             if orig_package not in to_packages:
@@ -795,15 +874,17 @@ def sync_projects(api_url, package=None, from_project="software", to_projects=No
         if package is None and redo_all:
             # Check if distribution is already set in to_project
             to_prj_meta = project_meta(api_url, to_project)
-            sys_repo = to_prj_meta.find('./repository[@name="System"]')
-            if sys_repo is None:
-                distribution = project_meta(api_url, from_project).find('./repository[@name="System"]/path[@project="distributions"]').get("repository")
+            some_repo = to_prj_meta.find('./repository')
+            if some_repo is None:
+                distribution = project_meta(api_url, from_project).find('./repository/path[@project="software"]').get("repository")
             else:
-                distribution = sys_repo.find('./path[@project="distributions"]').get("repository")
+                distribution = some_repo.find('./path[@project="distributions"]').get("repository")
 
             print("Creating repository configuration")
-            mpcdf_setup_repositories(api_url, to_project, distribution=distribution)
-        else:
-            for orig_package in from_packages:
-                if not mpcdf_enable_repositories(api_url, to_project, orig_package):
-                    print("ATTENTION: Not changing unmanaged package {0}".format(orig_package))
+            mpcdf_setup_subproject(api_url, to_project, distribution, get_microarchitecture(to_project))
+
+        for orig_package in from_packages:
+            try:
+                mpcdf_enable_repositories(api_url, to_project, orig_package, verbose=verbose)
+            except UnmanagedPackageException:
+                print("ATTENTION: Not changing unmanaged package {0}".format(orig_package))
diff --git a/mpcdf_enable_repositories.py b/mpcdf_enable_repositories.py
index b8213f2709d00e3a814cb31ec41aa78b68c24abb..196110c5562c11cfcc2368db741023c18880adfd 100644
--- a/mpcdf_enable_repositories.py
+++ b/mpcdf_enable_repositories.py
@@ -90,8 +90,8 @@ def do_mpcdf_enable_repositories(self, subcmd, opts, *args):
 
     if opts.recreate or opts.set or opts.disable:
         mpcdf_common.mpcdf_enable_repositories(api_url, project, package, verbose=True)
-        if project == "software":
-            mpcdf_common.sync_projects(api_url, package)
+        if project == "software" and opts.set != "system":
+            mpcdf_common.sync_projects(api_url, package, verbose=True)
 
     elif (opts.compiler_modules or opts.mpi_modules or opts.cuda_modules):
         print("ERROR: Invalid arguments, try --help")
diff --git a/mpcdf_refresh_aggregates.py b/mpcdf_refresh_aggregates.py
deleted file mode 100644
index 22e2594890e57dab890555f6f86a2d4227800113..0000000000000000000000000000000000000000
--- a/mpcdf_refresh_aggregates.py
+++ /dev/null
@@ -1,139 +0,0 @@
-#!/usr/bin/python2
-from __future__ import print_function
-
-import os
-import osc
-import osc.conf
-import osc.core
-import osc.cmdln
-
-import mpcdf_common
-
-from xml.etree import ElementTree
-
-
-def do_mpcdf_refresh_aggregates(self, subcmd, opts, *args):
-    """${cmd_name}: Recreates the aggregate packages in software:dist
-
-    Use this only if you know what this means
-
-    Usage:
-        osc ${cmd_name}
-
-    ${cmd_option_list}
-
-    """
-
-    if len(args) != 0:
-        raise osc.oscerr.WrongArgs("Too many arguments")
-
-    if osc.core.store_read_project(os.curdir) != "software:dist" or \
-            osc.core.is_package_dir(os.curdir):
-        raise osc.oscerr.WrongArgs("You must be in the project checkout of software:dist")
-
-    dist = osc.core.Project(".", False)
-
-    apiurl = self.get_api_url()
-
-    def refresh_aggregate(aggregatename, project, source, target):
-        if not os.path.exists(aggregatename):
-            package = osc.core.Package.init_package(apiurl, "software:dist", aggregatename, aggregatename)
-            dist.addPackage(aggregatename)
-        else:
-            package = dist.get_pacobj(aggregatename)
-
-        with open(aggregatename + "/_aggregate", "w") as fd:
-            print('<aggregatelist>', file=fd)
-            print('  <aggregate project="{project}">'.format(project=project), file=fd)
-            print('    <nosources/>', file=fd)
-            print('    <repository target="{target}" source="{source}" />'.format(target=target, source=source), file=fd)
-            print('  </aggregate>', file=fd)
-            print('</aggregatelist>', file=fd)
-
-        if package.status("_aggregate") == "?":
-            package.addfile("_aggregate")
-
-        if package.status("_aggregate") in ("A", "M"):
-            dist.commit((aggregatename,), msg="Refresh aggregate", verbose=True)
-
-        pkg_meta = osc.core.show_package_meta(apiurl, "software:dist", aggregatename)
-        root = ElementTree.fromstringlist(pkg_meta)
-        pkg_meta = ElementTree.tostring(root, encoding=osc.core.ET_ENCODING)
-
-        build = root.find("./build")
-        if build is None:
-            build = ElementTree.SubElement(root, "build")
-
-        for enable in build.findall("./enable"):
-            build.remove(enable)
-
-        node = ElementTree.Element("enable")
-        node.set("repository", target)
-        node.tail = "\n  "
-        build.insert(0, node)
-
-        new_pkg_meta = ElementTree.tostring(root, encoding=osc.core.ET_ENCODING)
-        if pkg_meta != new_pkg_meta:
-            osc.core.edit_meta("pkg", ("software:dist", aggregatename),
-                               data=new_pkg_meta)
-
-    projects = [p for p in osc.core.meta_get_project_list(apiurl)
-                if p.startswith("software") and not (p == "software:dist" or p == "software:images")]
-
-    macros = {}
-    for project in projects:
-        if project == "software":
-            # Stupid special case
-            target = "SLE_12_SP3-sandybridge"
-        else:
-            target = project[len("software:"):].replace(":", "-")
-
-        macros[target] = []
-        for attribute in mpcdf_common.config_attributes + mpcdf_common.default_attributes:
-            _, name = attribute.split(":")
-            values = mpcdf_common.get_attribute_values(apiurl, project, None, attribute)
-            macros[target].append("%available_{0} {1}".format(name, ",".join(sorted(values))))
-
-        all_compilers = mpcdf_common.get_attribute_values(apiurl, project, None, "MPCDF:compiler_modules")
-        latest_intel = sorted((c for c in all_compilers if c.startswith("intel")), key=mpcdf_common.package_sort_key)[-1]
-        latest_gcc = sorted((c for c in all_compilers if c.startswith("gcc")), key=mpcdf_common.package_sort_key)[-1]
-        macros[target].append("%latest_intel " + latest_intel)
-        macros[target].append("%latest_gcc " + latest_gcc)
-
-        for repo in osc.core.get_repositories_of_project(apiurl, project):
-            aggregatename = "zz_aggregate_" + project.replace(":", "-") + "_" + repo
-            refresh_aggregate(aggregatename, project, repo, target)
-
-    prjconf = list(map(mpcdf_common.decode_it, osc.core.show_project_conf(apiurl, "software:dist")))
-    start_marker = "# Autogenerated by osc mpcdf_refresh_aggregates, do not edit till end of section\n"
-    end_marker = "# End of autogenerated section\n"
-
-    try:
-        start = prjconf.index(start_marker)
-        end = prjconf.index(end_marker)
-    except ValueError:
-        start = None
-        end = len(prjconf)
-
-    prjconf_head = "".join(prjconf[:start])
-    prjconf_tail = "".join(prjconf[end + 1:])
-    prjconf = [start_marker]
-
-    distributions = osc.core.get_repositories_of_project(apiurl, "distributions")
-    for swdistrepo in osc.core.get_repositories_of_project(apiurl, "software:dist"):
-        for distrepo in distributions:
-            if swdistrepo.startswith(distrepo):
-                refresh_aggregate("zz_aggregate_" + swdistrepo + "_distributions", "distributions", distrepo, swdistrepo)
-                break
-            distrepo = None
-        prjconf.append('%if "%_repository" == "{0}"'.format(swdistrepo))
-        prjconf.append("Macros:")
-        prjconf.extend(macros[swdistrepo])
-        prjconf.append("%distribution {0}".format(distrepo))
-        prjconf.append(":Macros")
-        prjconf.append("%endif")
-        prjconf.append("")
-
-    prjconf.append(end_marker)
-    prjconf = prjconf_head + "\n".join(prjconf) + prjconf_tail
-    osc.core.edit_meta("prjconf", "software:dist", data=prjconf)
diff --git a/mpcdf_setup_clusters_project.py b/mpcdf_setup_clusters_project.py
new file mode 100644
index 0000000000000000000000000000000000000000..2885a86bc94abfa0074ebf2daff3133f31e537e9
--- /dev/null
+++ b/mpcdf_setup_clusters_project.py
@@ -0,0 +1,84 @@
+from __future__ import print_function
+
+import osc
+import osc.conf
+import osc.core
+import osc.cmdln
+
+import mpcdf_common
+
+
+@osc.cmdln.alias("mpcdf_setup_clusters")
+def do_mpcdf_setup_clusters_project(self, subcmd, opts, *args):
+    """${cmd_name}: Set-up the 'clusters' project
+
+    This creates the 'prjconf' for the 'clusters' project
+
+    Usage:
+        osc ${cmd_name}
+
+    ${cmd_option_list}
+
+    """
+
+    if len(args) != 0:
+        raise osc.oscerr.WrongArgs("Too many arguments")
+
+    api_url = self.get_api_url()
+
+    projects = [p for p in osc.core.meta_get_project_list(api_url)
+                if p.startswith("software:")]
+
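+    # For every software:<dist>:<arch> sub-project collect its "%available_*" and "%latest_*" macros,
+    # keyed by the repository name derived from the project name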
+    macros = {}
+    for project in projects:
+        target = project[len("software:"):].replace(":", "-")
+
+        if target in macros:
+            raise Exception("Internal error")
+
+        macros[target] = []
+        for attribute in mpcdf_common.config_attributes + mpcdf_common.default_attributes:
+            _, name = attribute.split(":")
+            values = mpcdf_common.overloaded_project_attribute(api_url, project, attribute)
+            macros[target].append("%available_{0} {1}".format(name, ",".join(sorted(values))))
+
+        all_compilers = mpcdf_common.overloaded_project_attribute(api_url, project, "MPCDF:compiler_modules")
+        latest_intel = sorted((c for c in all_compilers if c.startswith("intel")), key=mpcdf_common.package_sort_key)[-1]
+        latest_gcc = sorted((c for c in all_compilers if c.startswith("gcc")), key=mpcdf_common.package_sort_key)[-1]
+        macros[target].append("%latest_intel " + latest_intel)
+        macros[target].append("%latest_gcc " + latest_gcc)
+
+    prjconf = list(map(mpcdf_common.decode_it, osc.core.show_project_conf(api_url, "clusters")))
+    start_marker = "# Autogenerated by osc mpcdf_setup_clusters_project, do not edit till end of section\n"
+    end_marker = "# End of autogenerated section\n"
+
+    try:
+        start = prjconf.index(start_marker)
+        end = prjconf.index(end_marker)
+    except ValueError:
+        start = None
+        end = len(prjconf)
+
+    prjconf_head = "".join(prjconf[:start])
+    prjconf_tail = "".join(prjconf[end + 1:])
+    prjconf = [start_marker]
+
+    distributions = osc.core.get_repositories_of_project(api_url, "software")
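+    # Match each 'clusters' repository to the 'software' distribution repository whose name it starts with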
+    for repo in osc.core.get_repositories_of_project(api_url, "clusters"):
+        for dist in distributions:
+            if repo.startswith(dist):
+                break
+            dist = None
+        if dist is None:
+            raise Exception("Internal error")
+        prjconf.append('%if "%_repository" == "{0}"'.format(repo))
+        prjconf.append("Macros:")
+        prjconf.extend(macros[repo])
+        prjconf.append("%distribution {0}".format(dist))
+        prjconf.append(":Macros")
+        prjconf.append("%endif")
+        prjconf.append("")
+
+    prjconf.append(end_marker)
+    prjconf = prjconf_head + "\n".join(prjconf) + prjconf_tail
+    osc.core.edit_meta("prjconf", "clusters", data=prjconf)
diff --git a/mpcdf_setup_home_project.py b/mpcdf_setup_home_project.py
new file mode 100644
index 0000000000000000000000000000000000000000..5750e91e5ffb22307a5eb879fde4b013f289da3f
--- /dev/null
+++ b/mpcdf_setup_home_project.py
@@ -0,0 +1,77 @@
+#!/usr/bin/python2
+from __future__ import print_function
+
+from mpcdf_common import mpcdf_setup_subproject, project_meta, decode_it
+
+import os
+import osc
+import osc.conf
+import osc.core
+import osc.cmdln
+
+@osc.cmdln.option('-n', '--dry-run', action="store_true",
+                  help="Do not actually run anything but output the resulting XML configuration")
+@osc.cmdln.option('--remove-old', action="store_true", default=False,
+                  help="Remove all obsolete repositories instead of only disabling builds for packages there")
+@osc.cmdln.option('--distribution',
+                  help="Base distribution, necessary argument unless set previously for this project")
+@osc.cmdln.option('--microarchitecture', metavar="ARCH", nargs=1,
+                  help="Configure project to use ARCH as microarchitecture")
+@osc.cmdln.alias("mpcdf_setup_home")
+def do_mpcdf_setup_home_project(self, subcmd, opts, *args):
+    """${cmd_name}: Setup a home project based on a software: sub-project
+
+    Set up a home:$USER project for building packages for the 'software'
+    project hierarchy (i.e. application group packages)
+
+    Usage:
+        osc ${cmd_name} [home:USER]
+
+    ${cmd_option_list}
+
+    """
+
+    if len(args) == 0:
+        if osc.core.is_project_dir(os.curdir) or osc.core.is_package_dir(os.curdir):
+            project = osc.core.store_read_project(os.curdir)
+        else:
+            raise osc.oscerr.WrongArgs('Specify PROJECT or run command in an osc checkout directory')
+
+    elif len(args) == 1:
+        project, = args
+    else:
+        raise osc.oscerr.WrongArgs("Too many arguments")
+
+    if project.split(":")[0] != "home":
+        raise osc.oscerr.WrongArgs('Given project is not a home: project')
+
+    api_url = self.get_api_url()
+
+    if opts.distribution is None:
+        # Get existing value from project meta
+        dist_repo = project_meta(api_url, project).find(
+            "./repository[@name='System']/path")
+        if dist_repo is not None:
+            distribution = dist_repo.get("repository")
+        else:
+            raise osc.oscerr.WrongArgs('Could not determine desired distribution, please specify --distribution explicitly')
+    else:
+        distribution = opts.distribution
+
+    microarchitecture = None
+    if opts.microarchitecture is None:
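+        # Recover the microarchitecture from the "Constraint: hostlabel <arch>" line written by an earlier set-up run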
+        for prjconfline in map(decode_it, osc.core.show_project_conf(api_url, project)):
+            if prjconfline.startswith("Constraint: hostlabel"):
+                microarchitecture = prjconfline.split()[2]
+                break
+    else:
+        microarchitecture = opts.microarchitecture
+
+    if microarchitecture is None:
+        raise osc.oscerr.WrongArgs('Could not determine desired microarchitecture, please specify --microarchitecture explicitly')
+
+    parent = "software:{0}:{1}".format(distribution, microarchitecture)
+
+    mpcdf_setup_subproject(api_url,
+                           project, distribution, microarchitecture,
+                           parent=parent, dry_run=opts.dry_run, remove_old=opts.remove_old)
diff --git a/mpcdf_setup_repositories.py b/mpcdf_setup_repositories.py
deleted file mode 100644
index 53800023b774bf1078529f65ec631210e1e5eeef..0000000000000000000000000000000000000000
--- a/mpcdf_setup_repositories.py
+++ /dev/null
@@ -1,55 +0,0 @@
-#!/usr/bin/python2
-from __future__ import print_function
-
-from mpcdf_common import mpcdf_setup_repositories
-
-import os
-import osc
-import osc.conf
-import osc.core
-import osc.cmdln
-
-
-@osc.cmdln.option('-n', '--dry-run', action="store_true",
-                  help="Do not actually run anything but output the resulting XML configuration")
-@osc.cmdln.option('--parent', metavar="PARENT",
-                  help="Setup the repositories to be based on the upstream project PARENT (e.g. for home: projects)")
-@osc.cmdln.option('--distribution',
-                  help="Base distribution, necessary argument unless set previously for this project")
-@osc.cmdln.option('--disable-repo', metavar="REPO", action="append",
-                  help="Temporarily disable all repositories containing REPO")
-@osc.cmdln.option('--only-project', action="store_true", default=False,
-                  help="Only change project metadata 'prj' and 'prjconf', leave individual packages unchanged")
-@osc.cmdln.option('--remove-old', action="store_true", default=False,
-                  help="Remove all obsolete repositories instead of only disabling builds for packages there")
-@osc.cmdln.option('--microarchitecture', metavar="ARCH", nargs=1,
-                  help="Configure project to use ARCH as microarchitecture")
-@osc.cmdln.alias("mpcdf_setup_repos")
-def do_mpcdf_setup_repositories(self, subcmd, opts, *args):
-    """${cmd_name}: Create all repository combinations for an MPCDF project
-
-    Given a list of compilers, MPI libraries, and possibly CUDA versions, this command
-    creates repositories for all the resulting combinations
-
-    Usage:
-        osc ${cmd_name} [PROJECT]
-
-    ${cmd_option_list}
-
-    """
-
-    if len(args) == 0:
-        if osc.core.is_project_dir(os.curdir) or osc.core.is_package_dir(os.curdir):
-            project = osc.core.store_read_project(os.curdir)
-        else:
-            raise osc.oscerr.WrongArgs('Specify PROJECT or run command in an osc checkout directory')
-
-    elif len(args) == 1:
-        project, = args
-    else:
-        raise osc.oscerr.WrongArgs("Too many arguments")
-
-    mpcdf_setup_repositories(self.get_api_url(),
-                             project, microarchitecture=opts.microarchitecture, distribution=opts.distribution,
-                             parent=opts.parent, dry_run=opts.dry_run, filter_repos=opts.disable_repo,
-                             only_project=opts.only_project, remove_old=opts.remove_old)
diff --git a/mpcdf_setup_software_project.py b/mpcdf_setup_software_project.py
new file mode 100644
index 0000000000000000000000000000000000000000..cf153ad8016f83ac2aca5b24a64716b0e0cd1e22
--- /dev/null
+++ b/mpcdf_setup_software_project.py
@@ -0,0 +1,67 @@
+#!/usr/bin/python2
+from __future__ import print_function
+
+import osc
+import osc.conf
+import osc.core
+import osc.cmdln
+
+import mpcdf_common
+
+
+@osc.cmdln.option('-i', '--ignore-repo', action="append", default=[], metavar="REPO",
+                  help="Do not enable for repository REPO")
+@osc.cmdln.alias("mpcdf_setup_software")
+def do_mpcdf_setup_software_project(self, subcmd, opts, *args):
+    """${cmd_name}: Set-up the 'software' project
+
+    This creates the 'prjconf' for the 'software' project and
+    enables the repositories for packages set to build for
+    'system'
+
+    Usage:
+        osc ${cmd_name}
+
+    ${cmd_option_list}
+
+    """
+
+    if len(args) != 0:
+        raise osc.oscerr.WrongArgs("Too many arguments")
+
+    api_url = self.get_api_url()
+
+    prjconf = list(map(mpcdf_common.decode_it, osc.core.show_project_conf(api_url, "software")))
+    start_marker = "# Autogenerated by osc mpcdf_setup_software_project, do not edit till end of section\n"
+    end_marker = "# End of autogenerated section\n"
+
+    try:
+        start = prjconf.index(start_marker)
+        end = prjconf.index(end_marker)
+    except ValueError:
+        start = None
+        end = len(prjconf)
+
+    prjconf_head = "".join(prjconf[:start])
+    prjconf_tail = "".join(prjconf[end + 1:])
+    prjconf = [start_marker]
+
+    prjconf.append("Constraint: sandbox lxc")
+    prjconf.append("Constraint: hostlabel sandybridge")
+    prjconf.append("")
+    prjconf.append(r"PublishFilter: ^.*\.src\.rpm$")
+    prjconf.append("")
+    prjconf.append("Preinstall: mpcdf")
+    prjconf.append("Preinstall: mpcdf_modules")
+    prjconf.append("Preinstall: brp_mpcdf_modules")
+
+    prjconf.append(end_marker)
+    prjconf = prjconf_head + "\n".join(prjconf) + prjconf_tail
+    osc.core.edit_meta("prjconf", "software", data=prjconf)
+
+    if opts.ignore_repo:
+        ignore_repos = opts.ignore_repo
+    else:
+        ignore_repos = ()
+
+    mpcdf_common.mpcdf_enable_repositories_for_all_packages(api_url, "software", ignore_repos=ignore_repos)
diff --git a/mpcdf_setup_subproject.py b/mpcdf_setup_subproject.py
new file mode 100644
index 0000000000000000000000000000000000000000..505fe5dd5717ca4a25c2f0b5f4c64828152233b2
--- /dev/null
+++ b/mpcdf_setup_subproject.py
@@ -0,0 +1,52 @@
+#!/usr/bin/python2
+from __future__ import print_function
+
+from mpcdf_common import mpcdf_setup_subproject
+
+import os
+import osc
+import osc.conf
+import osc.core
+import osc.cmdln
+
+@osc.cmdln.option('-n', '--dry-run', action="store_true",
+                  help="Do not actually run anything but output the resulting XML configuration")
+@osc.cmdln.option('--remove-old', action="store_true", default=False,
+                  help="Remove all obsolete repositories instead of only disabling builds for packages there")
+@osc.cmdln.alias("mpcdf_setup_sub")
+def do_mpcdf_setup_subproject(self, subcmd, opts, *args):
+    """${cmd_name}: Setup a software: sub-project
+
+    Given a list of compilers, MPI libraries, and possibly CUDA versions via
+    the project's attributes, this command creates repositories for all the
+    resulting combinations
+
+    Additionally, the prjconf is set up to enforce the microarchitecture via
+    a "Constraint:" statement
+
+    Usage:
+        osc ${cmd_name} [software:OS:microarchitecture]
+
+    ${cmd_option_list}
+
+    """
+
+    if len(args) == 0:
+        if osc.core.is_project_dir(os.curdir) or osc.core.is_package_dir(os.curdir):
+            project = osc.core.store_read_project(os.curdir)
+        else:
+            raise osc.oscerr.WrongArgs('Specify PROJECT or run command in an osc checkout directory')
+
+    elif len(args) == 1:
+        project, = args
+    else:
+        raise osc.oscerr.WrongArgs("Too many arguments")
+
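+    # Sub-project names follow the pattern "software:<distribution>:<microarchitecture>"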
+    sw, distribution, microarchitecture = project.split(":")
+
+    if sw != "software":
+        raise osc.oscerr.WrongArgs('Given project is not below the "software" project')
+
+    mpcdf_setup_subproject(self.get_api_url(),
+                           project, distribution, microarchitecture,
+                           dry_run=opts.dry_run, remove_old=opts.remove_old)