diff --git a/.gitignore b/.gitignore
index 3207b308fb50687bf4642640449af2da491e035c..8fe1d88604e078a3283b2986749ce8d13d0fb5a5 100644
--- a/.gitignore
+++ b/.gitignore
@@ -11,7 +11,7 @@ __pycache__
 .coverage
 try.http
 project/
-test_*
+test_*/
 local/
 target/
 *.swp
diff --git a/.gitlab-ci.yml b/.gitlab-ci.yml
index 8acf1d757cb69e0a8fac2ba7f0c08feee89162c7..690e24989a48660ffbfa5039e75eb72462315ca6 100644
--- a/.gitlab-ci.yml
+++ b/.gitlab-ci.yml
@@ -17,13 +17,16 @@ stages:
   - deploy
 
 variables:
-  TEST_IMAGE: gitlab-registry.mpcdf.mpg.de/nomad-lab/nomad-fair:test
+  TEST_IMAGE: gitlab-registry.mpcdf.mpg.de/nomad-lab/nomad-fair:${CI_COMMIT_REF_NAME}
   RELEASE_IMAGE: gitlab-registry.mpcdf.mpg.de/nomad-lab/nomad-fair:latest
-  FRONTEND_TEST_IMAGE: gitlab-registry.mpcdf.mpg.de/nomad-lab/nomad-fair/frontend:test
+  FRONTEND_TEST_IMAGE: gitlab-registry.mpcdf.mpg.de/nomad-lab/nomad-fair/frontend:${CI_COMMIT_REF_NAME}
   FRONTEND_RELEASE_IMAGE: gitlab-registry.mpcdf.mpg.de/nomad-lab/nomad-fair/frontend:latest
-  RAWAPI_TEST_IMAGE: gitlab-registry.mpcdf.mpg.de/nomad-lab/nomad-fair/rawapi:test
+  RAWAPI_TEST_IMAGE: gitlab-registry.mpcdf.mpg.de/nomad-lab/nomad-fair/rawapi:${CI_COMMIT_REF_NAME}
   RAWAPI_RELEASE_IMAGE: gitlab-registry.mpcdf.mpg.de/nomad-lab/nomad-fair/rawapi:latest
 
+  KUBECONFIG: /etc/deploy/config
+  STAGING_NAMESPACE: nomad-fairdi
+
 build:
   stage: build
   script:
@@ -53,8 +56,8 @@ buildrawapi:
     - docker login -u gitlab-ci-token -p $CI_BUILD_TOKEN gitlab-registry.mpcdf.mpg.de
     - docker build -t $RAWAPI_TEST_IMAGE -f rawapi.Dockerfile .
     - docker push $RAWAPI_TEST_IMAGE
-  except:
-    - /^dev-.*$/
+  only:
+    - rawapi
 
 
 linting:
@@ -160,22 +163,8 @@ release_rawapi:
     - docker pull $RAWAPI_TEST_IMAGE
     - docker tag $RAWAPI_TEST_IMAGE $RAWAPI_RELEASE_IMAGE
     - docker push $RAWAPI_RELEASE_IMAGE
-  except:
-    - /^dev-.*$/
-  when: manual
-
-deploy:
-  stage: deploy
-  script:
-    - docker login -u gitlab-ci-token -p $CI_BUILD_TOKEN gitlab-registry.mpcdf.mpg.de
-    - cd ops/docker-compose/nomad
-    # secret .env file is stored on the "production" machine and is bind mounted into the
-    # runner container at /nomad/config/.env
-    - cp /nomad/config/.env .
-    - docker-compose -f docker-compose.yml -f docker-compose.prod.yml build
-    - docker-compose -f docker-compose.yml -f docker-compose.prod.yml up --no-deps -d api worker gui
-  except:
-    - /^dev-.*$/
+  only:
+    - rawapi
   when: manual
 
 deploy_rawapi:
@@ -187,6 +176,41 @@ deploy_rawapi:
     # runner container at /nomad/config/.env
     - cp /nomad/config/.rawapi_env .env
     - docker-compose -f docker-compose.yml -f docker-compose.prod.yml up -d
+  only:
+    - rawapi
+  when: manual
+
+deploy:
+  stage: deploy
+  image: dtzar/helm-kubectl
+  before_script:
+    - mkdir -p /etc/deploy
+    # CI_KUBE_CONFIG is a base64-encoded kubeconfig, set as a CI/CD variable in the GitLab GUI
+    - echo $CI_KUBE_CONFIG | base64 -d > /etc/deploy/config
+    - helm init --client-only
+    - helm repo add stable https://kubernetes-charts.storage.googleapis.com/
+    - helm repo add incubator https://kubernetes-charts-incubator.storage.googleapis.com/
+    - helm repo update
+  script:
+    - cd ops/helm/nomad
+    - export KUBECONFIG=/etc/deploy/config
+    - helm dep build
+    - export NOMAD_VERSION="$(grep "version" Chart.yaml | cut -d" " -f2)"
+    - export NUMERIC_VERSION="$(echo ${NOMAD_VERSION} | cut -d"." -f1-2)"
+    - export RELEASE_NAME="nomad-v${NOMAD_VERSION//./-}"
+    - export DEPLOYS=$(helm ls | grep $RELEASE_NAME | wc -l)
+    - export EXTERNAL_PATH="/fairdi/nomad/v${NOMAD_VERSION}"
+    - export DBNAME="fairdi_nomad_v${NOMAD_VERSION//./_}"
+    - export FILES_PATH="/scratch/nomad-fair/fs/nomad_v${NOMAD_VERSION}"
+    - if [ ${DEPLOYS} -eq 0 ]; then
+        helm install --name=${RELEASE_NAME} . --namespace=${STAGING_NAMESPACE}
+          --set proxy.nodePort="300${NUMERIC_VERSION//./}"
+          --set proxy.external.path=${EXTERNAL_PATH}
+          --set dbname=${DBNAME}
+          --set volumes.files=${FILES_PATH};
+      else
+        helm upgrade ${RELEASE_NAME} . --namespace=${STAGING_NAMESPACE} --recreate-pods;
+      fi
   except:
     - /^dev-.*$/
   when: manual
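
As a review aid, here is a minimal JavaScript sketch of the names the deploy script derives from the chart version. The version `0.4.0` is a made-up example; everything else mirrors the shell parameter expansions above.

```javascript
// Mirrors the deploy job's shell parameter expansions (illustration only;
// "0.4.0" is a hypothetical Chart.yaml version).
const nomadVersion = '0.4.0'
const numericVersion = nomadVersion.split('.').slice(0, 2).join('.')  // "0.4"
const releaseName = `nomad-v${nomadVersion.replace(/\./g, '-')}`      // "nomad-v0-4-0"
const nodePort = `300${numericVersion.replace(/\./g, '')}`            // "30004"
const externalPath = `/fairdi/nomad/v${nomadVersion}`                 // "/fairdi/nomad/v0.4.0"
const dbName = `fairdi_nomad_v${nomadVersion.replace(/\./g, '_')}`    // "fairdi_nomad_v0_4_0"
```
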
diff --git a/.vscode/launch.json b/.vscode/launch.json
index 942b2413f649517fbe214e7e9194f175ce99c081..7e14828fbfc67b4a83f3e7a38cc3703388a3224c 100644
--- a/.vscode/launch.json
+++ b/.vscode/launch.json
@@ -44,7 +44,7 @@
       "cwd": "${workspaceFolder}",
       "program": "${workspaceFolder}/.pyenv/bin/pytest",
       "args": [
-        "-sv", "tests/test_normalizing.py::test_normalizer[parsers/cpmd-tests/data/parsers/cpmd/geo_output.out]"
+        "-sv", "tests/test_api.py::TestUploads::test_put[None-multipart-tests/data/proc/examples_template.zip]"
       ]
     },
     {
diff --git a/Dockerfile b/Dockerfile
index 61a6e82a75ed849927147155324b321de71f67af..7f8492335a28813d87d19d2f84f83e7de9aea03d 100644
--- a/Dockerfile
+++ b/Dockerfile
@@ -71,13 +71,10 @@ COPY --from=build /install/.dependencies/nomad-meta-info /app/.dependencies/noma
 COPY --from=build /install/docs/.build /app/docs/.build
 
 RUN mkdir -p /app/.volumes/fs
-RUN mkdir -p /nomad
 RUN useradd -ms /bin/bash nomad
 RUN chown -R nomad /app
-RUN chown -R nomad /nomad
 USER nomad
 
 VOLUME /app/.volumes/fs
-VOLUME /nomad
 
 EXPOSE 8000
diff --git a/docs/.DS_Store b/docs/.DS_Store
index 560ae6fbf457a39fcfc8c2f24862414f0648c4c3..efcf6d48ca6cbe1f32bb3708f624e8a39f3a1ada 100644
Binary files a/docs/.DS_Store and b/docs/.DS_Store differ
diff --git a/docs/conf.py b/docs/conf.py
index 32ac4f50ab1a3984eb688af553b5dce3da5dde86..2a5d7d1b325a3ffcda52ae983bfb3d211b9357d2 100644
--- a/docs/conf.py
+++ b/docs/conf.py
@@ -13,7 +13,7 @@
 # documentation root, use os.path.abspath to make it absolute, like shown here.
 import os
 import sys
-from recommonmark.transform import AutoStructify
+# from recommonmark.transform import AutoStructify
 
 sys.path.insert(0, os.path.abspath('..'))
 
@@ -48,16 +48,13 @@ extensions = [
     'sphinxcontrib.httpdomain',
     'sphinxcontrib.autohttp.flask',
     'sphinxcontrib.autohttp.flaskqref',
-    'celery.contrib.sphinx'
+    'celery.contrib.sphinx',
+    'm2r'
 ]
 
 # Add any paths that contain templates here, relative to this directory.
 templates_path = ['.templates']
 
-source_parsers = {
-    '.md': 'recommonmark.parser.CommonMarkParser',
-}
-
 # The suffix(es) of source filenames.
 # You can specify multiple suffix as a list of string:
 #
@@ -179,8 +176,8 @@ todo_include_todos = True
 # Enably sphinx specifc markdown features
 def setup(app):
     app.add_stylesheet('css/custom.css')
-    app.add_config_value('recommonmark_config', {
-        'enable_auto_doc_ref': True,
-        'enable_eval_rst': True
-    }, True)
-    app.add_transform(AutoStructify)
+    # app.add_config_value('recommonmark_config', {
+    #     'enable_auto_doc_ref': True,
+    #     'enable_eval_rst': True
+    # }, True)
+    # app.add_transform(AutoStructify)
diff --git a/docs/index.rst b/docs/index.rst
index d5d2896beeae0b9bea5a5afa8f9e874c3192967e..bb13d55e71ab4ca3fda51f11adfb4b5db719f0df 100644
--- a/docs/index.rst
+++ b/docs/index.rst
@@ -11,4 +11,5 @@ and infrastructure with a simplyfied architecture and consolidated code base.
    setup
    dev_guidelines
    api
+   ops
    reference
diff --git a/docs/nomad-cube.png b/docs/nomad-cube.png
new file mode 100644
index 0000000000000000000000000000000000000000..8840b236be5d242043d1b016f11df516210375fb
Binary files /dev/null and b/docs/nomad-cube.png differ
diff --git a/docs/ops.rst b/docs/ops.rst
new file mode 100644
index 0000000000000000000000000000000000000000..59848f409a02e0162fe59d4eaeac2edd47d9a92b
--- /dev/null
+++ b/docs/ops.rst
@@ -0,0 +1,7 @@
+Operating nomad
+===============
+
+.. mdinclude:: ../ops/README.md
+.. mdinclude:: ../ops/containers/README.md
+.. mdinclude:: ../ops/docker-compose/README.md
+.. mdinclude:: ../ops/helm/README.md
\ No newline at end of file
diff --git a/docs/reference.md b/docs/reference.md
deleted file mode 100644
index 0de5f29928009b505274ca12b11a0f34d58ee60e..0000000000000000000000000000000000000000
--- a/docs/reference.md
+++ /dev/null
@@ -1,48 +0,0 @@
-# Reference
-
-## nomad.config
-```eval_rst
-
-.. automodule:: nomad.config
-    :members:
-```
-
-## nomad.dependencies
-```eval_rst
-.. automodule:: nomad.dependencies
-```
-
-## nomad.files
-```eval_rst
-.. automodule:: nomad.files
-```
-
-## nomad.parsing
-```eval_rst
-.. automodule:: nomad.parsing
-```
-
-## nomad.processing
-```eval_rst
-.. automodule:: nomad.processing
-```
-
-## nomad.repo
-```eval_rst
-.. automodule:: nomad.repo
-```
-
-## nomad.user
-```eval_rst
-.. automodule:: nomad.coe_repo
-```
-
-## nomad.api
-```eval_rst
-.. automodule:: nomad.api
-```
-
-## nomad.utils
-```eval_rst
-.. automodule:: nomad.utils
-```
\ No newline at end of file
diff --git a/docs/reference.rst b/docs/reference.rst
new file mode 100644
index 0000000000000000000000000000000000000000..43055070a0ec09c81c37210f8e678901202a29cf
--- /dev/null
+++ b/docs/reference.rst
@@ -0,0 +1,39 @@
+Reference
+=========
+
+nomad.config
+------------
+.. automodule:: nomad.config
+    :members:
+
+nomad.dependencies
+------------------
+.. automodule:: nomad.dependencies
+
+nomad.files
+-----------
+.. automodule:: nomad.files
+
+nomad.parsing
+-------------
+.. automodule:: nomad.parsing
+
+nomad.processing
+----------------
+.. automodule:: nomad.processing
+
+nomad.repo
+----------
+.. automodule:: nomad.repo
+
+nomad.user
+----------
+.. automodule:: nomad.coe_repo
+
+nomad.api
+---------
+.. automodule:: nomad.api
+
+nomad.utils
+-----------
+.. automodule:: nomad.utils
\ No newline at end of file
diff --git a/gui/.env.development b/gui/.env.development
index f636c30c63ecd377840aa33efddca56c19af059e..ecfa5994b2a52f163576207172f5958e156bbc65 100644
--- a/gui/.env.development
+++ b/gui/.env.development
@@ -1,5 +1,5 @@
 REACT_APP_API_BASE = 'http://localhost:8000/nomad/api'
-REACT_APP_OBJECT_BASE = 'http://localhost:9007'
 REACT_APP_APP_BASE = '/nomad'
 REACT_APP_APP_STATIC_BASE = ''
-REACT_APP_DEBUG = 'true'
\ No newline at end of file
+REACT_APP_DEBUG = 'true'
+REACT_APP_KIBANA_BASE = '/nomad/kibana'
\ No newline at end of file
diff --git a/gui/.env.production b/gui/.env.production
index 7755d9dcad9e47d2023b15e0c718a9f41f268ae2..e69de29bb2d1d6434b8b29ae775ad8c2e48c5391 100644
--- a/gui/.env.production
+++ b/gui/.env.production
@@ -1,5 +0,0 @@
-REACT_APP_API_BASE = '/nomad/api'
-REACT_APP_OBJECT_BASE = '/nomad/objects'
-REACT_APP_APP_BASE = '/nomad'
-REACT_APP_APP_STATIC_BASE = '/nomad'
-REACT_APP_DEBUG = 'true'
\ No newline at end of file
diff --git a/gui/Dockerfile b/gui/Dockerfile
index ca50253601451b93d4aae77f74e443ac7f0a417e..6c6d207a991390f13e6c603ad410e8534928e057 100644
--- a/gui/Dockerfile
+++ b/gui/Dockerfile
@@ -28,10 +28,17 @@ COPY yarn.lock /nomad/app/yarn.lock
 RUN yarn
 COPY . /nomad/app
 
-RUN yarn build
+RUN yarn run build
 
 # production environment
 FROM nginx:1.13.9-alpine
 COPY --from=build /nomad/app/build /app/nomad
 COPY ./gui.conf /etc/nginx/conf.d/default.conf
-CMD ["nginx", "-g", "daemon off;"]
+
+WORKDIR /app/nomad
+CMD ["nginx", "-g", "daemon off;"]
+
+VOLUME /etc/nginx/conf.d
+VOLUME /nomad/app/config
+
+EXPOSE 8080/tcp
diff --git a/gui/package.json b/gui/package.json
index 9ca5bca0291ab111c437b1a8117ef780f6ea5744..663ce5bcae8997493e748cec3b0bfc118f4c06d0 100644
--- a/gui/package.json
+++ b/gui/package.json
@@ -3,14 +3,16 @@
   "version": "0.1.0",
   "private": true,
   "dependencies": {
-    "@material-ui/core": "^1.5.1",
-    "@material-ui/docs": "^1.0.0-alpha.5",
-    "@material-ui/icons": "^2.0.3",
+    "@material-ui/core": "^3.9.0",
+    "@material-ui/icons": "^3.0.2",
     "@navjobs/upload": "^3.1.3",
     "base-64": "^0.1.0",
     "fetch": "^1.1.0",
+    "file-saver": "^2.0.0",
     "html-to-react": "^1.3.3",
+    "marked": "^0.6.0",
     "react": "^16.4.2",
+    "react-cookie": "^3.0.8",
     "react-copy-to-clipboard": "^5.0.1",
     "react-dom": "^16.4.2",
     "react-dropzone": "^5.0.1",
@@ -19,14 +21,16 @@
     "react-router-dom": "^4.3.1",
     "react-router-hash-link": "^1.2.0",
     "react-scripts": "1.1.4",
+    "react-swipeable-views": "^0.13.0",
     "recompose": "^0.28.2",
+    "swagger-client": "^3.8.22",
+    "three.js": "^0.77.1",
     "url-parse": "^1.4.3"
   },
   "scripts": {
-    "metainfo": "git clone --single-branch -b nomad-fair http://gitlab.mpcdf.mpg.de/nomad-lab/nomad-meta-info.git --depth=1 public/metainfo",
     "gitinfo": "echo \"{ \\\"log\\\": \\\"$(git log -1 --oneline)\\\", \\\"ref\\\": \\\"$(git describe --all)\\\", \\\"version\\\": \\\"$(git describe)\\\" }\"  > src/gitinfo.json",
-    "start": "yarn metainfo; react-scripts start",
-    "build": "yarn metainfo; react-scripts build",
+    "start": "react-scripts start",
+    "build": "react-scripts build",
     "test": "react-scripts test --env=jsdom",
     "eject": "react-scripts eject"
   },
@@ -35,10 +39,11 @@
     "eslint": "^4.19.1",
     "eslint-config-standard": "^11.0.0",
     "eslint-plugin-import": "^2.14.0",
+    "eslint-plugin-node": "^8.0.1",
     "eslint-plugin-promise": "^3.7.0",
     "eslint-plugin-react": "^7.11.1",
     "eslint-plugin-standard": "^3.1.0",
     "serve": "^10.0.0"
   },
-  "homepage": "http://mywebsite.com/nomad"
+  "homepage": "."
 }
diff --git a/gui/public/config/env.js b/gui/public/config/env.js
new file mode 100644
index 0000000000000000000000000000000000000000..e334470f3f7bc32fb9b0928a6d01daf81fc3f18a
--- /dev/null
+++ b/gui/public/config/env.js
@@ -0,0 +1,8 @@
+// this is a public file and must only contain configuration that can be publicly
+// available
+window.nomadEnv = {
+  "apiBase": "/nomad/api",
+  "appBase": "/nomad",
+  "appStaticBase": "/nomad",
+  "appDebug": false
+};
\ No newline at end of file
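
The GUI's `src/config.js` is not part of this diff, so the following is only a hypothetical sketch of how the runtime configuration injected by `public/config/env.js` might be consumed; the fallback values simply repeat the defaults above.

```javascript
// Hypothetical src/config.js reader (not in this diff): pick up the runtime
// config that public/config/env.js attached to the window object.
const env = window.nomadEnv || {}
export const apiBase = env.apiBase || '/nomad/api'
export const appBase = env.appBase || '/nomad'
export const appStaticBase = env.appStaticBase || ''
export const appDebug = env.appDebug || false
```
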
diff --git a/gui/public/index.html b/gui/public/index.html
index e87935922e0220b893101ebc8597cc48cad0a132..98243bcf3a7ceab374260e402ee61298c14d5ba0 100644
--- a/gui/public/index.html
+++ b/gui/public/index.html
@@ -29,6 +29,7 @@
     <noscript>
       You need to enable JavaScript to run this app.
     </noscript>
+    <script src="%PUBLIC_URL%/config/env.js"></script>
     <div id="root"></div>
     <!--
       This HTML file is a template.
diff --git a/gui/src/api.js b/gui/src/api.js
deleted file mode 100644
index dca128fa9baaf805ac58f7abcc0b3c38959a9725..0000000000000000000000000000000000000000
--- a/gui/src/api.js
+++ /dev/null
@@ -1,262 +0,0 @@
-import { UploadRequest } from '@navjobs/upload'
-import { apiBase, appStaticBase } from './config'
-
-const auth_headers = {
-  Authorization: 'Basic ' + btoa('sheldon.cooper@nomad-fairdi.tests.de:password')
-}
-
-const networkError = () => {
-  throw Error('Network related error, cannot reach API or object storage.')
-}
-
-const handleJsonErrors = () => {
-  throw Error('Server return unexpected data format.')
-}
-
-const handleResponseErrors = (response) => {
-  if (!response.ok) {
-    return response.json()
-      .catch(() => {
-        throw Error(`API/object storage error (${response.status}): ${response.statusText}`)
-      }).then(data => {
-        throw Error(`API/object storage error (${response.status}): ${data.message}`)
-      })
-  }
-  return response
-}
-
-class Upload {
-  constructor(json, created) {
-    this.uploading = null
-    this._assignFromJson(json, created)
-  }
-
-  uploadFile(file) {
-    console.assert(this.upload_url)
-    this.uploading = 0
-
-    const uploadFileWithProgress = async() => {
-      let { error, aborted } = await UploadRequest(
-        {
-          request: {
-            url: this.upload_url,
-            method: 'PUT',
-            headers: {
-              'Content-Type': 'application/gzip',
-              ...auth_headers
-            }
-          },
-          files: [file],
-          progress: value => {
-            console.log(value)
-            this.uploading = value
-          }
-        }
-      )
-      if (error) {
-        networkError(error)
-      }
-      if (aborted) {
-        throw Error('User abort')
-      }
-    }
-
-    return uploadFileWithProgress()
-      .then(() => this)
-  }
-
-  _assignFromJson(uploadJson, created) {
-    Object.assign(this, uploadJson)
-    if (this.current_task !== this.tasks[0]) {
-      this.uploading = 100
-      this.waiting = false
-    } else if (!created && this.uploading === null) {
-      // if data came from server during a normal get (not create) and its still uploading
-      // and the uploading is also not controlled locally then it ought to be a manual upload
-      this.waiting = true
-    }
-  }
-
-  get(page, perPage, orderBy, order) {
-    if (!page) page = 1
-    if (!perPage) perPage = 5
-    if (!orderBy) orderBy = 'mainfile'
-    if (!order) order = 'desc'
-
-    order = order === 'desc' ? -1 : 1
-
-    if (this.uploading !== null && this.uploading !== 100) {
-      return new Promise(resolve => resolve(this))
-    } else {
-      const qparams = `page=${page}&per_page=${perPage}&order_by=${orderBy}&order=${order}`
-      return fetch(
-        `${apiBase}/uploads/${this.upload_id}?${qparams}`,
-        {
-          method: 'GET',
-          headers: auth_headers
-        })
-        .catch(networkError)
-        .then(handleResponseErrors)
-        .then(response => response.json())
-        .then(uploadJson => {
-          this._assignFromJson(uploadJson)
-          return this
-        })
-    }
-  }
-}
-
-function createUpload(name) {
-  const fetchData = {
-    method: 'POST',
-    body: JSON.stringify({
-      name: name
-    }),
-    headers: {
-      'Content-Type': 'application/json',
-      ...auth_headers
-    }
-  }
-  return fetch(`${apiBase}/uploads`, fetchData)
-    .catch(networkError)
-    .then(handleResponseErrors)
-    .then(response => response.json())
-    .then(uploadJson => new Upload(uploadJson, true))
-}
-
-function getUploads() {
-  return fetch(
-    `${apiBase}/uploads`,
-    {
-      method: 'GET',
-      headers: auth_headers
-    })
-    .catch(networkError)
-    .then(handleResponseErrors)
-    .then(response => response.json())
-    .then(uploadsJson => uploadsJson.map(uploadJson => new Upload(uploadJson)))
-}
-
-function archive(uploadHash, calcHash) {
-  return fetch(archiveUrl(uploadHash, calcHash))
-    .catch(networkError)
-    .then(handleResponseErrors)
-    .then(response => response.json())
-}
-
-function calcProcLog(archiveId) {
-  return fetch(`${apiBase}/logs/${archiveId}`)
-    .catch(networkError)
-    .then(response => {
-      if (!response.ok) {
-        if (response.status === 404) {
-          return ''
-        } else {
-          return handleResponseErrors(response)
-        }
-      } else {
-        return response.text()
-      }
-    })
-}
-
-function archiveUrl(uploadHash, calcHash) {
-  return `${apiBase}/archive/${uploadHash}/${calcHash}`
-}
-
-function repo(uploadHash, calcHash) {
-  return fetch(`${apiBase}/repo/${uploadHash}/${calcHash}`)
-    .catch(networkError)
-    .then(handleResponseErrors)
-    .then(response => response.json())
-}
-
-function repoAll(page, perPage, owner) {
-  return fetch(
-    `${apiBase}/repo?page=${page}&per_page=${perPage}&owner=${owner || 'all'}`,
-    {
-      method: 'GET',
-      headers: auth_headers
-    })
-    .catch(networkError)
-    .then(handleResponseErrors)
-    .then(response => response.json())
-}
-
-function deleteUpload(uploadId) {
-  return fetch(
-    `${apiBase}/uploads/${uploadId}`,
-    {
-      method: 'DELETE',
-      headers: auth_headers
-    })
-    .catch(networkError)
-    .then(handleResponseErrors)
-    .then(response => response.json())
-}
-
-function unstageUpload(uploadId) {
-  return fetch(
-    `${apiBase}/uploads/${uploadId}`,
-    {
-      method: 'POST',
-      body: JSON.stringify({
-        operation: 'unstage'
-      }),
-      headers: {
-        'Content-Type': 'application/json',
-        ...auth_headers
-      }
-    })
-    .catch(networkError)
-    .then(handleResponseErrors)
-    .then(response => response.json())
-}
-
-let cachedMetaInfo = null
-
-async function getMetaInfo() {
-  if (cachedMetaInfo) {
-    return cachedMetaInfo
-  } else {
-    const loadMetaInfo = async(path) => {
-      return fetch(`${appStaticBase}/metainfo/meta_info/nomad_meta_info/${path}`)
-        .catch(networkError)
-        .then(handleResponseErrors)
-        .then(response => response.json())
-        .catch(handleJsonErrors)
-        .then(data => {
-          if (!cachedMetaInfo) {
-            cachedMetaInfo = {}
-          }
-          if (data.dependencies) {
-            data.dependencies.forEach(dep => {
-              loadMetaInfo(dep.relativePath)
-            })
-          }
-          if (data.metaInfos) {
-            data.metaInfos.forEach(info => {
-              cachedMetaInfo[info.name] = info
-            })
-          }
-        })
-    }
-    await loadMetaInfo('all.nomadmetainfo.json')
-    return cachedMetaInfo
-  }
-}
-
-const api = {
-  createUpload: createUpload,
-  deleteUpload: deleteUpload,
-  unstageUpload: unstageUpload,
-  getUploads: getUploads,
-  archive: archive,
-  calcProcLog: calcProcLog,
-  archiveUrl: archiveUrl,
-  repo: repo,
-  repoAll: repoAll,
-  getMetaInfo: getMetaInfo
-}
-
-export default api
diff --git a/gui/src/components/App.js b/gui/src/components/App.js
index 4bb242074072665b79ee5deb8444dccb7294be1b..369bf2d934efb503fcb6bc9599963d6907cc398e 100644
--- a/gui/src/components/App.js
+++ b/gui/src/components/App.js
@@ -4,36 +4,43 @@ import { genTheme, appBase } from '../config'
 import Navigation from './Navigation'
 import { BrowserRouter, Switch, Route } from 'react-router-dom'
 import Uploads from './Uploads'
-import ArchiveCalc from './ArchiveCalc'
-import RepoCalc from './RepoCalc'
 import Repo from './Repo'
 import Documentation from './Documentation'
 import Development from './Development'
 import Home from './Home'
+import { HelpProvider } from './help'
+import { ApiProvider } from './api'
+import { ErrorSnacks } from './errors'
 
-function App() {
-  return (
-    <MuiThemeProvider theme={genTheme}>
-      <BrowserRouter basename={appBase}>
-        <Navigation>
-          <Switch>
-            <Route exact path="/" component={Home} />
-            <Route exact path="/repo" component={Repo} />
-            <Route path="/repo/:uploadHash/:calcHash" component={RepoCalc} />
-            <Route path="/upload" component={Uploads} />
-            <Route exact path="/archive" render={() => <div>Archive</div>} />
-            <Route path="/archive/:uploadHash/:calcHash" component={ArchiveCalc} />
-            <Route path="/enc" render={() => <div>{'In the future, you\'ll see charts\'n\'stuff for your calculations and materials.'}</div>} />
-            <Route path="/analytics" render={() => <div>{'In the future, you\'ll see analytics notebooks here.'}</div>} />
-            <Route path="/profile" render={() => <div>Profile</div>} />
-            <Route path="/docs" component={Documentation} />
-            <Route path="/dev" component={Development} />
-            <Route render={() => <div>Not found</div>} />
-          </Switch>
-        </Navigation>
-      </BrowserRouter>
-    </MuiThemeProvider>
-  )
+export default class App extends React.Component {
+  render() {
+    return (
+      <MuiThemeProvider theme={genTheme}>
+        <ErrorSnacks>
+          <BrowserRouter basename={appBase}>
+            <HelpProvider>
+              <ApiProvider>
+                <Navigation>
+                  <Switch>
+                    <Route exact path="/" component={Home} />
+                    <Route exact path="/repo" component={Repo} />
+                    {/* <Route path="/repo/:uploadId/:calcId" component={RepoCalc} /> */}
+                    <Route path="/upload" component={Uploads} />
+                    <Route exact path="/archive" render={() => <div>Archive</div>} />
+                    {/* <Route path="/archive/:uploadId/:calcId" component={ArchiveCalc} /> */}
+                    <Route path="/enc" render={() => <div>{'In the future, you\'ll see charts\'n\'stuff for your calculations and materials.'}</div>} />
+                    <Route path="/analytics" render={() => <div>{'In the future, you\'ll see analytics notebooks here.'}</div>} />
+                    <Route path="/profile" render={() => <div>Profile</div>} />
+                    <Route path="/docs" component={Documentation} />
+                    <Route path="/dev" component={Development} />
+                    <Route render={() => <div>Not found</div>} />
+                  </Switch>
+                </Navigation>
+              </ApiProvider>
+            </HelpProvider>
+          </BrowserRouter>
+        </ErrorSnacks>
+      </MuiThemeProvider>
+    )
+  }
 }
-
-export default App
diff --git a/gui/src/components/ArchiveCalc.js b/gui/src/components/ArchiveCalc.js
deleted file mode 100644
index 81500cdcfb0c3a23bdd3bff90349c5495791bb3d..0000000000000000000000000000000000000000
--- a/gui/src/components/ArchiveCalc.js
+++ /dev/null
@@ -1,167 +0,0 @@
-import React from 'react'
-import PropTypes from 'prop-types'
-import { withStyles, Paper, LinearProgress, Typography, Popover } from '@material-ui/core'
-import ReactJson from 'react-json-view'
-import api from '../api'
-import Markdown from './Markdown'
-import { compose } from 'recompose'
-import { withErrors } from './errors'
-import CalcProcLogPopper from './CalcProcLogPopper'
-
-class ArchiveCalc extends React.Component {
-  static propTypes = {
-    classes: PropTypes.object.isRequired,
-    raiseError: PropTypes.func.isRequired,
-    match: PropTypes.object.isRequired
-  }
-  static styles = theme => ({
-    root: {},
-    calcData: {
-      padding: theme.spacing.unit
-    },
-    logs: {
-      marginTop: theme.spacing.unit * 2,
-      padding: theme.spacing.unit
-    },
-    metaInfo: {
-      height: 120,
-      padding: theme.spacing.unit * 2,
-      overflowY: 'scroll'
-    },
-    metaInfoInstructions: {
-      height: 100,
-      paddingTop: 30,
-      textAlign: 'center',
-      color: 'grey'
-    },
-    logLink: {
-      fontSize: '1rem',
-      lineHeight: '2',
-      marginBlockStart: '1rem',
-      marginBlockEnd: '1rem',
-      '& a': {
-        color: theme.palette.secondary.main,
-        textDecoration: 'none',
-        '&:hover': {
-          textDecoration: 'underline'
-        }
-      }
-    }
-  });
-
-  static metainfo = null
-
-  state = {
-    data: null,
-    logs: null,
-    metaInfo: null,
-    showMetaInfo: false,
-    showLogs: false
-  }
-
-  constructor(props) {
-    super(props)
-    this.logPopperAnchor = React.createRef()
-  }
-
-  componentDidMount() {
-    const {uploadHash, calcHash} = this.props.match.params
-    api.archive(uploadHash, calcHash).then(data => {
-      this.setState({data: data})
-    }).catch(error => {
-      this.setState({data: null})
-      this.props.raiseError(error)
-    })
-
-    api.getMetaInfo().then(metaInfo => {
-      this.setState({metaInfo: metaInfo})
-    }).catch(error => {
-      this.props.raiseError(error)
-    })
-  }
-
-  handleShowMetaInfo(selection, more) {
-    if (selection.name === '_name') {
-      this.setState({showMetaInfo: selection.value})
-    } else {
-      this.setState({showMetaInfo: selection.name})
-    }
-  }
-
-  render() {
-    const { classes } = this.props
-    const { data, showMetaInfo, metaInfo } = this.state
-    const metaInfoData = metaInfo ? metaInfo[showMetaInfo] : null
-    const { uploadHash, calcHash } = this.props.match.params
-    return (
-      <div className={classes.root} ref={this.logPopperAnchor}>
-        <Markdown>{`
-          ## The Archive – Code Independent Data
-          All values in the archive data have a specific type and documentation
-          associated with this type. This information is called the *meta-info*.
-          You can learn more about the different *sections* and
-          *quantities* by visiting the [meta-info](/metainfo) browser.
-
-          The tree below shows all calculation data in nomad's *hierachical* and
-          *code independent* archive format. You can download it
-          [here](${api.archiveUrl(uploadHash, calcHash)}). Click on values to
-          see a *meta-info* description.
-        `}</Markdown>
-        <Typography className={classes.logLink}>
-          The processing logs are available <a href="#" onClick={() => this.setState({showLogs: true})}>here</a>.
-        </Typography>
-        <Paper className={classes.calcData}>
-          {
-            data
-              ? <ReactJson
-                src={this.state.data}
-                enableClipboard={false}
-                collapsed={4}
-                displayObjectSize={false}
-                onSelect={this.handleShowMetaInfo.bind(this)}/>
-              : <LinearProgress variant="query" />
-          }
-        </Paper>
-        <CalcProcLogPopper
-          open={this.state.showLogs}
-          archiveId={`${uploadHash}/${calcHash}`}
-          onClose={() => this.setState({showLogs: false})}
-          anchorEl={this.logPopperAnchor.current}
-          raiseError={this.props.raiseError}
-        />
-        <Popover
-          open={(showMetaInfo && metaInfo && metaInfoData) ? true : false}
-          anchorEl={this.logPopperAnchor.current}
-          onClose={() => this.setState({showMetaInfo: null})}
-          anchorOrigin={{
-            vertical: 'center',
-            horizontal: 'center',
-          }}
-          transformOrigin={{
-            vertical: 'center',
-            horizontal: 'center',
-          }}
-        >
-          <Paper className={classes.metaInfo}>
-            {showMetaInfo && metaInfo
-              ? metaInfoData
-                ? <div>
-                  <Typography variant="title">{metaInfoData.name}</Typography>
-                  <Markdown>{metaInfoData.description}</Markdown>
-                </div>
-                : <div className={classes.metaInfoInstructions}>
-                      this value has no meta-info attached to it
-                </div>
-              : <div className={classes.metaInfoInstructions}>
-                  click a value to show its meta-info
-              </div>
-            }
-          </Paper>
-        </Popover>
-      </div>
-
-    )
-  }
-}
-
-export default compose(withErrors, withStyles(ArchiveCalc.styles))(ArchiveCalc)
diff --git a/gui/src/components/ArchiveCalcView.js b/gui/src/components/ArchiveCalcView.js
new file mode 100644
index 0000000000000000000000000000000000000000..283f85eff5526503a2514d9843afa4e4e81e9649
--- /dev/null
+++ b/gui/src/components/ArchiveCalcView.js
@@ -0,0 +1,98 @@
+import React from 'react'
+import PropTypes from 'prop-types'
+import { withStyles, LinearProgress } from '@material-ui/core'
+import ReactJson from 'react-json-view'
+import { compose } from 'recompose'
+import { withErrors } from './errors'
+import Markdown from './Markdown'
+import { withApi } from './api'
+
+class ArchiveCalcView extends React.Component {
+  static propTypes = {
+    classes: PropTypes.object.isRequired,
+    api: PropTypes.object.isRequired,
+    raiseError: PropTypes.func.isRequired,
+    uploadId: PropTypes.string.isRequired,
+    calcId: PropTypes.string.isRequired
+  }
+
+  static styles = theme => ({
+    root: {
+      display: 'flex',
+      flexDirection: 'column',
+      height: '100%'
+    },
+    metaInfo: {
+      flex: '0 0 auto',
+      overflowY: 'auto'
+    },
+    data: {
+      flex: '1 1',
+      overflowY: 'auto'
+    }
+  });
+
+  constructor(props) {
+    super(props)
+    this.state = {
+      data: null,
+      metaInfo: null,
+      showMetaInfo: false
+    }
+  }
+
+  componentDidMount() {
+    const {uploadId, calcId, api} = this.props
+    api.archive(uploadId, calcId).then(data => {
+      this.setState({data: data})
+    }).catch(error => {
+      this.setState({data: null})
+      this.props.raiseError(error)
+    })
+
+    api.getMetaInfo().then(metaInfo => {
+      this.setState({metaInfo: metaInfo})
+    }).catch(error => {
+      this.props.raiseError(error)
+    })
+  }
+
+  handleShowMetaInfo(selection, more) {
+    if (selection.name === '_name') {
+      this.setState({showMetaInfo: selection.value})
+    } else {
+      this.setState({showMetaInfo: selection.name})
+    }
+  }
+
+  render() {
+    const { classes } = this.props
+    const { data, showMetaInfo, metaInfo } = this.state
+    const metaInfoData = metaInfo ? metaInfo[showMetaInfo] : null
+
+    return (
+      <div className={classes.root}>
+        <div className={classes.data}>{
+          data
+            ? <ReactJson
+              src={this.state.data}
+              enableClipboard={false}
+              collapsed={4}
+              displayObjectSize={false}
+              onSelect={this.handleShowMetaInfo.bind(this)} />
+            : <LinearProgress variant="query" />
+        }</div>
+        <div className={classes.metaInfo}>{
+          showMetaInfo && metaInfo
+            ? metaInfoData
+              ? <Markdown>{`**${metaInfoData.name}**: ${metaInfoData.description}`}</Markdown>
+              : <Markdown>This value has **no** *meta-info* attached to it.</Markdown>
+            : <Markdown>Click a value to show its *meta-info*!</Markdown>
+        }
+        </div>
+      </div>
+    )
+  }
+}
+
+export default compose(withApi(false), withErrors, withStyles(ArchiveCalcView.styles))(ArchiveCalcView)
diff --git a/gui/src/components/ArchiveLogView.js b/gui/src/components/ArchiveLogView.js
new file mode 100644
index 0000000000000000000000000000000000000000..8776c5cdbc02a0616707c255572c047e69ebbd57
--- /dev/null
+++ b/gui/src/components/ArchiveLogView.js
@@ -0,0 +1,53 @@
+import React from 'react'
+import PropTypes from 'prop-types'
+import { withStyles, LinearProgress } from '@material-ui/core'
+import { compose } from 'recompose'
+import { withErrors } from './errors'
+import { withApi } from './api'
+
+class ArchiveLogView extends React.Component {
+  static propTypes = {
+    classes: PropTypes.object.isRequired,
+    api: PropTypes.object.isRequired,
+    raiseError: PropTypes.func.isRequired,
+    uploadId: PropTypes.string.isRequired,
+    calcId: PropTypes.string.isRequired
+  }
+
+  static styles = theme => ({
+    root: {}
+  });
+
+  constructor(props) {
+    super(props)
+    this.state = {
+      data: null
+    }
+  }
+
+  componentDidMount() {
+    const {uploadId, calcId, api, raiseError} = this.props
+    api.calcProcLog(uploadId, calcId).then(data => {
+      this.setState({data: data})
+    }).catch(error => {
+      this.setState({data: null})
+      raiseError(error)
+    })
+  }
+
+  render() {
+    const { classes } = this.props
+    const { data } = this.state
+
+    return (
+      <div className={classes.root}>{
+        data
+          ? <pre>{data}</pre>
+          : <LinearProgress variant="query" />
+      }
+      </div>
+    )
+  }
+}
+
+export default compose(withApi(false), withErrors, withStyles(ArchiveLogView.styles))(ArchiveLogView)
diff --git a/gui/src/components/CalcDialog.js b/gui/src/components/CalcDialog.js
new file mode 100644
index 0000000000000000000000000000000000000000..881cf83f2e0d7f837699fbe39a58f40513aeb363
--- /dev/null
+++ b/gui/src/components/CalcDialog.js
@@ -0,0 +1,198 @@
+import React from 'react'
+import PropTypes from 'prop-types'
+import { withStyles, Dialog, DialogContent, DialogActions, Button, DialogTitle, Tab, Tabs,
+  Typography, Divider, LinearProgress } from '@material-ui/core'
+import SwipeableViews from 'react-swipeable-views'
+import ArchiveCalcView from './ArchiveCalcView'
+import ArchiveLogView from './ArchiveLogView'
+import { withApi } from './api'
+import { compose } from 'recompose'
+import RawFiles from './RawFiles'
+
+function CalcQuantity(props) {
+  const {children, label, typography} = props
+  return (
+    <div style={{margin: '0px 24px 8px 0'}}>
+      <Typography variant="caption">{label}</Typography>
+      <Typography variant={typography || 'body1'}>{children || 'loading...'}</Typography>
+    </div>
+  )
+}
+
+CalcQuantity.propTypes = {
+  classes: PropTypes.object,
+  children: PropTypes.node,
+  label: PropTypes.string,
+  typography: PropTypes.string
+}
+
+class CalcDialog extends React.Component {
+  static styles = theme => ({
+    dialog: {},
+    dialogTitle: {
+      padding: 0
+    },
+    dialogContent: {
+      paddingBottom: 0
+    },
+    tabContent: {
+      paddingTop: theme.spacing.unit * 3,
+      overflowY: 'auto',
+      height: '70vh',
+      zIndex: 1
+    },
+    quantityRow: {
+      display: 'flex',
+      flexDirection: 'row',
+      flexWrap: 'wrap',
+      marginBottom: theme.spacing.unit
+    }
+  })
+
+  static propTypes = {
+    classes: PropTypes.object.isRequired,
+    api: PropTypes.object.isRequired,
+    raiseError: PropTypes.func.isRequired,
+    uploadId: PropTypes.string.isRequired,
+    calcId: PropTypes.string.isRequired,
+    onClose: PropTypes.func.isRequired
+  }
+
+  state = {
+    open: false,
+    calcData: null,
+    viewIndex: 0
+  }
+
+  componentDidMount() {
+    const {uploadId, calcId} = this.props
+    this.props.api.repo(uploadId, calcId).then(data => {
+      this.setState({calcData: data})
+    }).catch(error => {
+      this.setState({calcData: null})
+      this.props.raiseError(error)
+    })
+  }
+
+  data(quantity) {
+    const path = quantity.split('.')
+    let data = this.state.calcData
+    for (let i = 0; i < path.length; i++) {
+      if (data) {
+        data = data[path[i]]
+      }
+    }
+    return data
+  }
+
+  renderQuantity(quantity, label, defaultValue) {
+    const value = this.data(quantity) || defaultValue || ''
+
+    return (
+      <div key={quantity}>
+        <Typography variant="caption">{label}</Typography>
+        <Typography variant="body1">{value}</Typography>
+      </div>
+    )
+  }
+
+  render() {
+    const { classes, onClose, ...calcProps } = this.props
+    const { viewIndex } = this.state
+
+    const filePaths = this.data('section_repository_info.repository_filepaths') || []
+
+    return (
+      <Dialog className={classes.dialog} open={true} onClose={onClose} fullWidth={true} maxWidth={'md'} >
+        <DialogTitle disableTypography classes={{root: classes.dialogTitle}}>
+          {(!this.state.calcData) ? <LinearProgress /> : ''}
+          <Tabs
+            className={classes.tabs}
+            value={viewIndex}
+            onChange={(event, state) => this.setState({viewIndex: state})}
+            indicatorColor="primary"
+            textColor="primary"
+            variant="fullWidth"
+          >
+            <Tab label="Raw data" />
+            <Tab label="Archive" />
+            <Tab label="Logs" />
+          </Tabs>
+        </DialogTitle>
+        <DialogContent classes={{root: classes.dialogContent}}>
+          <SwipeableViews
+            // axis={theme.direction === 'rtl' ? 'x-reverse' : 'x'}
+            index={viewIndex}
+            onChangeIndex={() => null}
+          >
+            <div className={classes.tabContent}>
+              <div className={classes.quantityRow}>
+                <CalcQuantity label="chemical formula" typography="h4">
+                  {this.data('section_repository_info.section_repository_parserdata.repository_chemical_formula')}
+                </CalcQuantity>
+              </div>
+              <div className={classes.quantityRow}>
+                <CalcQuantity label='dft code'>
+                  {this.data('section_repository_info.section_repository_parserdata.repository_program_name')}
+                </CalcQuantity>
+                <CalcQuantity label='dft code version'>
+                  {this.data('section_repository_info.section_repository_parserdata.repository_code_version')}
+                </CalcQuantity>
+              </div>
+              <div className={classes.quantityRow}>
+                <CalcQuantity label='basis set'>
+                  {this.data('section_repository_info.section_repository_parserdata.repository_basis_set_type')}
+                </CalcQuantity>
+                <CalcQuantity label='xc functional'>
+                  {this.data('section_repository_info.section_repository_parserdata.repository_xc_treatment')}
+                </CalcQuantity>
+              </div>
+              <div className={classes.quantityRow}>
+                <CalcQuantity label='system type'>
+                  {this.data('section_repository_info.section_repository_parserdata.repository_system_type')}
+                </CalcQuantity>
+                <CalcQuantity label='crystal system'>
+                  {this.data('section_repository_info.section_repository_parserdata.repository_crystal_system')}
+                </CalcQuantity>
+                <CalcQuantity label='spacegroup'>
+                  {this.data('section_repository_info.section_repository_parserdata.repository_spacegroup_nr')}
+                </CalcQuantity>
+              </div>
+              <div className={classes.quantityRow}>
+                <CalcQuantity label='upload id'>
+                  {this.data('section_calculation_info.upload_id')}
+                </CalcQuantity>
+                <CalcQuantity label='calculation id'>
+                  {this.data('section_calculation_info.calc_id')}
+                </CalcQuantity>
+                <CalcQuantity label='mainfile'>
+                  {this.data('section_calculation_info.main_file')}
+                </CalcQuantity>
+                <CalcQuantity label='calculation hash'>
+                  {this.data('section_calculation_info.calc_hash')}
+                </CalcQuantity>
+              </div>
+              <Divider />
+              <RawFiles {...calcProps} files={filePaths} />
+            </div>
+            <div className={classes.tabContent}>
+              <ArchiveCalcView {...calcProps} />
+            </div>
+            <div className={classes.tabContent}>
+              <ArchiveLogView {...calcProps} />
+            </div>
+          </SwipeableViews>
+        </DialogContent>
+        <DialogActions>
+          <Button onClick={onClose} color="primary" autoFocus>
+              Close
+          </Button>
+        </DialogActions>
+      </Dialog>
+    )
+  }
+}
+
+export default compose(withApi(false), withStyles(CalcDialog.styles))(CalcDialog)
diff --git a/gui/src/components/CalcLinks.js b/gui/src/components/CalcLinks.js
deleted file mode 100644
index 842e67d7f83d3d7e56643201ca769cefff3c7fa8..0000000000000000000000000000000000000000
--- a/gui/src/components/CalcLinks.js
+++ /dev/null
@@ -1,46 +0,0 @@
-import React from 'react'
-import PropTypes from 'prop-types'
-import { MuiThemeProvider, IconButton, withStyles } from '@material-ui/core'
-import RepoIcon from '@material-ui/icons/Cloud'
-import ArchiveIcon from '@material-ui/icons/Storage'
-import EncIcon from '@material-ui/icons/Assessment'
-import { repoTheme, archiveTheme, encTheme } from '../config'
-import Link from 'react-router-dom/Link'
-
-class CalcLink extends React.Component {
-  static propTypes = {
-    classes: PropTypes.object.isRequired,
-    calcId: PropTypes.string,
-    uploadHash: PropTypes.string,
-    calcHash: PropTypes.string,
-    disabled: PropTypes.bool
-  }
-
-  static styles = theme => ({
-    root: {
-      overflow: 'hidden',
-      whiteSpace: 'nowrap'
-    }
-  });
-
-  render() {
-    const { uploadHash, calcHash, classes, calcId, disabled } = this.props
-    const id = calcId || `${uploadHash}/${calcHash}`
-
-    return (
-      <div className={classes.root}>
-        <MuiThemeProvider theme={repoTheme}>
-          <IconButton color="primary" component={Link} to={`/repo/${id}`} disabled={disabled}><RepoIcon /></IconButton>
-        </MuiThemeProvider>
-        <MuiThemeProvider theme={archiveTheme}>
-          <IconButton color="primary" component={Link} to={`/archive/${id}`} disabled={disabled}><ArchiveIcon /></IconButton>
-        </MuiThemeProvider>
-        <MuiThemeProvider theme={encTheme}>
-          <IconButton color="primary" component={Link} to={`/enc/${id}`} disabled={disabled}><EncIcon /></IconButton>
-        </MuiThemeProvider>
-      </div>
-    )
-  }
-}
-
-export default withStyles(CalcLink.styles)(CalcLink)
diff --git a/gui/src/components/CalcProcLogPopper.js b/gui/src/components/CalcProcLogPopper.js
deleted file mode 100644
index e6b1a3e265735ff1d6268e0c67b39ba4de4041d8..0000000000000000000000000000000000000000
--- a/gui/src/components/CalcProcLogPopper.js
+++ /dev/null
@@ -1,71 +0,0 @@
-import React from 'react'
-import PropTypes from 'prop-types'
-import { withStyles } from '@material-ui/core/styles'
-import Paper from '@material-ui/core/Paper'
-import api from '../api'
-import { Popover } from '@material-ui/core'
-
-
-class CalcProcLogPopper extends React.Component {
-
-  static propTypes = {
-    classes: PropTypes.object.isRequired,
-    raiseError: PropTypes.func.isRequired,
-    archiveId: PropTypes.string.isRequired,
-    open: PropTypes.bool,
-    onClose: PropTypes.func,
-    anchorEl: PropTypes.any
-  }
-
-  static styles = theme => ({
-    paper: {
-      padding: theme.spacing.unit * 2,
-    },
-  })
-
-  state = {
-    logs: null
-  }
-
-  componentDidMount() {
-    const {archiveId} = this.props
-    api.calcProcLog(archiveId).then(logs => {
-      if (logs && logs !== '') {
-        this.setState({logs: logs})
-      }
-    }).catch(error => {
-      this.setState({data: null})
-      this.props.raiseError(error)
-    })
-  }
-
-  render() {
-    const { classes, open, anchorEl, onClose } = this.props
-    const { logs } = this.state
-    return (
-      <div>
-        <Popover
-          open={open}
-          anchorEl={anchorEl}
-          onClose={onClose}
-          anchorOrigin={{
-            vertical: 'center',
-            horizontal: 'center',
-          }}
-          transformOrigin={{
-            vertical: 'center',
-            horizontal: 'center',
-          }}
-        >
-          <Paper className={classes.paper}>
-            <pre>
-              {logs ? logs : 'loading...'}
-            </pre>
-          </Paper>
-        </Popover>
-      </div>
-    )
-  }
-}
-
-export default withStyles(CalcProcLogPopper.styles)(CalcProcLogPopper)
\ No newline at end of file
diff --git a/gui/src/components/Development.js b/gui/src/components/Development.js
index f1fadf5c3ef996d38b3f24c7e0134a25d0a177eb..98c356cbe55579843bf810c68f0992088cf8c893 100644
--- a/gui/src/components/Development.js
+++ b/gui/src/components/Development.js
@@ -3,6 +3,7 @@ import PropTypes from 'prop-types'
 import { withStyles } from '@material-ui/core/styles'
 import Markdown from './Markdown'
 import gitInfo from '../gitinfo'
+import { kibanaBase, apiBase } from '../config'
 
 class Development extends React.Component {
   static propTypes = {
@@ -19,18 +20,24 @@ class Development extends React.Component {
     return (
       <div className={classes.root}>
         <Markdown>{`
-          # Build info
+          ### Build info
           - version: \`${gitInfo.version}\`
           - ref: \`${gitInfo.ref}\`
           - last commit message: *${gitInfo.log}*
 
-          \n\n# Elastic stack
+          ### ReST API
+          Nomad services can also be accessed programmatically via nomad's
+          ReST API. The API is described via [swagger](https://swagger.io/), so
+          you can use your favorite swagger client library (e.g.
+          [bravado](https://github.com/Yelp/bravado) for Python).
+          Here is [our API's swagger UI](${apiBase}/) as reference documentation.
+
+          ### Elastic stack
           We use a central logging system based on the *elastic*-stack
           (previously called *Elastic Logstash Kibana* (ELK)-stack).
           This system pushes logs, events, monitoring data,
           and other application metrics to a central database where it
-          can be analysed visually.
-          \n\n[Link to Kiaba](/nomad/kibana/)
+          can be analysed visually. Here is the [link to Kibana](${kibanaBase}/).
         `}</Markdown>
       </div>
     )
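
Since this diff adds `swagger-client` to the GUI's package.json, programmatic access from JavaScript could look like the sketch below. The `swagger.json` path and the available operations are assumptions, not confirmed by this diff.

```javascript
import Swagger from 'swagger-client'
import { apiBase } from '../config'

// Build a client from the API's swagger spec (spec path is an assumption).
Swagger(`${apiBase}/swagger.json`).then(client => {
  // Operations are generated from the spec; list what is available.
  console.log(Object.keys(client.apis))
})
```
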
diff --git a/gui/src/components/Documentation.js b/gui/src/components/Documentation.js
index 5ed652a68df1da9f1c685dc191c7a4313c427055..7f452964d4b171a7110d8016154338fbbea52238 100644
--- a/gui/src/components/Documentation.js
+++ b/gui/src/components/Documentation.js
@@ -25,7 +25,7 @@ class Documentation extends Component {
     return (
       <div className={classes.root}>
         <div className={classes.content}>
-          <iframe
+          <iframe title="documentation"
             frameBorder={0} width="700" height={window.innerHeight - 64}
             src={`${apiBase}/docs/index.html`}
           />
diff --git a/gui/src/components/Home.js b/gui/src/components/Home.js
index 880717dd160917b6e1e05f49db7af0d85867ef80..9bf4ee8e7d85116e5db32ed82f6af62440f423ac 100644
--- a/gui/src/components/Home.js
+++ b/gui/src/components/Home.js
@@ -3,13 +3,12 @@ import PropTypes from 'prop-types'
 import { withStyles } from '@material-ui/core'
 import Markdown from './Markdown'
 
-
 class Home extends React.Component {
   static propTypes = {
     classes: PropTypes.object.isRequired
   }
   static styles = theme => ({
-    root: {},
+    root: {}
   });
 
   render() {
diff --git a/gui/src/components/LoginLogout.js b/gui/src/components/LoginLogout.js
new file mode 100644
index 0000000000000000000000000000000000000000..bc079d901bf3f5d5bad0263dcf9935309ba8d198
--- /dev/null
+++ b/gui/src/components/LoginLogout.js
@@ -0,0 +1,164 @@
+import React from 'react'
+import PropTypes from 'prop-types'
+import { withStyles } from '@material-ui/core/styles'
+import Typography from '@material-ui/core/Typography'
+import { compose } from 'recompose'
+import { Button, DialogTitle, DialogContent, DialogContentText, TextField, DialogActions,
+  Dialog, FormGroup, LinearProgress } from '@material-ui/core'
+import { withApi } from './api'
+
+class LoginLogout extends React.Component {
+  static propTypes = {
+    classes: PropTypes.object.isRequired,
+    api: PropTypes.object.isRequired,
+    isLoggingIn: PropTypes.bool,
+    user: PropTypes.object,
+    login: PropTypes.func.isRequired,
+    logout: PropTypes.func.isRequired,
+    variant: PropTypes.string,
+    color: PropTypes.string
+  }
+
+  static styles = theme => ({
+    root: {
+      display: 'flex',
+      alignItems: 'center',
+      '& p': {
+        marginRight: theme.spacing.unit * 2
+      }
+    },
+    button: {}, // to allow overrides
+    buttonDisabled: {},
+    errorText: {
+      marginTop: theme.spacing.unit,
+      marginBottom: theme.spacing.unit
+    }
+  })
+
+  constructor(props) {
+    super(props)
+    this.handleLogout = this.handleLogout.bind(this)
+    this.handleChange = this.handleChange.bind(this)
+  }
+
+  state = {
+    loginDialogOpen: false,
+    userName: '',
+    password: '',
+    failure: false
+  }
+
+  componentDidMount() {
+    this._ismounted = true
+  }
+
+  componentWillUnmount() {
+    this._ismounted = false
+  }
+
+  handleLoginDialogClosed(withLogin) {
+    if (withLogin) {
+      this.props.login(this.state.userName, this.state.password, (success) => {
+        if (this._ismounted) {
+          if (success) {
+            this.setState({loginDialogOpen: false, failure: false})
+          } else {
+            this.setState({failure: true, loginDialogOpen: true})
+          }
+        }
+      })
+    } else {
+      if (this._ismounted) {
+        this.setState({failure: false, userName: '', password: '', loginDialogOpen: false})
+      }
+    }
+  }
+
+  handleChange = name => event => {
+    this.setState({
+      [name]: event.target.value
+    })
+  }
+
+  handleLogout() {
+    this.props.logout()
+  }
+
+  render() {
+    const { classes, user, variant, color, isLoggingIn } = this.props
+    const { failure } = this.state
+    if (user) {
+      return (
+        <div className={classes.root}>
+          <Typography color="inherit" variant="body1">
+            Welcome, {user.first_name} {user.last_name}
+          </Typography>
+          <Button
+            className={classes.button}
+            variant={variant} color={color}
+            onClick={this.handleLogout}
+          >Logout</Button>
+        </div>
+      )
+    } else {
+      return (
+        <div className={classes.root}>
+          <Button
+            className={isLoggingIn ? classes.buttonDisabled : classes.button} variant={variant} color={color} disabled={isLoggingIn}
+            onClick={() => this.setState({loginDialogOpen: true})}
+          >Login</Button>
+          <Dialog
+            open={this.state.loginDialogOpen}
+            onClose={() => this.handleLoginDialogClosed(false)}
+          >
+            <DialogTitle>Login</DialogTitle>
+            <DialogContent>
+              <DialogContentText>
+                To login, please enter your email address and password. If you
+                do not have an account, please go to the nomad repository and
+                create one.
+              </DialogContentText>
+              {isLoggingIn ? <LinearProgress/> : ''}
+              {failure ? <DialogContentText className={classes.errorText} color="error">Wrong username or password!</DialogContentText> : ''}
+              <FormGroup>
+                <TextField
+                  disabled={isLoggingIn}
+                  autoFocus
+                  margin="dense"
+                  id="uaseName"
+                  label="Email Address"
+                  type="email"
+                  fullWidth
+                  value={this.state.userName}
+                  onChange={this.handleChange('userName')}
+                />
+                <TextField
+                  disabled={isLoggingIn}
+                  margin="dense"
+                  id="password"
+                  label="Password"
+                  type="password"
+                  fullWidth
+                  value={this.state.password}
+                  onChange={this.handleChange('password')}
+                />
+              </FormGroup>
+            </DialogContent>
+            <DialogActions>
+              <Button onClick={() => this.handleLoginDialogClosed(false)} color="primary">
+                Cancel
+              </Button>
+              <Button onClick={() => this.handleLoginDialogClosed(true)} color="primary"
+                disabled={this.state.userName === '' || this.state.password === ''}
+              >
+                Login
+              </Button>
+            </DialogActions>
+          </Dialog>
+        </div>
+      )
+    }
+  }
+}
+
+export default compose(withApi(false), withStyles(LoginLogout.styles))(LoginLogout)
diff --git a/gui/src/components/Markdown.js b/gui/src/components/Markdown.js
index c9269c94c72cc22e4c0d8484b82f07d4434f8e46..c30432e74f478dd137fd37fff68b1588ade1bac6 100644
--- a/gui/src/components/Markdown.js
+++ b/gui/src/components/Markdown.js
@@ -1,7 +1,7 @@
 import React from 'react'
 import PropTypes from 'prop-types'
 import marked from 'marked'
-import { withStyles } from '@material-ui/core'
+import { withStyles, Typography } from '@material-ui/core'
 import extend from '@babel/runtime/helpers/extends'
 
 /**
@@ -13,7 +13,7 @@ import extend from '@babel/runtime/helpers/extends'
 var styles = theme => ({
   root: {
     fontFamily: theme.typography.fontFamily,
-    fontSize: 16,
+    fontSize: theme.typography.fontSize,
     color: theme.palette.text.primary,
     '& .anchor-link': {
       marginTop: -96,
@@ -23,7 +23,7 @@ var styles = theme => ({
     '& pre, & pre[class*="language-"]': {
       margin: '24px 0',
       padding: '12px 18px',
-      backgroundColor: theme.palette.background.paper,
+      backgroundColor: theme.palette.primary[50],
       borderRadius: theme.shape.borderRadius,
       overflow: 'auto',
       WebkitOverflowScrolling: 'touch' // iOS momentum scrolling.
@@ -35,34 +35,34 @@ var styles = theme => ({
       fontFamily: 'Consolas, "Liberation Mono", Menlo, Courier, monospace',
       padding: '3px 6px',
       color: theme.palette.text.primary,
-      backgroundColor: theme.palette.background.paper,
+      backgroundColor: theme.palette.primary[50],
       fontSize: 14
     },
     '& p code, & ul code, & pre code': {
       fontSize: 14,
       lineHeight: 1.6
     },
-    '& h1': (0, extend)({}, theme.typography.display2, {
-      color: theme.palette.text.secondary,
+    '& h1': (0, extend)({}, theme.typography.h3, {
+      color: theme.palette.text.primary,
       margin: '32px 0 16px'
     }),
-    '& .description': (0, extend)({}, theme.typography.headline, {
+    '& .description': (0, extend)({}, theme.typography.h5, {
       margin: '0 0 40px'
     }),
-    '& h2': (0, extend)({}, theme.typography.display1, {
-      color: theme.palette.text.secondary,
+    '& h2': (0, extend)({}, theme.typography.h4, {
+      color: theme.palette.text.primary,
       margin: '32px 0 24px'
     }),
-    '& h3': (0, extend)({}, theme.typography.headline, {
-      color: theme.palette.text.secondary,
+    '& h3': (0, extend)({}, theme.typography.h5, {
+      color: theme.palette.text.primary,
       margin: '32px 0 24px'
     }),
-    '& h4': (0, extend)({}, theme.typography.title, {
-      color: theme.palette.text.secondary,
+    '& h4': (0, extend)({}, theme.typography.h6, {
+      color: theme.palette.text.primary,
       margin: '24px 0 16px'
     }),
     '& p, & ul, & ol': {
-      lineHeight: 2
+      lineHeight: theme.typography.lineHeight
     },
     '& ul': {
       paddingLeft: 0,
@@ -76,7 +76,7 @@ var styles = theme => ({
           marginLeft: -theme.spacing.unit * 4,
           paddingRight: theme.spacing.unit * 4 - 14
         }
-      },
+      }
     },
 
     '& h1, & h2, & h3, & h4': {
@@ -206,12 +206,10 @@ function Markdown(props) {
   }
 
   return (
-    <div>
-      <div
-        className={classes.root}
-        dangerouslySetInnerHTML={{__html: marked(content)}}
-      />
-    </div>
+    <Typography variant="body1"
+      className={classes.root}
+      dangerouslySetInnerHTML={{__html: marked(content)}}
+    />
   )
 }
 
diff --git a/gui/src/components/Navigation.js b/gui/src/components/Navigation.js
index cdb0068b619e340c14f7fedbaa0323bf178a8580..44fe562ebe053dd72fd54102c85c9a6145c74adc 100644
--- a/gui/src/components/Navigation.js
+++ b/gui/src/components/Navigation.js
@@ -23,10 +23,11 @@ import ChevronLeftIcon from '@material-ui/icons/ChevronLeft'
 import MenuIcon from '@material-ui/icons/Menu'
 import { Link, withRouter } from 'react-router-dom'
 import { compose } from 'recompose'
-import { Avatar, MuiThemeProvider, IconButton } from '@material-ui/core'
-import { genTheme, repoTheme, archiveTheme, encTheme, appStaticBase, analyticsTheme } from '../config'
-import { ErrorSnacks } from './errors'
+import { MuiThemeProvider, IconButton, Checkbox, FormLabel } from '@material-ui/core'
+import { genTheme, repoTheme, archiveTheme, encTheme, analyticsTheme } from '../config'
 import classNames from 'classnames'
+import { HelpContext } from './help'
+import LoginLogout from './LoginLogout'
 
 const drawerWidth = 200
 
@@ -153,6 +154,20 @@ class Navigation extends React.Component {
     },
     menuItem: {
       paddingLeft: theme.spacing.unit * 3
+    },
+    barActions: {
+      display: 'flex',
+      alignItems: 'center'
+    },
+    barSelect: {
+      color: `${theme.palette.getContrastText(theme.palette.primary.main)} !important`
+    },
+    barButton: {
+      borderColor: theme.palette.getContrastText(theme.palette.primary.main),
+      marginRight: theme.spacing.unit * 4
+    },
+    barButtonDisabled: {
+      marginRight: theme.spacing.unit * 4
     }
   })
 
@@ -294,10 +309,21 @@ class Navigation extends React.Component {
                 >
                   <MenuIcon />
                 </IconButton>
-                <Typography variant="title" color="inherit" noWrap className={classes.flex}>
+                <Typography variant="h6" color="inherit" noWrap className={classes.flex}>
                   {selected(toolbarTitles)}
                 </Typography>
-                <Avatar src={`${appStaticBase}/me.jpg`}/>
+                <div className={classes.barActions}>
+                  <LoginLogout variant="outlined" color="inherit" classes={{button: classes.barButton, buttonDisabled: classes.barButtonDisabled}} />
+                  <FormLabel className={classes.barSelect}>Show help</FormLabel>
+                  <HelpContext.Consumer>{
+                    help => (
+                      <Checkbox
+                        checked={!help.someClosed()} indeterminate={!help.allClosed() && help.someClosed()}
+                        onClick={() => help.switchHelp()}
+                        classes={{root: classes.barSelect, checked: classes.barSelect}} />
+                    )
+                  }</HelpContext.Consumer>
+                </div>
               </Toolbar>
             </AppBar>
           </MuiThemeProvider>
@@ -305,9 +331,7 @@ class Navigation extends React.Component {
           <MuiThemeProvider theme={theme}>
             <main className={classes.content}>
               <div className={classes.toolbar} />
-              <ErrorSnacks>
-                {children}
-              </ErrorSnacks>
+              {children}
             </main>
           </MuiThemeProvider>
         </div>
diff --git a/gui/src/components/RawFiles.js b/gui/src/components/RawFiles.js
new file mode 100644
index 0000000000000000000000000000000000000000..1d5a43387fa15f1210c6bd39ce0ac89e7180d266
--- /dev/null
+++ b/gui/src/components/RawFiles.js
@@ -0,0 +1,113 @@
+import React from 'react'
+import PropTypes from 'prop-types'
+import { withStyles, FormGroup, FormControlLabel, Checkbox, FormLabel, IconButton, Divider } from '@material-ui/core'
+import DownloadIcon from '@material-ui/icons/CloudDownload'
+import FileSaver from 'file-saver'
+import { apiBase } from '../config'
+import { withApi } from './api'
+import { compose } from 'recompose'
+
+class RawFiles extends React.Component {
+  static propTypes = {
+    classes: PropTypes.object.isRequired,
+    uploadId: PropTypes.string.isRequired,
+    calcId: PropTypes.string.isRequired,
+    files: PropTypes.arrayOf(PropTypes.string).isRequired,
+    api: PropTypes.object.isRequired,
+    user: PropTypes.object
+  }
+
+  static styles = theme => ({
+    root: {},
+    formLabel: {
+      padding: theme.spacing.unit * 2
+    }
+  })
+
+  state = {
+    selectedFiles: []
+  }
+
+  label(file) {
+    return file.substring(file.lastIndexOf('/') + 1)
+  }
+
+  onSelectFile(file) {
+    const {selectedFiles} = this.state
+    const index = selectedFiles.indexOf(file)
+    if (index === -1) {
+      this.setState({selectedFiles: [file, ...selectedFiles]})
+    } else {
+      // avoid mutating state in place; build a new array without the file
+      this.setState({selectedFiles: selectedFiles.filter(f => f !== file)})
+    }
+  }
+
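+  // Downloads the current selection: a single selected file is fetched directly
+  // from the raw API, multiple files are requested as one zip. For logged-in
+  // users a short-lived signature token is appended, because this
+  // browser-initiated download cannot carry the usual auth headers.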
+  async onDownloadClicked() {
+    const {uploadId, calcId, api, user} = this.props
+    const files = this.state.selectedFiles
+    const downloadFile = files.length === 1 ? this.label(files[0]) : `${calcId}.zip`
+
+    let url
+    let token
+    if (user) {
+      token = (await api.getSignatureToken()).token
+      url = files.length === 1
+        ? `${apiBase}/raw/${uploadId}/${files[0]}?token=${token}`
+        : `${apiBase}/raw/${uploadId}?files=${encodeURIComponent(files.join(','))}&token=${token}`
+    } else {
+      url = files.length === 1
+        ? `${apiBase}/raw/${uploadId}/${files[0]}`
+        : `${apiBase}/raw/${uploadId}?files=${encodeURIComponent(files.join(','))}`
+    }
+
+    FileSaver.saveAs(url, downloadFile)
+  }
+
+  render() {
+    const {classes, files} = this.props
+    const {selectedFiles} = this.state
+    const someSelected = selectedFiles.length > 0
+    const allSelected = files.length === selectedFiles.length && someSelected
+
+    return (
+      <div className={classes.root}>
+        <FormGroup row>
+          <FormControlLabel
+            label="select all" style={{flexGrow: 1}}
+            control={
+              <Checkbox value="select_all" checked={allSelected}
+                indeterminate={!allSelected && someSelected}
+                onChange={() => this.setState({selectedFiles: allSelected ? [] : files.slice()})}
+              />
+            }
+          />
+          <FormLabel className={classes.formLabel}>
+            {selectedFiles.length}/{files.length} files selected
+          </FormLabel>
+          <IconButton
+            disabled={selectedFiles.length === 0}
+            onClick={() => this.onDownloadClicked()}
+          >
+            <DownloadIcon />
+          </IconButton>
+        </FormGroup>
+        <Divider />
+        <FormGroup row>
+          {files.map((file, index) => (
+            <FormControlLabel key={index} label={this.label(file)}
+              control={
+                <Checkbox
+                  checked={selectedFiles.indexOf(file) !== -1}
+                  onChange={() => this.onSelectFile(file)} value={file}
+                />
+              }
+            />
+          ))}
+        </FormGroup>
+      </div>
+    )
+  }
+}
+
+export default compose(withApi(false), withStyles(RawFiles.styles))(RawFiles)
diff --git a/gui/src/components/Repo.js b/gui/src/components/Repo.js
index bb8fc56d921d51db4bb2a88d17990afdf1f5aa3c..ba3e5fbffd99767210defdf7a9b2e759b2cf4c15 100644
--- a/gui/src/components/Repo.js
+++ b/gui/src/components/Repo.js
@@ -7,21 +7,20 @@ import TableCell from '@material-ui/core/TableCell'
 import TablePagination from '@material-ui/core/TablePagination'
 import TableRow from '@material-ui/core/TableRow'
 import Paper from '@material-ui/core/Paper'
-import api from '../api'
-import CalcLinks from './CalcLinks'
 import { TableHead, LinearProgress, FormControl, FormControlLabel, Checkbox, FormGroup,
-  FormLabel, IconButton, MuiThemeProvider } from '@material-ui/core'
-import Markdown from './Markdown'
+  FormLabel, IconButton, MuiThemeProvider, Typography } from '@material-ui/core'
 import { compose } from 'recompose'
 import { withErrors } from './errors'
 import AnalyticsIcon from '@material-ui/icons/Settings'
 import { analyticsTheme } from '../config'
 import Link from 'react-router-dom/Link'
+import { withApi } from './api'
 // import PeriodicTable from './PeriodicTable'
 
 class Repo extends React.Component {
   static propTypes = {
     classes: PropTypes.object.isRequired,
+    api: PropTypes.object.isRequired,
     raiseError: PropTypes.func.isRequired
   }
 
@@ -29,9 +28,11 @@ class Repo extends React.Component {
     root: {},
     data: {
       width: '100%',
-      marginTop: theme.spacing.unit * 3,
       overflowX: 'scroll'
     },
+    title: {
+      marginBottom: theme.spacing.unit * 4
+    },
     progressPlaceholder: {
       height: 5
     },
@@ -69,7 +70,7 @@ class Repo extends React.Component {
   update(page, rowsPerPage, owner) {
     this.setState({loading: true})
     owner = owner || this.state.owner
-    api.repoAll(page, rowsPerPage, owner).then(data => {
+    this.props.api.repoAll(page, rowsPerPage, owner).then(data => {
       const { pagination: { total, page, per_page }, results } = data
       this.setState({
         data: results,
@@ -116,9 +117,7 @@ class Repo extends React.Component {
     }
     return (
       <div className={classes.root}>
-        <Markdown>{`
-          ## The Repository – Raw Code Data
-        `}</Markdown>
+        <Typography variant="h4" className={classes.title}>The Repository – Raw Code Data</Typography>
         {/* <PeriodicTable/> */}
         <FormControl>
           <FormLabel>Filter calculations and only show: </FormLabel>
@@ -166,7 +165,7 @@ class Repo extends React.Component {
                     <TableCell padding="dense" key={rowIndex}>{calc[key]}</TableCell>
                   ))}
                   <TableCell padding="dense">
-                    <CalcLinks uploadHash={calc.upload_hash} calcHash={calc.calc_hash} />
+                    {/* <CalcLinks uploadId={calc.upload_id} calcId={calc.calc_id} /> */}
                   </TableCell>
                 </TableRow>
               ))}
@@ -198,4 +197,4 @@ class Repo extends React.Component {
   }
 }
 
-export default compose(withErrors, withStyles(Repo.styles))(Repo)
+export default compose(withApi(false), withErrors, withStyles(Repo.styles))(Repo)
diff --git a/gui/src/components/RepoCalc.js b/gui/src/components/RepoCalc.js
deleted file mode 100644
index 5ff97ed4cd8553e26e644a9eb80c5ea89110a804..0000000000000000000000000000000000000000
--- a/gui/src/components/RepoCalc.js
+++ /dev/null
@@ -1,63 +0,0 @@
-import React from 'react'
-import PropTypes from 'prop-types'
-import { withStyles, Paper, LinearProgress } from '@material-ui/core'
-import ReactJson from 'react-json-view'
-import api from '../api'
-import Markdown from './Markdown'
-import { withErrors } from './errors'
-import { compose } from 'recompose'
-
-class RepoCalc extends React.Component {
-  static propTypes = {
-    classes: PropTypes.object.isRequired,
-    raiseError: PropTypes.func.isRequired,
-    match: PropTypes.object.isRequired
-  }
-
-  static styles = theme => ({
-    root: {},
-    calcData: {
-      padding: theme.spacing.unit
-    }
-  });
-
-  constructor(props) {
-    super(props)
-    this.state = {
-      data: null
-    }
-  }
-
-  componentDidMount() {
-    const {uploadHash, calcHash} = this.props.match.params
-    api.repo(uploadHash, calcHash).then(data => {
-      this.setState({data: data})
-    }).catch(error => {
-      this.setState({data: null})
-      this.props.raiseError(error)
-    })
-  }
-
-  render() {
-    const { classes } = this.props
-    const { data } = this.state
-
-    return (
-      <div className={classes.root}>
-        <Markdown>{`
-          ## The Repository – Raw Code Data
-        `}</Markdown>
-        <Paper className={classes.calcData}>
-          {
-            data
-              ? <ReactJson src={this.state.data} enableClipboard={false} collapsed={4} />
-              : <LinearProgress variant="query" />
-          }
-        </Paper>
-      </div>
-
-    )
-  }
-}
-
-export default compose(withErrors, withStyles(RepoCalc.styles))(RepoCalc)
diff --git a/gui/src/components/Upload.js b/gui/src/components/Upload.js
index 1b354ad97361369d7e95580bb0cfda57d4ae12d7..033ee26c8e81ded5b91be391f7f3583836a1b1ee 100644
--- a/gui/src/components/Upload.js
+++ b/gui/src/components/Upload.js
@@ -8,12 +8,10 @@ import { withStyles, ExpansionPanel, ExpansionPanelSummary, Typography,
   TableSortLabel} from '@material-ui/core'
 import ExpandMoreIcon from '@material-ui/icons/ExpandMore'
 import ReactJson from 'react-json-view'
-import CalcLinks from './CalcLinks'
 import { compose } from 'recompose'
 import { withErrors } from './errors'
 import { debug } from '../config'
-import UploadCommand from './UploadCommand'
-import CalcProcLogPopper from './CalcProcLogPopper'
+import CalcDialog from './CalcDialog'
 
 class Upload extends React.Component {
   static propTypes = {
@@ -21,11 +19,14 @@ class Upload extends React.Component {
     raiseError: PropTypes.func.isRequired,
     upload: PropTypes.object.isRequired,
     checked: PropTypes.bool,
-    onCheckboxChanged: PropTypes.func
+    onCheckboxChanged: PropTypes.func,
+    onDoesNotExist: PropTypes.func
   }
 
   static styles = theme => ({
-    root: {},
+    root: {
+      marginBottom: theme.spacing.unit
+    },
     heading: {
       fontSize: theme.typography.pxToRem(15),
       fontWeight: theme.typography.fontWeightRegular
@@ -41,10 +42,18 @@ class Upload extends React.Component {
     detailsContent: {
       margin: theme.spacing.unit * 3
     },
+    titleContainer: {
+      flex: '0 0 auto',
+      marginRight: theme.spacing.unit * 2,
+      width: 350,
+      overflowX: 'hidden'
+    },
     title: {
-      flexBasis: '20%',
-      flexShrink: 0,
-      marginRight: theme.spacing.unit * 2
+      textOverflow: 'ellipsis',
+      whiteSpace: 'nowrap',
+      overflowX: 'inherit',
+      direction: 'rtl',
+      textAlign: 'left'
     },
     checkbox: {
       marginRight: theme.spacing.unit * 2
@@ -64,12 +73,8 @@ class Upload extends React.Component {
       alignItems: 'center',
       display: 'flex'
     },
-    logLink: {
-      color: theme.palette.secondary.main,
-      textDecoration: 'none',
-      '&:hover': {
-        textDecoration: 'underline'
-      }
+    clickableRow: {
+      cursor: 'pointer'
     }
   });
 
@@ -78,12 +83,13 @@ class Upload extends React.Component {
     params: {
       page: 1,
       perPage: 5,
-      orderBy: 'status',
+      orderBy: 'tasks_status',
       order: 'asc'
     },
-    archiveLogs: null, // archive id of archive to show logs for
+    archiveLogs: null, // { uploadId, calcId } ids of archive to show logs for
+    loading: true, // data is being loaded from the server and the user should know about it
-    updating: true // it is still not complete and continieusly looking for updates
+    updating: true, // the upload is not complete yet and we continuously poll for updates
+    openCalc: null // the selected calc whose details dialog is currently open
   }
 
   _unmounted = false
@@ -95,10 +101,11 @@ class Upload extends React.Component {
 
     const {page, perPage, orderBy, order} = params
     this.setState({loading: true})
-    this.state.upload.get(page, perPage, orderBy, order)
+    this.state.upload.get(page, perPage, orderBy, order === 'asc' ? 1 : -1)
       .then(upload => {
+        const {tasks_running, process_running, current_task} = upload
         if (!this._unmounted) {
-          const continueUpdating = upload.status !== 'SUCCESS' && upload.status !== 'FAILURE' && !upload.is_stale
+          const continueUpdating = tasks_running || process_running || current_task === 'uploading'
           this.setState({upload: upload, loading: false, params: params, updating: continueUpdating})
           if (continueUpdating) {
             window.setTimeout(() => {
@@ -112,7 +119,11 @@ class Upload extends React.Component {
       .catch(error => {
         if (!this._unmounted) {
           this.setState({loading: false, ...params})
-          this.props.raiseError(error)
+          if (error.name === 'DoesNotExist') {
+            this.props.onDoesNotExist()
+          } else {
+            this.props.raiseError(error)
+          }
         }
       })
   }
@@ -121,6 +132,12 @@ class Upload extends React.Component {
     this.update(this.state.params)
   }
 
+  componentDidUpdate(prevProps) {
+    if (!prevProps.upload.process_running && this.props.upload.process_running) {
+      this.update(this.state.params)
+    }
+  }
+
   componentWillUnmount() {
     this._unmounted = true
   }
@@ -153,12 +170,12 @@ class Upload extends React.Component {
     const { name, create_time } = this.state.upload
 
     return (
-      <div className={classes.title}>
-        <Typography variant="title">
+      <div className={classes.titleContainer}>
+        <Typography variant="h6" className={classes.title}>
           {name || new Date(Date.parse(create_time)).toLocaleString()}
         </Typography>
         {name
-          ? <Typography variant="subheading">
+          ? <Typography variant="subtitle1">
             {new Date(Date.parse(create_time)).toLocaleString()}
           </Typography>
           : 'this upload has no name'
@@ -170,73 +187,119 @@ class Upload extends React.Component {
   renderStepper() {
     const { classes } = this.props
     const { upload } = this.state
-    const { calcs, tasks, current_task, status, errors, waiting } = upload
-
-    let activeStep = tasks.indexOf(current_task)
-    activeStep += (status === 'SUCCESS') ? 1 : 0
+    const { calcs, tasks, current_task, tasks_running, tasks_status, process_running, current_process, errors } = upload
+
+    // map tasks [ uploading, extracting, parse_all, cleanup ] to steps
+    const steps = [ 'upload', 'process', 'commit' ]
+    let step = null
+    const task_index = tasks.indexOf(current_task)
+    if (task_index === 0) {
+      step = 'upload'
+    } else if (task_index > 0 && tasks_running) {
+      step = 'process'
+    } else {
+      step = 'commit'
+    }
+    const stepIndex = steps.indexOf(step)
 
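+    // Each factory decorates the corresponding step label with status-specific
+    // text, progress information, and error details.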
     const labelPropsFactories = {
-      uploading: (props) => {
-        props.children = 'uploading'
-        const { uploading } = upload
-        if (upload.status !== 'FAILURE') {
-          props.optional = (
-            <Typography variant="caption">
-              {waiting ? 'waiting for upload' : `${uploading || 0}%`}
-            </Typography>
-          )
+      upload: (props) => {
+        if (step === 'upload') {
+          props.children = 'uploading'
+          const { uploading } = upload
+          if (upload.tasks_status !== 'FAILURE') {
+            props.optional = (
+              <Typography variant="caption">
+                {`${uploading || 0}%`}
+              </Typography>
+            )
+          }
+        } else {
+          props.children = 'uploaded'
         }
       },
-      extracting: (props) => {
-        props.children = 'extracting'
+      process: (props) => {
+        props.error = tasks_status === 'FAILURE'
+
+        const processIndex = steps.indexOf('process')
+        if (stepIndex <= processIndex) {
+          props.children = 'processing'
+        } else {
+          props.children = 'processed'
+        }
+
         if (current_task === 'extracting') {
+          props.children = 'extracting'
           props.optional = (
             <Typography variant="caption">
               be patient
             </Typography>
           )
+        } else if (current_task === 'parse_all') {
+          props.children = 'parsing'
         }
-      },
-      parse_all: (props) => {
-        props.children = 'parse'
-        if (!calcs) {
-          props.optional = (
-            <Typography variant="caption" >
-              loading...
-            </Typography>
-          )
-        } else if (calcs.pagination.total > 0) {
-          const { total, successes, failures } = calcs.pagination
 
-          if (failures) {
-            props.error = true
+        if (stepIndex >= processIndex) {
+          if (!calcs) {
             props.optional = (
-              <Typography variant="caption" color="error">
-                {successes + failures}/{total}, {failures} failed
+              <Typography variant="caption">
+                matching...
               </Typography>
             )
-          } else {
+          } else if (calcs.pagination.total > 0) {
+            const { total, successes, failures } = calcs.pagination
+            if (failures) {
+              props.error = true
+              props.optional = (
+                <Typography variant="caption" color="error">
+                  {successes + failures}/{total}, {failures} failed
+                </Typography>
+              )
+            } else {
+              props.optional = (
+                <Typography variant="caption">
+                  {successes + failures}/{total}
+                </Typography>
+              )
+            }
+          } else if (tasks_status === 'SUCCESS') {
+            props.error = true
             props.optional = (
-              <Typography variant="caption">
-                {successes + failures}/{total}
-              </Typography>
+              <Typography variant="caption" color="error">No calculations found.</Typography>
             )
           }
-        } else if (status === 'SUCCESS') {
-          props.error = true
+        }
+
+        if (tasks_status === 'FAILURE') {
           props.optional = (
-            <Typography variant="caption" color="error">No calculations found.</Typography>
+            <Typography variant="caption" color="error">
+              {errors.join(' ')}
+            </Typography>
           )
         }
+      },
+      commit: (props) => {
+        props.children = 'inspect'
+
+        if (process_running) {
+          if (current_process === 'commit_upload') {
+            props.children = 'approved'
+            props.optional = <Typography variant="caption">moving data ...</Typography>
+          } else if (current_process === 'delete_upload') {
+            props.children = 'declined'
+            props.optional = <Typography variant="caption">deleting data ...</Typography>
+          }
+        } else {
+          props.optional = <Typography variant="caption">commit or delete</Typography>
+        }
       }
     }
 
     return (
-      <Stepper activeStep={activeStep} classes={{root: classes.stepper}}>
-        {tasks.map((label, index) => {
+      <Stepper activeStep={steps.indexOf(step)} classes={{root: classes.stepper}}>
+        {steps.map((label, index) => {
           const labelProps = {
-            children: label,
-            error: activeStep === index && status === 'FAILURE'
+            children: label
           }
 
           const labelPropsFactory = labelPropsFactories[label]
@@ -244,14 +307,6 @@ class Upload extends React.Component {
             labelPropsFactory(labelProps)
           }
 
-          if (labelProps.error && status === 'FAILURE') {
-            labelProps.optional = (
-              <Typography variant="caption" color="error">
-                {errors.join(' ')}
-              </Typography>
-            )
-          }
-
           return (
             <Step key={label}>
               <StepLabel {...labelProps} />
@@ -265,20 +320,22 @@ class Upload extends React.Component {
   renderCalcTable() {
     const { classes } = this.props
     const { page, perPage, orderBy, order } = this.state.params
-    const { calcs, status, waiting, upload_command } = this.state.upload
+    const { calcs, tasks_status, waiting } = this.state.upload
     const { pagination, results } = calcs
 
     if (pagination.total === 0) {
-      if (this.state.upload.completed) {
+      if (!this.state.upload.tasks_running) {
         return (
           <Typography className={classes.detailsContent}>
-            {status === 'SUCCESS' ? 'No calculcations found.' : 'No calculations to show.'}
+            {tasks_status === 'SUCCESS' ? 'No calculations found.' : 'No calculations to show.'}
           </Typography>
         )
       } else {
         if (waiting) {
           return (
-            <UploadCommand uploadCommand={upload_command} />
+            <Typography className={classes.detailsContent}>
+                Uploading ...
+            </Typography>
           )
         } else {
           return (
@@ -291,16 +348,20 @@ class Upload extends React.Component {
     }
 
     const renderRow = (calc, index) => {
-      const { mainfile, archive_id, parser, tasks, current_task, status, errors } = calc
-      const color = status === 'FAILURE' ? 'error' : 'default'
+      const { mainfile, calc_id, upload_id, parser, tasks, current_task, tasks_status, errors } = calc
+      const color = tasks_status === 'FAILURE' ? 'error' : 'default'
+      const processed = tasks_status === 'FAILURE' || tasks_status === 'SUCCESS'
       const row = (
-        <TableRow key={index}>
+        <TableRow key={index} hover={processed}
+          onClick={() => this.setState({openCalc: processed ? {uploadId: upload_id, calcId: calc_id} : null})}
+          className={processed ? classes.clickableRow : null} >
+
           <TableCell>
             <Typography color={color}>
               {mainfile}
             </Typography>
             <Typography variant="caption" color={color}>
-              {archive_id}
+              {upload_id}/{calc_id}
             </Typography>
           </TableCell>
           <TableCell>
@@ -320,25 +381,14 @@ class Upload extends React.Component {
             </Typography>
           </TableCell>
           <TableCell>
-            <Typography color={color}>
-              {(status === 'SUCCESS' || status === 'FAILURE')
-                ?
-                  <a className={classes.logLink} href="#" onClick={() => this.setState({archiveLogs:  archive_id})}>
-                  {status.toLowerCase()}
-                  </a>
-                : status.toLowerCase()
-              }
-            </Typography>
-          </TableCell>
-          <TableCell>
-            <CalcLinks calcId={archive_id} disabled={status !== 'SUCCESS'} />
+            <Typography color={color}>{tasks_status.toLowerCase()}</Typography>
           </TableCell>
         </TableRow>
       )
 
-      if (status === 'FAILURE') {
+      if (tasks_status === 'FAILURE') {
         return (
-          <Tooltip key={archive_id} title={errors.map((error, index) => (<p key={`${archive_id}-${index}`}>{error}</p>))}>
+          <Tooltip key={calc_id} title={errors.map((error, index) => (<p key={`${calc_id}-${index}`}>{error}</p>))}>
             {row}
           </Tooltip>
         )
@@ -354,8 +404,7 @@ class Upload extends React.Component {
       { id: 'mainfile', sort: true, label: 'mainfile' },
       { id: 'parser', sort: true, label: 'code' },
       { id: 'task', sort: false, label: 'task' },
-      { id: 'status', sort: true, label: 'status' },
-      { id: 'links', sort: false, label: 'links' }
+      { id: 'tasks_status', sort: true, label: 'status' }
     ]
 
     return (
@@ -391,6 +440,7 @@ class Upload extends React.Component {
               <TableCell colSpan={6} />
             </TableRow>
           )}
+
           <TableRow>
             <TablePagination
               count={total}
@@ -405,33 +455,19 @@ class Upload extends React.Component {
     )
   }
 
-  renderLogs() {
-    if (this.state.archiveLogs) {
-      return (
-        <CalcProcLogPopper
-          open={true}
-          onClose={() => this.setState({archiveLogs: null})}
-          anchorEl={window.parent.document.documentElement.firstElementChild}
-          raiseError={this.props.raiseError}
-          archiveId={this.state.archiveLogs}
-        />
-      )
-    } else {
-      return ''
-    }
-  }
-
   render() {
-    const { classes } = this.props
-    const { upload } = this.state
+    const { classes, raiseError } = this.props
+    const { upload, openCalc } = this.state
 
     if (this.state.upload) {
       return (
-        <div ref={this.logPopperAnchor}>
+        <div className={classes.root}>
+          { openCalc ? <CalcDialog raiseError={raiseError} {...openCalc} onClose={() => this.setState({openCalc: null})} /> : ''}
+
           <ExpansionPanel>
             <ExpansionPanelSummary
               expandIcon={<ExpandMoreIcon/>} classes={{root: classes.summary}}>
-              {!(upload.completed || upload.waiting)
+              {(upload.tasks_running || upload.process_running)
                 ? <div className={classes.progress}>
                   <CircularProgress size={32}/>
                 </div>
@@ -455,7 +491,6 @@ class Upload extends React.Component {
               {this.state.loading && !this.state.updating ? <LinearProgress/> : ''}
             </ExpansionPanelDetails>
           </ExpansionPanel>
-          {this.renderLogs()}
         </div>
       )
     } else {
diff --git a/gui/src/components/UploadCommand.js b/gui/src/components/UploadCommand.js
deleted file mode 100644
index b09045d538a291cfc5ab24db1a03fd952d2da654..0000000000000000000000000000000000000000
--- a/gui/src/components/UploadCommand.js
+++ /dev/null
@@ -1,32 +0,0 @@
-import React from 'react'
-import PropTypes from 'prop-types'
-import { Typography, withStyles } from '@material-ui/core'
-
-class UploadCommand extends React.Component {
-  static propTypes = {
-    classes: PropTypes.object.isRequired,
-    uploadCommand: PropTypes.string.isRequired
-  }
-
-  static styles = theme => ({
-    root: {
-      margin: theme.spacing.unit * 2
-    },
-    uploadCommand: {
-      fontFamily: '\'Roboto mono\', monospace',
-      marginTop: theme.spacing.unit * 2
-    }
-  })
-
-  render() {
-    const { classes, uploadCommand } = this.props
-    return (
-      <div className={classes.root}>
-        <Typography>Copy and use the following command. Don't forget to replace the file name.:</Typography>
-        <Typography className={classes.uploadCommand}>{uploadCommand}</Typography>
-      </div>
-    )
-  }
-}
-
-export default withStyles(UploadCommand.styles)(UploadCommand)
diff --git a/gui/src/components/Uploads.js b/gui/src/components/Uploads.js
index 8ce6b3a0f826928fa60bbd4ee5640e6b18349e0d..d36a3697c47af051cd70c615ecde563dd8abe24b 100644
--- a/gui/src/components/Uploads.js
+++ b/gui/src/components/Uploads.js
@@ -2,23 +2,23 @@ import React from 'react'
 import PropTypes from 'prop-types'
 import Markdown from './Markdown'
 import { withStyles, Paper, IconButton, FormGroup, Checkbox, FormControlLabel, FormLabel,
-  LinearProgress, InputLabel, Input, FormHelperText, Button, Popover, Grid, Typography,
-  DialogContent, DialogActions} from '@material-ui/core'
+  LinearProgress,
+  Typography} from '@material-ui/core'
 import UploadIcon from '@material-ui/icons/CloudUpload'
 import Dropzone from 'react-dropzone'
-import api from '../api'
 import Upload from './Upload'
-import { withErrors } from './errors'
 import { compose } from 'recompose'
 import DeleteIcon from '@material-ui/icons/Delete'
+import ReloadIcon from '@material-ui/icons/Cached'
 import CheckIcon from '@material-ui/icons/Check'
-import AddIcon from '@material-ui/icons/Add'
-import UploadCommand from './UploadCommand'
 import ConfirmDialog from './ConfirmDialog'
+import { Help } from './help'
+import { withApi } from './api'
 
 class Uploads extends React.Component {
   static propTypes = {
     classes: PropTypes.object.isRequired,
+    api: PropTypes.object.isRequired,
     raiseError: PropTypes.func.isRequired
   }
 
@@ -27,7 +27,9 @@ class Uploads extends React.Component {
       width: '100%'
     },
     dropzoneContainer: {
-      height: 192
+      height: 192,
+      marginTop: theme.spacing.unit * 2,
+      marginBottom: theme.spacing.unit * 2
     },
     dropzone: {
       textAlign: 'center',
@@ -62,49 +64,29 @@ class Uploads extends React.Component {
     },
     uploads: {
       marginTop: theme.spacing.unit * 2
-    },
-    uploadFormControl: {
-      margin: theme.spacing.unit * 2
-    },
-    button: {
-      margin: theme.spacing.unit
-    },
-    rightIcon: {
-      marginLeft: theme.spacing.unit
-    },
-    uploadNameInput: {
-      width: '100%'
-    },
-    uploadKindHeading: {
-      paddingBottom: theme.spacing.unit
-    },
-    uploadKindDescription: {
-      paddingTop: theme.spacing.unit,
-      paddingBottom: theme.spacing.unit * 2
-    },
-    commandUpload: {
-      height: 192
     }
   })
 
   state = {
     uploads: null,
+    uploadCommand: 'loading ...',
     selectedUploads: [],
     loading: true,
-    showAccept: false,
-    uploadName: '',
-    uploadCommand: null,
-    showUploadCommand: false,
-    uploadPopperAnchor: null
+    showAccept: false
   }
 
   componentDidMount() {
     this.update()
+    this.props.api.getUploadCommand()
+      .then(command => this.setState({uploadCommand: command}))
+      .catch(error => {
+        this.props.raiseError(error)
+      })
   }
 
   update() {
     this.setState({loading: true})
-    api.getUploads()
+    this.props.api.getUploads()
       .then(uploads => {
         const filteredUploads = uploads.filter(upload => !upload.is_state)
         this.setState({uploads: filteredUploads, selectedUploads: [], loading: false})
@@ -115,34 +97,9 @@ class Uploads extends React.Component {
       })
   }
 
-  onCreateUploadCmdClicked(event) {
-    event.persist()
-    const existingUpload = this.state.uploads
-      .find(upload => upload.name === this.state.uploadName && upload.waiting)
-    if (existingUpload) {
-      const upload = existingUpload
-      this.setState({
-        uploadCommand: upload.upload_command,
-        showUploadCommand: true,
-        uploadPopperAnchor: event.target})
-    } else {
-      api.createUpload(this.state.uploadName)
-        .then(upload => {
-          this.setState({
-            uploads: [...this.state.uploads, upload],
-            uploadCommand: upload.upload_command,
-            showUploadCommand: true,
-            uploadPopperAnchor: event.target})
-        })
-        .catch(error => {
-          this.props.raiseError(error)
-        })
-    }
-  }
-
   onDeleteClicked() {
     this.setState({loading: true})
-    Promise.all(this.state.selectedUploads.map(upload => api.deleteUpload(upload.upload_id)))
+    Promise.all(this.state.selectedUploads.map(upload => this.props.api.deleteUpload(upload.upload_id)))
       .then(() => this.update())
       .catch(error => {
         this.props.raiseError(error)
@@ -156,7 +113,7 @@ class Uploads extends React.Component {
 
   handleAccept() {
     this.setState({loading: true})
-    Promise.all(this.state.selectedUploads.map(upload => api.unstageUpload(upload.upload_id)))
+    Promise.all(this.state.selectedUploads.map(upload => this.props.api.commitUpload(upload.upload_id)))
       .then(() => {
         this.setState({showAccept: false})
         return this.update()
@@ -167,15 +124,22 @@ class Uploads extends React.Component {
       })
   }
 
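+  // Sort by the stable gui_upload_id so the list order does not change while
+  // uploads are still being transferred or processed.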
+  sortedUploads() {
+    return this.state.uploads.concat()
+      .sort((a, b) => (a.gui_upload_id === b.gui_upload_id) ? 0 : ((a.gui_upload_id < b.gui_upload_id) ? -1 : 1))
+  }
+
+  handleDoesNotExist(nonExistingUpload) {
+    this.setState({
+      uploads: this.state.uploads.filter(upload => upload !== nonExistingUpload)
+    })
+  }
+
   onDrop(files) {
     files.forEach(file => {
-      api.createUpload(file.name)
-        .then(upload => {
-          this.setState({uploads: [...this.state.uploads, upload]})
-          upload.uploadFile(file)
-            .catch(this.props.raiseError)
-        })
-        .catch(this.props.raiseError)
+      const upload = this.props.api.createUpload(file.name)
+      this.setState({uploads: [...this.state.uploads, upload]})
+      upload.uploadFile(file).catch(this.props.raiseError)
     })
   }
 
@@ -191,7 +155,7 @@ class Uploads extends React.Component {
 
   onSelectionAllChanged(checked) {
     if (checked) {
-      this.setState({selectedUploads: [...this.state.uploads.filter(upload => upload.completed)]})
+      this.setState({selectedUploads: [...this.state.uploads.filter(upload => !upload.tasks_running)]})
     } else {
       this.setState({selectedUploads: []})
     }
@@ -199,134 +163,101 @@ class Uploads extends React.Component {
 
   renderUploads() {
     const { classes } = this.props
-    const { uploads, selectedUploads } = this.state
+    const { selectedUploads } = this.state
+    const uploads = this.state.uploads || []
 
-    if (uploads && uploads.length > 0) {
-      return (
-        <div>
-          <div style={{width: '100%'}}>
-            <Markdown text={'These are the *existing* uploads:'} />
-            <FormGroup className={classes.selectFormGroup} row>
-              <FormControlLabel label="all" style={{flexGrow: 1}} control={(
-                <Checkbox
-                  checked={selectedUploads.length === uploads.length}
-                  onChange={(_, checked) => this.onSelectionAllChanged(checked)}
-                />
-              )} />
-              <FormLabel classes={{root: classes.selectLabel}}>
-                {`selected uploads ${selectedUploads.length}/${uploads.length}`}
-              </FormLabel>
-              <IconButton
-                disabled={selectedUploads.length === 0}
-                onClick={this.onDeleteClicked.bind(this)}
-              >
-                <DeleteIcon />
-              </IconButton>
+    return (<div>
+      <div style={{width: '100%'}}>
+        <FormGroup className={classes.selectFormGroup} row>
+          <FormControlLabel label="all" style={{flexGrow: 1}} control={(
+            <Checkbox
+              checked={selectedUploads.length === uploads.length && uploads.length !== 0}
+              onChange={(_, checked) => this.onSelectionAllChanged(checked)}
+            />
+          )} />
+          <IconButton onClick={() => this.update()}><ReloadIcon /></IconButton>
+          <FormLabel classes={{root: classes.selectLabel}}>
+            {`selected uploads ${selectedUploads.length}/${uploads.length}`}
+          </FormLabel>
+          <IconButton
+            disabled={selectedUploads.length === 0}
+            onClick={this.onDeleteClicked.bind(this)}
+          >
+            <DeleteIcon />
+          </IconButton>
 
-              <IconButton disabled={selectedUploads.length === 0} onClick={this.onAcceptClicked.bind(this)}>
-                <CheckIcon />
-              </IconButton>
-              <ConfirmDialog open={this.state.showAccept} onClose={() => this.setState({showAccept: false})} onOk={this.handleAccept.bind(this)}>
-                If you agree the selected uploads will move out of your private staging area into the public nomad.
-              </ConfirmDialog>
+          <IconButton disabled={selectedUploads.length === 0} onClick={this.onAcceptClicked.bind(this)}>
+            <CheckIcon />
+          </IconButton>
+          <ConfirmDialog open={this.state.showAccept} onClose={() => this.setState({showAccept: false})} onOk={this.handleAccept.bind(this)}>
+              If you agree, the selected uploads will be moved out of your private staging area into the public nomad.
+          </ConfirmDialog>
 
-            </FormGroup>
-          </div>
-          <div className={classes.uploads}>
-            {this.state.uploads.map((upload) => (
-              <Upload key={upload.upload_id} upload={upload}
-                checked={selectedUploads.indexOf(upload) !== -1}
-                onCheckboxChanged={checked => this.onSelectionChanged(upload, checked)}/>
-            ))}
-          </div>
-        </div>
-      )
-    } else {
-      return ''
-    }
+        </FormGroup>
+      </div>
+      <div className={classes.uploads}>{
+        (uploads.length > 0)
+          ? (
+            <div>
+              <Help cookie="uploadList">{`
+                These are all your existing, not yet committed uploads. You can see how
+                processing progresses and review your uploads before committing them to
+                the *nomad repository*.
+
+                Select uploads to delete or commit them. Click on uploads to see individual
+                calculations. Click on calculations to see more details on each calculation.
+              `}</Help>
+              {
+                this.sortedUploads().map(upload => (
+                  <Upload key={upload.gui_upload_id} upload={upload}
+                    checked={selectedUploads.indexOf(upload) !== -1}
+                    onDoesNotExist={() => this.handleDoesNotExist(upload)}
+                    onCheckboxChanged={checked => this.onSelectionChanged(upload, checked)}/>
+                ))
+              }
+            </div>
+          ) : ''
+      }</div>
+    </div>)
   }
 
   render() {
     const { classes } = this.props
-    const { showUploadCommand, uploadCommand, uploadPopperAnchor } = this.state
+    const { uploadCommand } = this.state
 
     return (
       <div className={classes.root}>
-        <Markdown>{`
-          ## Upload your own data
+        <Typography variant="h4">Upload your own data</Typography>
+        <Help cookie="uploadHelp" component={Markdown}>{`
           You can upload your own data. Have your code output ready in a popular archive
           format (e.g. \`*.zip\` or \`*.tar.gz\`).  Your upload can
           comprise the output of multiple runs, even of different codes. Don't worry, nomad
-          will find it.`}
-        </Markdown>
-        <Grid container spacing={24}>
-          <Grid item xs>
-            <Typography variant="headline" className={classes.uploadKindHeading}>
-              Browser upload
-            </Typography>
-            <Paper className={classes.dropzoneContainer}>
-              <Dropzone
-                accept="application/zip"
-                className={classes.dropzone}
-                activeClassName={classes.dropzoneAccept}
-                rejectClassName={classes.dropzoneReject}
-                onDrop={this.onDrop.bind(this)}
-              >
-                <p>drop files here</p>
-                <UploadIcon style={{fontSize: 36}}/>
-              </Dropzone>
-            </Paper>
-            <Typography className={classes.uploadKindDescription}>
-              Just drop your file above. You know this from many other services in the internet.
-            </Typography>
-          </Grid>
-          <Grid item xs>
-            <Typography variant="headline" className={classes.uploadKindHeading}>
-              Command upload
-            </Typography>
-            <Paper className={classes.commandUpload}>
-              <DialogContent>
-                <InputLabel htmlFor="name-helper">Upload name</InputLabel>
-                <Input className={classes.uploadNameInput}
-                  id="name-helper" value={this.state.uploadName}
-                  onChange={(event) => this.setState({uploadName: event.target.value})}
-                />
-                <FormHelperText id="name-helper-text">optional, helps to track the upload</FormHelperText>
-              </DialogContent>
-              <DialogActions>
-                <Button
-                  color="primary" className={classes.button} variant="contained"
-                  onClick={this.onCreateUploadCmdClicked.bind(this)}
-                >
-                    add upload
-                  <AddIcon className={classes.rightIcon}/>
-                </Button>
-                <Popover
-                  id="upload-command-popper"
-                  onClose={() => this.setState({showUploadCommand: false})}
-                  open={showUploadCommand}
-                  anchorEl={uploadPopperAnchor}
-                  anchorOrigin={{
-                    vertical: 'bottom',
-                    horizontal: 'center'
-                  }}
-                  transformOrigin={{
-                    vertical: 'top',
-                    horizontal: 'center'
-                  }}
-                >
-                  <UploadCommand uploadCommand={uploadCommand} />
-                </Popover>
-              </DialogActions>
-            </Paper>
-            <Typography className={classes.uploadKindDescription}>
-              You can upload your file via <strong>curl</strong>. Optionally, you
-              can provide a name that will help to track different uploads.
-              Without a name, you only have the upload time to follow your uploads.
-              You can find the command by unfolding the new upload element.
-            </Typography>
-          </Grid>
-        </Grid>
+          will find it, just drop it below:
+        `}</Help>
+
+        <Paper className={classes.dropzoneContainer}>
+          <Dropzone
+            accept={['application/zip', 'application/gzip', 'application/bz2']}
+            className={classes.dropzone}
+            activeClassName={classes.dropzoneAccept}
+            rejectClassName={classes.dropzoneReject}
+            onDrop={this.onDrop.bind(this)}
+          >
+            <p>drop files here</p>
+            <UploadIcon style={{fontSize: 36}}/>
+          </Dropzone>
+        </Paper>
+
+        <Help cookie="uploadCommandHelp">{`
+          Alternatively, you can upload files via the following shell command.
+          Replace \`<local_file>\` with your file. After executing the command,
+          return here and reload.
+        `}</Help>
+
+        <Markdown>{`
+          \`\`\`
+            ${uploadCommand}
+          \`\`\`
+        `}</Markdown>
 
         {this.renderUploads()}
         {this.state.loading ? <LinearProgress/> : ''}
@@ -335,4 +266,4 @@ class Uploads extends React.Component {
   }
 }
 
-export default compose(withErrors, withStyles(Uploads.styles))(Uploads)
+export default compose(withApi(true), withStyles(Uploads.styles))(Uploads)
diff --git a/gui/src/components/api.js b/gui/src/components/api.js
new file mode 100644
index 0000000000000000000000000000000000000000..0b909f064e7bed3bc58c7ac1411d0452ecc3f583
--- /dev/null
+++ b/gui/src/components/api.js
@@ -0,0 +1,409 @@
+import React from 'react'
+import PropTypes, { instanceOf } from 'prop-types'
+import { withErrors } from './errors'
+import { UploadRequest } from '@navjobs/upload'
+import Swagger from 'swagger-client'
+import { apiBase } from '../config'
+import { Typography, withStyles, LinearProgress } from '@material-ui/core'
+import LoginLogout from './LoginLogout'
+import { Cookies, withCookies } from 'react-cookie'
+import { compose } from 'recompose'
+
+const ApiContext = React.createContext()
+
+export class DoesNotExist extends Error {
+  constructor(msg) {
+    super(msg)
+    this.name = 'DoesNotExist'
+  }
+}
+
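+// Maps API upload_ids to stable GUI-side ids. Fresh uploads have no upload_id
+// until the transfer completes, so React keys are based on the GUI id instead.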
+const upload_to_gui_ids = {}
+let gui_upload_id_counter = 0
+
+class Upload {
+  constructor(json, api) {
+    this.api = api
+    this.handleApiError = api.handleApiError.bind(api)
+
+    // Cannot use upload_id as key in GUI, because uploads don't have an upload_id
+    // before upload is completed
+    if (json.upload_id) {
+      // instance from the API
+      this.gui_upload_id = upload_to_gui_ids[json.upload_id]
+      if (this.gui_upload_id === undefined) {
+        // never seen in the GUI, needs a GUI id
+        this.gui_upload_id = gui_upload_id_counter++
+        upload_to_gui_ids[json.upload_id] = this.gui_upload_id
+      }
+    } else {
+      // new instance, not from the API
+      this.gui_upload_id = gui_upload_id_counter++
+    }
+    Object.assign(this, json)
+  }
+
+  uploadFile(file) {
+    const uploadFileWithProgress = async() => {
+      let uploadRequest = await UploadRequest(
+        {
+          request: {
+            url: `${apiBase}/uploads/?name=${this.name}`,
+            method: 'PUT',
+            headers: {
+              'Content-Type': 'application/gzip',
+              ...this.api.auth_headers
+            }
+          },
+          files: [file],
+          progress: value => {
+            this.uploading = value
+          }
+        }
+      )
+      if (uploadRequest.error) {
+        this.handleApiError(uploadRequest.error)
+      }
+      if (uploadRequest.aborted) {
+        throw Error('User abort')
+      }
+      this.uploading = 100
+      this.upload_id = uploadRequest.response.upload_id
+      upload_to_gui_ids[this.upload_id] = this.gui_upload_id
+    }
+
+    return uploadFileWithProgress()
+      .then(() => this)
+  }
+
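+  // Fetches the current state of this upload. While the file transfer is still
+  // running (or no upload_id exists yet), it resolves to the local instance
+  // instead of querying the API.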
+  get(page, perPage, orderBy, order) {
+    if (this.uploading !== null && this.uploading !== 100) {
+      return Promise.resolve(this)
+    } else {
+      if (this.upload_id) {
+        return this.api.swaggerPromise.then(client => client.apis.uploads.get_upload({
+          upload_id: this.upload_id,
+          page: page || 1,
+          per_page: perPage || 5,
+          order_by: orderBy || 'mainfile',
+          order: order || -1
+        }))
+          .catch(this.handleApiError)
+          .then(response => response.body)
+          .then(uploadJson => {
+            Object.assign(this, uploadJson)
+            return this
+          })
+      } else {
+        return Promise.resolve(this)
+      }
+    }
+  }
+}
+
+class Api {
+  static async createSwaggerClient(userNameToken, password) {
+    let data
+    if (userNameToken) {
+      let auth = {
+        'X-Token': userNameToken
+      }
+      if (password) {
+        auth = {
+          'HTTP Basic': {
+            username: userNameToken,
+            password: password
+          }
+        }
+      }
+      data = {authorizations: auth}
+    }
+
+    return Swagger(`${apiBase}/swagger.json`, data)
+  }
+
+  constructor(user) {
+    user = user || {}
+    this.auth_headers = {
+      'X-Token': user.token
+    }
+    this.swaggerPromise = Api.createSwaggerClient(user.token)
+
+    this.handleApiError = this.handleApiError.bind(this)
+  }
+
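+  // Normalizes API errors: 404 responses become DoesNotExist, other responses
+  // become a generic Error with the server's message, and errors without a
+  // response are treated as network failures.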
+  handleApiError(e) {
+    if (e.response) {
+      const body = e.response.body
+      const message = (body && body.message) ? body.message : e.response.statusText
+      if (e.response.status === 404) {
+        throw new DoesNotExist(message)
+      } else {
+        throw Error(`API error (${e.response.status}): ${message}`)
+      }
+    } else {
+      throw Error('Network related error, cannot reach API')
+    }
+  }
+
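+  // Creates a local Upload stub that mirrors the processing task pipeline; it
+  // only receives its real upload_id once uploadFile() has completed.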
+  createUpload(name) {
+    return new Upload({
+      name: name,
+      tasks: ['uploading', 'extracting', 'parse_all', 'cleanup'],
+      current_task: 'uploading',
+      uploading: 0,
+      create_time: new Date()
+    }, this)
+  }
+
+  async getUploads() {
+    const client = await this.swaggerPromise
+    return client.apis.uploads.get_uploads()
+      .catch(this.handleApiError)
+      .then(response => response.body.map(uploadJson => {
+        const upload = new Upload(uploadJson, this)
+        upload.uploading = 100
+        return upload
+      }))
+  }
+
+  async archive(uploadId, calcId) {
+    const client = await this.swaggerPromise
+    return client.apis.archive.get_archive_calc({
+      upload_id: uploadId,
+      calc_id: calcId
+    })
+      .catch(this.handleApiError)
+      .then(response => response.body)
+  }
+
+  async calcProcLog(uploadId, calcId) {
+    const client = await this.swaggerPromise
+    return client.apis.archive.get_archive_logs({
+      upload_id: uploadId,
+      calc_id: calcId
+    })
+      .catch(this.handleApiError)
+      .then(response => response.text)
+  }
+
+  async repo(uploadId, calcId) {
+    const client = await this.swaggerPromise
+    return client.apis.repo.get_repo_calc({
+      upload_id: uploadId,
+      calc_id: calcId
+    })
+      .catch(this.handleApiError)
+      .then(response => response.body)
+  }
+
+  async repoAll(page, perPage, owner) {
+    const client = await this.swaggerPromise
+    return client.apis.repo.get_calcs({
+      page: page,
+      per_page: perPage,
+      owner: owner || 'all'
+    })
+      .catch(this.handleApiError)
+      .then(response => response.body)
+  }
+
+  async deleteUpload(uploadId) {
+    const client = await this.swaggerPromise
+    return client.apis.uploads.delete_upload({upload_id: uploadId})
+      .catch(this.handleApiError)
+      .then(response => response.body)
+  }
+
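+  // Commits (unstages) an upload by sending the 'commit' command to the
+  // upload's command endpoint.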
+  async commitUpload(uploadId) {
+    const client = await this.swaggerPromise
+    return client.apis.uploads.exec_upload_command({
+      upload_id: uploadId,
+      payload: {
+        command: 'commit'
+      }
+    })
+      .catch(this.handleApiError)
+      .then(response => response.body)
+  }
+
+  async getSignatureToken() {
+    const client = await this.swaggerPromise
+    return client.apis.auth.get_token()
+      .catch(this.handleApiError)
+      .then(response => response.body)
+  }
+
+  _cachedMetaInfo = null
+
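+  // Loads the meta-info definitions once and caches them; dependencies listed
+  // by each file are fetched recursively, with loadedDependencies guarding
+  // against loading the same path twice.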
+  async getMetaInfo() {
+    if (this._cachedMetaInfo) {
+      return this._cachedMetaInfo
+    } else {
+      const loadMetaInfo = async(path) => {
+        const client = await this.swaggerPromise
+        return client.apis.archive.get_metainfo({metainfo_path: path})
+          .catch(this.handleApiError)
+          .then(response => response.body)
+          .then(async data => {
+            if (!this._cachedMetaInfo) {
+              this._cachedMetaInfo = {
+                loadedDependencies: {}
+              }
+            }
+            this._cachedMetaInfo.loadedDependencies[path] = true
+            if (data.metaInfos) {
+              data.metaInfos.forEach(info => {
+                this._cachedMetaInfo[info.name] = info
+                info.relativePath = path
+              })
+            }
+            if (data.dependencies) {
+              // await the transitive dependencies so the cache is complete on return
+              await Promise.all(data.dependencies
+                .filter(dep => this._cachedMetaInfo.loadedDependencies[dep.relativePath] !== true)
+                .map(dep => loadMetaInfo(dep.relativePath)))
+            }
+          })
+      }
+      await loadMetaInfo('all.nomadmetainfo.json')
+      return this._cachedMetaInfo
+    }
+  }
+
+  async getUploadCommand() {
+    const client = await this.swaggerPromise
+    return client.apis.uploads.get_upload_command()
+      .catch(this.handleApiError)
+      .then(response => response.body.upload_command)
+  }
+}
+
+export class ApiProviderComponent extends React.Component {
+  static propTypes = {
+    children: PropTypes.oneOfType([
+      PropTypes.arrayOf(PropTypes.node),
+      PropTypes.node
+    ]).isRequired,
+    cookies: instanceOf(Cookies).isRequired,
+    raiseError: PropTypes.func.isRequired
+  }
+
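+  // Restores the session from the token cookie on page load, if one is present.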
+  componentDidMount() {
+    const token = this.props.cookies.get('token')
+    if (token) {
+      this.state.login(token)
+    }
+  }
+
+  state = {
+    api: new Api(),
+    user: null,
+    isLoggingIn: false,
+    login: (userNameToken, password, successCallback) => {
+      this.setState({isLoggingIn: true})
+      successCallback = successCallback || (() => true)
+      Api.createSwaggerClient(userNameToken, password)
+        .catch(this.state.api.handleApiError)
+        .then(client => {
+          client.apis.auth.get_user()
+            .catch(error => {
+              if (error.response.status !== 401) {
+                try {
+                  this.state.api.handleApiError(error)
+                } catch (e) {
+                  this.setState({isLoggingIn: false, user: null})
+                  this.props.raiseError(e)
+                }
+              } else {
+                this.setState({isLoggingIn: false})
+              }
+            })
+            .then(response => {
+              if (response) {
+                const user = response.body
+                this.setState({api: new Api(user), user: user})
+                this.props.cookies.set('token', user.token)
+                successCallback(true)
+              } else {
+                successCallback(false)
+              }
+              this.setState({isLoggingIn: false})
+            })
+        })
+        .catch(error => {
+          this.setState({isLoggingIn: false, user: null})
+          this.props.raiseError(error)
+        })
+    },
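+    // Resets to an anonymous Api instance and removes the stored token.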
+    logout: () => {
+      this.setState({api: new Api(), user: null})
+      this.props.cookies.remove('token')
+    }
+  }
+
+  render() {
+    const { children } = this.props
+    return (
+      <ApiContext.Provider value={this.state}>
+        {children}
+      </ApiContext.Provider>
+    )
+  }
+}
+
+class LoginRequiredUnstyled extends React.Component {
+  static propTypes = {
+    classes: PropTypes.object.isRequired,
+    isLoggingIn: PropTypes.bool
+  }
+
+  static styles = theme => ({
+    root: {
+      display: 'flex',
+      alignItems: 'center',
+      '& p': {
+        marginRight: theme.spacing.unit * 2
+      }
+    }
+  })
+
+  render() {
+    const {classes, isLoggingIn} = this.props
+    if (!isLoggingIn) {
+      return (
+        <div className={classes.root}>
+          <Typography>
+            To upload data, you must have a nomad account and be logged in.
+          </Typography>
+          <LoginLogout variant="outlined" color="primary"/>
+        </div>
+      )
+    } else {
+      return <LinearProgress />
+    }
+  }
+}
+
+export const ApiProvider = compose(withCookies, withErrors)(ApiProviderComponent)
+
+const LoginRequired = withStyles(LoginRequiredUnstyled.styles)(LoginRequiredUnstyled)
+
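+// HOC that injects the api context into the given component. With
+// loginRequired set, anonymous users get a login prompt instead.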
+export function withApi(loginRequired) {
+  return function(Component) {
+    function WithApiComponent(props) {
+      return (
+        <ApiContext.Consumer>
+          {apiContext => (
+            (apiContext.user || !loginRequired)
+              ? <Component
+                {...props} {...apiContext} />
+              : <LoginRequired {...apiContext} />
+          )}
+        </ApiContext.Consumer>
+      )
+    }
+    return withErrors(WithApiComponent)
+  }
+}
diff --git a/gui/src/components/help.js b/gui/src/components/help.js
new file mode 100644
index 0000000000000000000000000000000000000000..69220a9d80c79efbe869895c95ae524a3732ad75
--- /dev/null
+++ b/gui/src/components/help.js
@@ -0,0 +1,105 @@
+import React from 'react'
+import { withStyles, Button } from '@material-ui/core'
+import Markdown from './Markdown'
+import PropTypes, { instanceOf } from 'prop-types'
+import { Cookies, withCookies } from 'react-cookie'
+
+export const HelpContext = React.createContext()
+
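+// Tracks which help panels the user has dismissed; the state is persisted in
+// a 'help' cookie.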
+class HelpProviderComponent extends React.Component {
+  static propTypes = {
+    children: PropTypes.oneOfType([
+      PropTypes.arrayOf(PropTypes.node),
+      PropTypes.node
+    ]).isRequired,
+    cookies: instanceOf(Cookies).isRequired
+  }
+
+  state = {
+    helpCookies: [],
+    allHelpCookies: [],
+    allClosed: () => this.state.helpCookies.length === this.state.allHelpCookies.length,
+    someClosed: () => this.state.helpCookies.length !== 0,
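+    // isOpen also registers the cookie; it is called during render, hence
+    // allHelpCookies is mutated in place instead of going through setState.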
+    isOpen: (cookie) => {
+      if (this.state.allHelpCookies.indexOf(cookie) === -1) {
+        this.state.allHelpCookies.push(cookie)
+      }
+      return this.state.helpCookies.indexOf(cookie) === -1
+    },
+    gotIt: (cookie) => {
+      const updatedHelpCookies = [...this.state.helpCookies, cookie]
+      this.props.cookies.set('help', updatedHelpCookies)
+      this.setState({helpCookies: updatedHelpCookies})
+    },
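+    // Re-opens all help panels if some are closed; otherwise closes all of them.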
+    switchHelp: () => {
+      const updatedCookies = this.state.someClosed() ? [] : this.state.allHelpCookies
+      this.setState({helpCookies: updatedCookies})
+      this.props.cookies.set('help', updatedCookies)
+    }
+  }
+
+  componentDidMount() {
+    this.setState({helpCookies: this.props.cookies.get('help') || []})
+  }
+
+  render() {
+    return (
+      <HelpContext.Provider value={this.state}>
+        {this.props.children}
+      </HelpContext.Provider>
+    )
+  }
+}
+
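+// Renders its markdown children in a framed box with a 'Got it' button that
+// dismisses the help text via the given cookie.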
+class HelpComponent extends React.Component {
+  static styles = theme => ({
+    root: {
+      marginTop: theme.spacing.unit * 2,
+      marginBottom: theme.spacing.unit * 2,
+      borderRadius: theme.spacing.unit * 0.5,
+      border: `1px solid ${theme.palette.primary.main}`,
+      display: 'flex',
+      flexDirection: 'row',
+      alignItems: 'center'
+    },
+    content: {
+      paddingLeft: theme.spacing.unit * 2,
+      flex: '1 1 auto'
+    },
+    actions: {
+      padding: theme.spacing.unit,
+      flex: '0 0 auto'
+    }
+  })
+
+  static propTypes = {
+    classes: PropTypes.object.isRequired,
+    children: PropTypes.any,
+    cookie: PropTypes.string.isRequired
+  }
+
+  render() {
+    const { classes, children, cookie } = this.props
+
+    return (
+      <HelpContext.Consumer>{
+        help => (
+          help.isOpen(cookie)
+            ? <div className={classes.root}>
+              <div className={classes.content}>
+                <Markdown>
+                  {children}
+                </Markdown>
+              </div>
+              <div className={classes.actions}>
+                <Button color="primary" onClick={() => help.gotIt(cookie)}>Got it</Button>
+              </div>
+            </div> : null
+        )
+      }</HelpContext.Consumer>
+    )
+  }
+}
+
+export const HelpProvider = withCookies(HelpProviderComponent)
+export const Help = withStyles(HelpComponent.styles)(HelpComponent)
diff --git a/gui/src/config.js b/gui/src/config.js
index a3508695f6fbcb61f331e77a805f88213d037e85..a9c760dfd192ffb37d3bf7e884463e09343357af 100644
--- a/gui/src/config.js
+++ b/gui/src/config.js
@@ -5,41 +5,49 @@ import analytics from '@material-ui/core/colors/lightGreen'
 import secondary from '@material-ui/core/colors/blueGrey'
 import { createMuiTheme } from '@material-ui/core'
 
-export const apiBase = process.env.REACT_APP_API_BASE
-export const objectsBase = process.env.REACT_APP_OBJECT_BASE
-export const appBase = process.env.REACT_APP_APP_BASE
-export const appStaticBase = process.env.REACT_APP_APP_STATIC_BASE
-export const debug = process.env.REACT_APP_DEBUG === 'true'
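+// Build-time environment variables take precedence; window.nomadEnv allows
+// overriding the configuration at runtime, e.g. from a served script.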
+window.nomadEnv = window.nomadEnv || {}
+export const apiBase = process.env.REACT_APP_API_BASE || window.nomadEnv.apiBase
+export const appBase = process.env.REACT_APP_APP_BASE || window.nomadEnv.appBase
+export const kibanaBase = process.env.REACT_APP_KIBANA_BASE || window.nomadEnv.kibanaBase
+export const appStaticBase = process.env.REACT_APP_APP_STATIC_BASE || window.nomadEnv.appStaticBase
+export const debug = process.env.REACT_APP_DEBUG ? process.env.REACT_APP_DEBUG === 'true' : window.nomadEnv.debug
 
-export const genTheme = createMuiTheme({
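+// Shared theme factory; useNextVariants opts into Material-UI's new
+// typography variants and silences the v3 deprecation warnings.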
+const createTheme = themeData => createMuiTheme({
+  typography: {
+    useNextVariants: true
+  },
+  ...themeData
+})
+
+export const genTheme = createTheme({
   palette: {
     primary: secondary,
     secondary: secondary
   }
 })
 
-export const repoTheme = createMuiTheme({
+export const repoTheme = createTheme({
   palette: {
     primary: repo,
     secondary: repo
   }
 })
 
-export const archiveTheme = createMuiTheme({
+export const archiveTheme = createTheme({
   palette: {
     primary: archive,
     secondary: repo
   }
 })
 
-export const encTheme = createMuiTheme({
+export const encTheme = createTheme({
   palette: {
     primary: enc,
     secondary: repo
   }
 })
 
-export const analyticsTheme = createMuiTheme({
+export const analyticsTheme = createTheme({
   palette: {
     primary: analytics,
     secondary: repo
diff --git a/gui/yarn.lock b/gui/yarn.lock
index 0f25114eeda0f2a249bfbddb351e3cb519c6a351..37ae723361cff111b6b475242b4f225be2554ebe 100644
--- a/gui/yarn.lock
+++ b/gui/yarn.lock
@@ -46,15 +46,27 @@
     esutils "^2.0.2"
     js-tokens "^3.0.0"
 
+"@babel/runtime@7.0.0":
+  version "7.0.0"
+  resolved "https://registry.yarnpkg.com/@babel/runtime/-/runtime-7.0.0.tgz#adeb78fedfc855aa05bc041640f3f6f98e85424c"
+  dependencies:
+    regenerator-runtime "^0.12.0"
+
 "@babel/runtime@7.0.0-beta.56":
   version "7.0.0-beta.56"
   resolved "https://registry.yarnpkg.com/@babel/runtime/-/runtime-7.0.0-beta.56.tgz#cda612dffd5b1719a7b8e91e3040bd6ae64de8b0"
   dependencies:
     regenerator-runtime "^0.12.0"
 
-"@babel/runtime@7.0.0-rc.1":
-  version "7.0.0-rc.1"
-  resolved "https://registry.yarnpkg.com/@babel/runtime/-/runtime-7.0.0-rc.1.tgz#42f36fc5817911c89ea75da2b874054922967616"
+"@babel/runtime@7.1.2":
+  version "7.1.2"
+  resolved "https://registry.yarnpkg.com/@babel/runtime/-/runtime-7.1.2.tgz#81c89935f4647706fc54541145e6b4ecfef4b8e3"
+  dependencies:
+    regenerator-runtime "^0.12.0"
+
+"@babel/runtime@7.2.0", "@babel/runtime@^7.0.0", "@babel/runtime@^7.1.2", "@babel/runtime@^7.2.0":
+  version "7.2.0"
+  resolved "https://registry.yarnpkg.com/@babel/runtime/-/runtime-7.2.0.tgz#b03e42eeddf5898e00646e4c840fa07ba8dcad7f"
   dependencies:
     regenerator-runtime "^0.12.0"
 
@@ -90,22 +102,34 @@
     lodash "^4.2.0"
     to-fast-properties "^2.0.0"
 
-"@material-ui/core@^1.5.1":
-  version "1.5.1"
-  resolved "https://registry.yarnpkg.com/@material-ui/core/-/core-1.5.1.tgz#cb00cb934447ae688e08129f1dab55f54d29d87a"
+"@kyleshockey/js-yaml@^1.0.1":
+  version "1.0.1"
+  resolved "https://registry.yarnpkg.com/@kyleshockey/js-yaml/-/js-yaml-1.0.1.tgz#5c036bb67caee77fa887738e695dc02949889bfd"
+  dependencies:
+    argparse "^1.0.7"
+
+"@kyleshockey/object-assign-deep@^0.4.0":
+  version "0.4.2"
+  resolved "https://registry.yarnpkg.com/@kyleshockey/object-assign-deep/-/object-assign-deep-0.4.2.tgz#84900f0eefc372798f4751b5262830b8208922ec"
+
+"@material-ui/core@^3.9.0":
+  version "3.9.0"
+  resolved "https://registry.yarnpkg.com/@material-ui/core/-/core-3.9.0.tgz#7e74cf1979ee65f9fd388145764b3e58f48da6c6"
   dependencies:
-    "@babel/runtime" "7.0.0-rc.1"
-    "@types/jss" "^9.5.3"
+    "@babel/runtime" "^7.2.0"
+    "@material-ui/system" "^3.0.0-alpha.0"
+    "@material-ui/utils" "^3.0.0-alpha.2"
+    "@types/jss" "^9.5.6"
     "@types/react-transition-group" "^2.0.8"
     brcast "^3.0.1"
     classnames "^2.2.5"
     csstype "^2.5.2"
     debounce "^1.1.0"
-    deepmerge "^2.0.1"
+    deepmerge "^3.0.0"
     dom-helpers "^3.2.1"
-    hoist-non-react-statics "^2.5.0"
+    hoist-non-react-statics "^3.2.1"
     is-plain-object "^2.0.4"
-    jss "^9.3.3"
+    jss "^9.8.7"
     jss-camel-case "^6.0.0"
     jss-default-unit "^8.0.2"
     jss-global "^3.0.0"
@@ -117,60 +141,79 @@
     popper.js "^1.14.1"
     prop-types "^15.6.0"
     react-event-listener "^0.6.2"
-    react-jss "^8.1.0"
     react-transition-group "^2.2.1"
-    recompose "^0.28.0"
+    recompose "0.28.0 - 0.30.0"
     warning "^4.0.1"
 
-"@material-ui/docs@^1.0.0-alpha.5":
-  version "1.0.0-alpha.5"
-  resolved "https://registry.yarnpkg.com/@material-ui/docs/-/docs-1.0.0-alpha.5.tgz#95a1311910586893208da9a87f09bf4c7832bdb6"
+"@material-ui/icons@^3.0.2":
+  version "3.0.2"
+  resolved "https://registry.yarnpkg.com/@material-ui/icons/-/icons-3.0.2.tgz#d67a6dd1ec8312d3a88ec97944a63daeef24fe10"
   dependencies:
-    "@babel/runtime" "7.0.0-rc.1"
-    marked "^0.5.0"
-    nprogress "^0.2.0"
-    prismjs "^1.8.4"
+    "@babel/runtime" "^7.2.0"
+    recompose "0.28.0 - 0.30.0"
 
-"@material-ui/icons@^2.0.3":
-  version "2.0.3"
-  resolved "https://registry.yarnpkg.com/@material-ui/icons/-/icons-2.0.3.tgz#d3da9d6e31b1adfbc48efe33c7cb75b32b784096"
+"@material-ui/system@^3.0.0-alpha.0":
+  version "3.0.0-alpha.1"
+  resolved "https://registry.yarnpkg.com/@material-ui/system/-/system-3.0.0-alpha.1.tgz#9309e79a88dc069323b4adbf42e844a2facaf93b"
+  dependencies:
+    "@babel/runtime" "7.1.2"
+    deepmerge "^2.0.1"
+    prop-types "^15.6.0"
+    warning "^4.0.1"
+
+"@material-ui/utils@^3.0.0-alpha.2":
+  version "3.0.0-alpha.3"
+  resolved "https://registry.yarnpkg.com/@material-ui/utils/-/utils-3.0.0-alpha.3.tgz#836c62ea46f5ffc6f0b5ea05ab814704a86908b1"
   dependencies:
-    "@babel/runtime" "7.0.0-rc.1"
-    recompose "^0.28.0"
+    "@babel/runtime" "^7.2.0"
+    prop-types "^15.6.0"
+    react-is "^16.6.3"
 
 "@navjobs/upload@^3.1.3":
   version "3.1.3"
   resolved "https://registry.yarnpkg.com/@navjobs/upload/-/upload-3.1.3.tgz#ea1160016b96b6a5e7139091fc7b53dcb4bbc6d4"
 
-"@types/jss@^9.5.3":
-  version "9.5.4"
-  resolved "https://registry.yarnpkg.com/@types/jss/-/jss-9.5.4.tgz#89a4ee32a14a8d2937187b1a7f443750e964a74d"
+"@types/cookie@^0.3.1":
+  version "0.3.2"
+  resolved "https://registry.yarnpkg.com/@types/cookie/-/cookie-0.3.2.tgz#453f4b14b25da6a8ea4494842dedcbf0151deef9"
+
+"@types/hoist-non-react-statics@^3.0.1":
+  version "3.0.1"
+  resolved "https://registry.yarnpkg.com/@types/hoist-non-react-statics/-/hoist-non-react-statics-3.0.1.tgz#dde7c53101912dae8f45a1807f9857a59ddf3919"
+  dependencies:
+    "@types/react" "*"
+
+"@types/jss@^9.5.6":
+  version "9.5.7"
+  resolved "https://registry.yarnpkg.com/@types/jss/-/jss-9.5.7.tgz#fa57a6d0b38a3abef8a425e3eb6a53495cb9d5a0"
   dependencies:
     csstype "^2.0.0"
     indefinite-observable "^1.0.1"
 
+"@types/object-assign@^4.0.30":
+  version "4.0.30"
+  resolved "https://registry.yarnpkg.com/@types/object-assign/-/object-assign-4.0.30.tgz#8949371d5a99f4381ee0f1df0a9b7a187e07e652"
+
 "@types/prop-types@*":
-  version "15.5.5"
-  resolved "https://registry.yarnpkg.com/@types/prop-types/-/prop-types-15.5.5.tgz#17038dd322c2325f5da650a94d5f9974943625e3"
-  dependencies:
-    "@types/react" "*"
+  version "15.5.8"
+  resolved "https://registry.yarnpkg.com/@types/prop-types/-/prop-types-15.5.8.tgz#8ae4e0ea205fe95c3901a5a1df7f66495e3a56ce"
 
 "@types/react-transition-group@^2.0.8":
-  version "2.0.13"
-  resolved "https://registry.yarnpkg.com/@types/react-transition-group/-/react-transition-group-2.0.13.tgz#7769fb61eb71d64d087a713956f086a42c3ee171"
+  version "2.0.15"
+  resolved "https://registry.yarnpkg.com/@types/react-transition-group/-/react-transition-group-2.0.15.tgz#e5ee3fe558832e141cc6041bdd54caea7b787af8"
   dependencies:
     "@types/react" "*"
 
 "@types/react@*":
-  version "16.4.11"
-  resolved "https://registry.yarnpkg.com/@types/react/-/react-16.4.11.tgz#330f3d864300f71150dc2d125e48644c098f8770"
+  version "16.7.18"
+  resolved "https://registry.yarnpkg.com/@types/react/-/react-16.7.18.tgz#f4ce0d539a893dd61e36cd11ae3a5e54f5a48337"
   dependencies:
     "@types/prop-types" "*"
     csstype "^2.2.0"
 
-"@zeit/schemas@2.1.1":
-  version "2.1.1"
-  resolved "https://registry.yarnpkg.com/@zeit/schemas/-/schemas-2.1.1.tgz#bca9d84df177c85f2d2a7dad37512f384761b23d"
+"@zeit/schemas@2.6.0":
+  version "2.6.0"
+  resolved "https://registry.yarnpkg.com/@zeit/schemas/-/schemas-2.6.0.tgz#004e8e553b4cd53d538bd38eac7bcbf58a867fe3"
 
 abab@^1.0.3:
   version "1.0.4"
@@ -214,8 +257,8 @@ acorn@^4.0.3, acorn@^4.0.4:
   resolved "https://registry.yarnpkg.com/acorn/-/acorn-4.0.13.tgz#105495ae5361d697bd195c825192e1ad7f253787"
 
 acorn@^5.0.0, acorn@^5.5.0:
-  version "5.7.1"
-  resolved "https://registry.yarnpkg.com/acorn/-/acorn-5.7.1.tgz#f095829297706a7c9776958c0afc8930a9b9d9d8"
+  version "5.7.3"
+  resolved "https://registry.yarnpkg.com/acorn/-/acorn-5.7.3.tgz#67aa231bf8812974b85235a96771eb6bd07ea279"
 
 address@1.0.3, address@^1.0.1:
   version "1.0.3"
@@ -229,7 +272,7 @@ ajv-keywords@^3.0.0:
   version "3.2.0"
   resolved "https://registry.yarnpkg.com/ajv-keywords/-/ajv-keywords-3.2.0.tgz#e86b819c602cf8821ad637413698f1dec021847a"
 
-ajv@6.5.3, ajv@^6.0.1:
+ajv@6.5.3:
   version "6.5.3"
   resolved "https://registry.yarnpkg.com/ajv/-/ajv-6.5.3.tgz#71a569d189ecf4f4f321224fecb166f071dd90f9"
   dependencies:
@@ -247,6 +290,15 @@ ajv@^5.0.0, ajv@^5.1.5, ajv@^5.2.0, ajv@^5.2.3, ajv@^5.3.0:
     fast-json-stable-stringify "^2.0.0"
     json-schema-traverse "^0.3.0"
 
+ajv@^6.0.1, ajv@^6.5.5:
+  version "6.7.0"
+  resolved "https://registry.yarnpkg.com/ajv/-/ajv-6.7.0.tgz#e3ce7bb372d6577bb1839f1dfdfcbf5ad2948d96"
+  dependencies:
+    fast-deep-equal "^2.0.1"
+    fast-json-stable-stringify "^2.0.0"
+    json-schema-traverse "^0.4.1"
+    uri-js "^4.2.2"
+
 align-text@^0.1.1, align-text@^0.1.3:
   version "0.1.4"
   resolved "https://registry.yarnpkg.com/align-text/-/align-text-0.1.4.tgz#0cd90a561093f35d0a99256c22b7069433fad117"
@@ -259,10 +311,6 @@ alphanum-sort@^1.0.1, alphanum-sort@^1.0.2:
   version "1.0.2"
   resolved "https://registry.yarnpkg.com/alphanum-sort/-/alphanum-sort-1.0.2.tgz#97a1119649b211ad33691d9f9f486a8ec9fbe0a3"
 
-amdefine@>=0.0.4:
-  version "1.0.1"
-  resolved "https://registry.yarnpkg.com/amdefine/-/amdefine-1.0.1.tgz#4a5282ac164729e93619bcfd3ad151f817ce91f5"
-
 ansi-align@^2.0.0:
   version "2.0.0"
   resolved "https://registry.yarnpkg.com/ansi-align/-/ansi-align-2.0.0.tgz#c36aeccba563b89ceb556f3690f0b1d9e3547f7f"
@@ -313,11 +361,11 @@ anymatch@^2.0.0:
     micromatch "^3.1.4"
     normalize-path "^2.1.1"
 
-append-transform@^1.0.0:
-  version "1.0.0"
-  resolved "https://registry.yarnpkg.com/append-transform/-/append-transform-1.0.0.tgz#046a52ae582a228bd72f58acfbe2967c678759ab"
+append-transform@^0.4.0:
+  version "0.4.0"
+  resolved "https://registry.yarnpkg.com/append-transform/-/append-transform-0.4.0.tgz#d76ebf8ca94d276e247a36bad44a4b74ab611991"
   dependencies:
-    default-require-extensions "^2.0.0"
+    default-require-extensions "^1.0.0"
 
 aproba@^1.0.3:
   version "1.2.0"
@@ -351,10 +399,6 @@ aria-query@^0.7.0:
     ast-types-flow "0.0.7"
     commander "^2.11.0"
 
-arity-n@^1.0.4:
-  version "1.0.4"
-  resolved "https://registry.yarnpkg.com/arity-n/-/arity-n-1.0.4.tgz#d9e76b11733e08569c0847ae7b39b2860b30b745"
-
 arr-diff@^2.0.0:
   version "2.0.0"
   resolved "https://registry.yarnpkg.com/arr-diff/-/arr-diff-2.0.0.tgz#8f3b827f955a8bd669697e4a4256ac3ceae356cf"
@@ -390,8 +434,8 @@ array-flatten@1.1.1:
   resolved "https://registry.yarnpkg.com/array-flatten/-/array-flatten-1.1.1.tgz#9a5f699051b1e7073328f2a008968b64ea2955d2"
 
 array-flatten@^2.1.0:
-  version "2.1.1"
-  resolved "https://registry.yarnpkg.com/array-flatten/-/array-flatten-2.1.1.tgz#426bb9da84090c1838d812c8150af20a8331e296"
+  version "2.1.2"
+  resolved "https://registry.yarnpkg.com/array-flatten/-/array-flatten-2.1.2.tgz#24ef80a28c1a893617e2149b0c6d0d788293b099"
 
 array-includes@^3.0.3:
   version "3.0.3"
@@ -470,11 +514,11 @@ async-each@^1.0.0:
   version "1.0.1"
   resolved "https://registry.yarnpkg.com/async-each/-/async-each-1.0.1.tgz#19d386a1d9edc6e7c1c85d388aedbcc56d33602d"
 
-async@^1.4.0, async@^1.5.2:
+async@^1.5.2:
   version "1.5.2"
   resolved "https://registry.yarnpkg.com/async/-/async-1.5.2.tgz#ec6a61ae56480c0c3cb241c95618e20892f9672a"
 
-async@^2.1.2, async@^2.1.4, async@^2.4.1:
+async@^2.0.1, async@^2.1.2, async@^2.1.4, async@^2.4.1, async@^2.5.0:
   version "2.6.1"
   resolved "https://registry.yarnpkg.com/async/-/async-2.6.1.tgz#b245a23ca71930044ec53fa46aa00a3e87c6a610"
   dependencies:
@@ -1251,9 +1295,13 @@ big.js@^3.1.3:
   version "3.2.0"
   resolved "https://registry.yarnpkg.com/big.js/-/big.js-3.2.0.tgz#a5fc298b81b9e0dca2e458824784b65c52ba588e"
 
+big.js@^5.2.2:
+  version "5.2.2"
+  resolved "https://registry.yarnpkg.com/big.js/-/big.js-5.2.2.tgz#65f0af382f578bcdc742bd9c281e9cb2d7768328"
+
 binary-extensions@^1.0.0:
-  version "1.11.0"
-  resolved "https://registry.yarnpkg.com/binary-extensions/-/binary-extensions-1.11.0.tgz#46aa1751fb6a2f93ee5e689bb1087d4b14c6c205"
+  version "1.12.0"
+  resolved "https://registry.yarnpkg.com/binary-extensions/-/binary-extensions-1.12.0.tgz#c2d780f53d45bba8317a8902d4ceeaf3a6385b14"
 
 biskviit@1.0.1:
   version "1.0.1"
@@ -1262,27 +1310,27 @@ biskviit@1.0.1:
     psl "^1.1.7"
 
 bluebird@^3.4.7:
-  version "3.5.1"
-  resolved "https://registry.yarnpkg.com/bluebird/-/bluebird-3.5.1.tgz#d9551f9de98f1fcda1e683d17ee91a0602ee2eb9"
+  version "3.5.3"
+  resolved "https://registry.yarnpkg.com/bluebird/-/bluebird-3.5.3.tgz#7d01c6f9616c9a51ab0f8c549a79dfe6ec33efa7"
 
 bn.js@^4.0.0, bn.js@^4.1.0, bn.js@^4.1.1, bn.js@^4.4.0:
   version "4.11.8"
   resolved "https://registry.yarnpkg.com/bn.js/-/bn.js-4.11.8.tgz#2cde09eb5ee341f484746bb0309b3253b1b1442f"
 
-body-parser@1.18.2:
-  version "1.18.2"
-  resolved "https://registry.yarnpkg.com/body-parser/-/body-parser-1.18.2.tgz#87678a19d84b47d859b83199bd59bce222b10454"
+body-parser@1.18.3:
+  version "1.18.3"
+  resolved "https://registry.yarnpkg.com/body-parser/-/body-parser-1.18.3.tgz#5b292198ffdd553b3a0f20ded0592b956955c8b4"
   dependencies:
     bytes "3.0.0"
     content-type "~1.0.4"
     debug "2.6.9"
-    depd "~1.1.1"
-    http-errors "~1.6.2"
-    iconv-lite "0.4.19"
+    depd "~1.1.2"
+    http-errors "~1.6.3"
+    iconv-lite "0.4.23"
     on-finished "~2.3.0"
-    qs "6.5.1"
-    raw-body "2.3.2"
-    type-is "~1.6.15"
+    qs "6.5.2"
+    raw-body "2.3.3"
+    type-is "~1.6.16"
 
 bonjour@^3.5.0:
   version "3.5.0"
@@ -1434,6 +1482,10 @@ bser@^2.0.0:
   dependencies:
     node-int64 "^0.4.0"
 
+btoa@1.1.2:
+  version "1.1.2"
+  resolved "https://registry.yarnpkg.com/btoa/-/btoa-1.1.2.tgz#3e40b81663f81d2dd6596a4cb714a8dc16cfabe0"
+
 buffer-from@^1.0.0:
   version "1.1.1"
   resolved "https://registry.yarnpkg.com/buffer-from/-/buffer-from-1.1.1.tgz#32713bc028f75c02fdb710d7c7bcec1f2c6070ef"
@@ -1454,6 +1506,13 @@ buffer@^4.3.0:
     ieee754 "^1.1.4"
     isarray "^1.0.0"
 
+buffer@^5.1.0:
+  version "5.2.1"
+  resolved "https://registry.yarnpkg.com/buffer/-/buffer-5.2.1.tgz#dd57fa0f109ac59c602479044dca7b8b3d0b71d6"
+  dependencies:
+    base64-js "^1.0.2"
+    ieee754 "^1.1.4"
+
 builtin-modules@^1.0.0, builtin-modules@^1.1.1:
   version "1.1.1"
   resolved "https://registry.yarnpkg.com/builtin-modules/-/builtin-modules-1.1.1.tgz#270f076c5a72c02f5b65a47df94c5fe3a278892f"
@@ -1534,16 +1593,16 @@ caniuse-api@^1.5.2:
     lodash.uniq "^4.5.0"
 
 caniuse-db@^1.0.30000529, caniuse-db@^1.0.30000634, caniuse-db@^1.0.30000639:
-  version "1.0.30000878"
-  resolved "https://registry.yarnpkg.com/caniuse-db/-/caniuse-db-1.0.30000878.tgz#0d0c6d8500c3aea21441fad059bce4b8f3f509df"
+  version "1.0.30000928"
+  resolved "https://registry.yarnpkg.com/caniuse-db/-/caniuse-db-1.0.30000928.tgz#2e83d2b14276442da239511615eb7c62fed0cfa7"
 
 caniuse-lite@^1.0.30000748, caniuse-lite@^1.0.30000792:
-  version "1.0.30000878"
-  resolved "https://registry.yarnpkg.com/caniuse-lite/-/caniuse-lite-1.0.30000878.tgz#c644c39588dd42d3498e952234c372e5a40a4123"
+  version "1.0.30000928"
+  resolved "https://registry.yarnpkg.com/caniuse-lite/-/caniuse-lite-1.0.30000928.tgz#805e828dc72b06498e3683a32e61c7507fd67b88"
 
 capture-stack-trace@^1.0.0:
-  version "1.0.0"
-  resolved "https://registry.yarnpkg.com/capture-stack-trace/-/capture-stack-trace-1.0.0.tgz#4a6fa07399c26bba47f0b2496b4d0fb408c5550d"
+  version "1.0.1"
+  resolved "https://registry.yarnpkg.com/capture-stack-trace/-/capture-stack-trace-1.0.1.tgz#a6c0bbe1f38f3aa0b92238ecb6ff42c344d4135d"
 
 case-sensitive-paths-webpack-plugin@2.1.1:
   version "2.1.1"
@@ -1570,7 +1629,7 @@ chalk@1.1.3, chalk@^1.1.3:
     strip-ansi "^3.0.0"
     supports-color "^2.0.0"
 
-chalk@2.4.1, chalk@^2.0.0, chalk@^2.0.1, chalk@^2.1.0, chalk@^2.4.1:
+chalk@2.4.1:
   version "2.4.1"
   resolved "https://registry.yarnpkg.com/chalk/-/chalk-2.4.1.tgz#18c49ab16a037b6eb0152cc83e3471338215b66e"
   dependencies:
@@ -1578,6 +1637,14 @@ chalk@2.4.1, chalk@^2.0.0, chalk@^2.0.1, chalk@^2.1.0, chalk@^2.4.1:
     escape-string-regexp "^1.0.5"
     supports-color "^5.3.0"
 
+chalk@^2.0.0, chalk@^2.0.1, chalk@^2.1.0, chalk@^2.4.1:
+  version "2.4.2"
+  resolved "https://registry.yarnpkg.com/chalk/-/chalk-2.4.2.tgz#cd42541677a54333cf541a49108c1432b44c9424"
+  dependencies:
+    ansi-styles "^3.2.1"
+    escape-string-regexp "^1.0.5"
+    supports-color "^5.3.0"
+
 change-emitter@^0.1.2:
   version "0.1.6"
   resolved "https://registry.yarnpkg.com/change-emitter/-/change-emitter-0.1.6.tgz#e8b2fe3d7f1ab7d69a32199aff91ea6931409515"
@@ -1586,10 +1653,6 @@ chardet@^0.4.0:
   version "0.4.2"
   resolved "https://registry.yarnpkg.com/chardet/-/chardet-0.4.2.tgz#b5473b33dc97c424e5d98dc87d55d4d8a29c8bf2"
 
-chickencurry@1.1.1:
-  version "1.1.1"
-  resolved "https://registry.yarnpkg.com/chickencurry/-/chickencurry-1.1.1.tgz#02655f2b26b3bc2ee1ae1e5316886de38eb79738"
-
 chokidar@^1.6.0:
   version "1.7.0"
   resolved "https://registry.yarnpkg.com/chokidar/-/chokidar-1.7.0.tgz#798e689778151c8076b4b360e5edd28cda2bb468"
@@ -1624,13 +1687,13 @@ chokidar@^2.0.2:
   optionalDependencies:
     fsevents "^1.2.2"
 
-chownr@^1.0.1:
-  version "1.0.1"
-  resolved "https://registry.yarnpkg.com/chownr/-/chownr-1.0.1.tgz#e2a75042a9551908bebd25b8523d5f9769d79181"
+chownr@^1.1.1:
+  version "1.1.1"
+  resolved "https://registry.yarnpkg.com/chownr/-/chownr-1.1.1.tgz#54726b8b8fff4df053c42187e801fb4412df1494"
 
-ci-info@^1.3.0:
-  version "1.4.0"
-  resolved "https://registry.yarnpkg.com/ci-info/-/ci-info-1.4.0.tgz#4841d53cad49f11b827b648ebde27a6e189b412f"
+ci-info@^1.5.0:
+  version "1.6.0"
+  resolved "https://registry.yarnpkg.com/ci-info/-/ci-info-1.6.0.tgz#2ca20dbb9ceb32d4524a683303313f0304b1e497"
 
 cipher-base@^1.0.0, cipher-base@^1.0.1, cipher-base@^1.0.3:
   version "1.0.4"
@@ -1682,14 +1745,6 @@ cli-width@^2.0.0:
   version "2.2.0"
   resolved "https://registry.yarnpkg.com/cli-width/-/cli-width-2.2.0.tgz#ff19ede8a9a5e579324147b0c11f0fbcbabed639"
 
-clipboard@^2.0.0:
-  version "2.0.1"
-  resolved "https://registry.yarnpkg.com/clipboard/-/clipboard-2.0.1.tgz#a12481e1c13d8a50f5f036b0560fe5d16d74e46a"
-  dependencies:
-    good-listener "^1.2.2"
-    select "^1.1.2"
-    tiny-emitter "^2.0.0"
-
 clipboardy@1.2.3:
   version "1.2.3"
   resolved "https://registry.yarnpkg.com/clipboardy/-/clipboardy-1.2.3.tgz#0526361bf78724c1f20be248d428e365433c07ef"
@@ -1739,19 +1794,19 @@ collection-visit@^1.0.0:
     object-visit "^1.0.0"
 
 color-convert@^1.3.0, color-convert@^1.9.0:
-  version "1.9.2"
-  resolved "https://registry.yarnpkg.com/color-convert/-/color-convert-1.9.2.tgz#49881b8fba67df12a96bdf3f56c0aab9e7913147"
+  version "1.9.3"
+  resolved "https://registry.yarnpkg.com/color-convert/-/color-convert-1.9.3.tgz#bb71850690e1f136567de629d2d5471deda4c1e8"
   dependencies:
-    color-name "1.1.1"
-
-color-name@1.1.1:
-  version "1.1.1"
-  resolved "https://registry.yarnpkg.com/color-name/-/color-name-1.1.1.tgz#4b1415304cf50028ea81643643bd82ea05803689"
+    color-name "1.1.3"
 
-color-name@^1.0.0:
+color-name@1.1.3:
   version "1.1.3"
   resolved "https://registry.yarnpkg.com/color-name/-/color-name-1.1.3.tgz#a7d0558bd89c42f795dd42328f740831ca53bc25"
 
+color-name@^1.0.0:
+  version "1.1.4"
+  resolved "https://registry.yarnpkg.com/color-name/-/color-name-1.1.4.tgz#c2a09a87acbde69543de6f63fa3995c826c536a2"
+
 color-string@^0.3.0:
   version "0.3.0"
   resolved "https://registry.yarnpkg.com/color-string/-/color-string-0.3.0.tgz#27d46fb67025c5c2fa25993bfbf579e47841b991"
@@ -1778,45 +1833,35 @@ colors@~1.1.2:
   version "1.1.2"
   resolved "https://registry.yarnpkg.com/colors/-/colors-1.1.2.tgz#168a4701756b6a7f51a12ce0c97bfa28c084ed63"
 
-combined-stream@1.0.6, combined-stream@~1.0.6:
-  version "1.0.6"
-  resolved "https://registry.yarnpkg.com/combined-stream/-/combined-stream-1.0.6.tgz#723e7df6e801ac5613113a7e445a9b69cb632818"
+combined-stream@^1.0.5, combined-stream@^1.0.6, combined-stream@~1.0.6:
+  version "1.0.7"
+  resolved "https://registry.yarnpkg.com/combined-stream/-/combined-stream-1.0.7.tgz#2d1d24317afb8abe95d6d2c0b07b57813539d828"
   dependencies:
     delayed-stream "~1.0.0"
 
-commander@2.17.x, commander@^2.11.0:
+commander@2.17.x, commander@~2.17.1:
   version "2.17.1"
   resolved "https://registry.yarnpkg.com/commander/-/commander-2.17.1.tgz#bd77ab7de6de94205ceacc72f1716d29f20a77bf"
 
-commander@~2.16.0:
-  version "2.16.0"
-  resolved "https://registry.yarnpkg.com/commander/-/commander-2.16.0.tgz#f16390593996ceb4f3eeb020b31d78528f7f8a50"
+commander@^2.11.0:
+  version "2.19.0"
+  resolved "https://registry.yarnpkg.com/commander/-/commander-2.19.0.tgz#f6198aa84e5b83c46054b94ddedbfed5ee9ff12a"
 
 commondir@^1.0.1:
   version "1.0.1"
   resolved "https://registry.yarnpkg.com/commondir/-/commondir-1.0.1.tgz#ddd800da0c66127393cca5950ea968a3aaf1253b"
 
-compare-versions@^3.1.0:
-  version "3.3.1"
-  resolved "https://registry.yarnpkg.com/compare-versions/-/compare-versions-3.3.1.tgz#1ede3172b713c15f7c7beb98cb74d2d82576dad3"
-
 component-emitter@^1.2.1:
   version "1.2.1"
   resolved "https://registry.yarnpkg.com/component-emitter/-/component-emitter-1.2.1.tgz#137918d6d78283f7df7a6b7c5a63e140e69425e6"
 
-compose-function@^2.0.0:
-  version "2.0.0"
-  resolved "https://registry.yarnpkg.com/compose-function/-/compose-function-2.0.0.tgz#e642fa7e1da21529720031476776fc24691ac0b0"
-  dependencies:
-    arity-n "^1.0.4"
-
 compressible@~2.0.14:
-  version "2.0.14"
-  resolved "https://registry.yarnpkg.com/compressible/-/compressible-2.0.14.tgz#326c5f507fbb055f54116782b969a81b67a29da7"
+  version "2.0.15"
+  resolved "https://registry.yarnpkg.com/compressible/-/compressible-2.0.15.tgz#857a9ab0a7e5a07d8d837ed43fe2defff64fe212"
   dependencies:
-    mime-db ">= 1.34.0 < 2"
+    mime-db ">= 1.36.0 < 2"
 
-compression@^1.5.2:
+compression@1.7.3, compression@^1.5.2:
   version "1.7.3"
   resolved "https://registry.yarnpkg.com/compression/-/compression-1.7.3.tgz#27e0e176aaf260f7f2c2813c3e440adb9f1993db"
   dependencies:
@@ -1853,8 +1898,8 @@ configstore@^3.0.0:
     xdg-basedir "^3.0.0"
 
 connect-history-api-fallback@^1.3.0:
-  version "1.5.0"
-  resolved "https://registry.yarnpkg.com/connect-history-api-fallback/-/connect-history-api-fallback-1.5.0.tgz#b06873934bc5e344fef611a196a6faae0aee015a"
+  version "1.6.0"
+  resolved "https://registry.yarnpkg.com/connect-history-api-fallback/-/connect-history-api-fallback-1.6.0.tgz#8b32089359308d111115d81cad3fceab888f97bc"
 
 console-browserify@^1.1.0:
   version "1.1.0"
@@ -1887,14 +1932,16 @@ content-type@~1.0.4:
   resolved "https://registry.yarnpkg.com/content-type/-/content-type-1.0.4.tgz#e138cc75e040c727b1966fe5e5f8c9aee256fe3b"
 
 convert-source-map@^1.4.0, convert-source-map@^1.5.0, convert-source-map@^1.5.1:
-  version "1.5.1"
-  resolved "https://registry.yarnpkg.com/convert-source-map/-/convert-source-map-1.5.1.tgz#b8278097b9bc229365de5c62cf5fcaed8b5599e5"
+  version "1.6.0"
+  resolved "https://registry.yarnpkg.com/convert-source-map/-/convert-source-map-1.6.0.tgz#51b537a8c43e0f04dec1993bffcdd504e758ac20"
+  dependencies:
+    safe-buffer "~5.1.1"
 
 cookie-signature@1.0.6:
   version "1.0.6"
   resolved "https://registry.yarnpkg.com/cookie-signature/-/cookie-signature-1.0.6.tgz#e303a882b342cc3ee8ca513a79999734dab3ae2c"
 
-cookie@0.3.1:
+cookie@0.3.1, cookie@^0.3.1:
   version "0.3.1"
   resolved "https://registry.yarnpkg.com/cookie/-/cookie-0.3.1.tgz#e7e0a1f9ef43b4c8ba925c5c5a96e806d16873bb"
 
@@ -1913,8 +1960,8 @@ core-js@^1.0.0:
   resolved "https://registry.yarnpkg.com/core-js/-/core-js-1.2.7.tgz#652294c14651db28fa93bd2d5ff2983a4f08c636"
 
 core-js@^2.4.0, core-js@^2.5.0:
-  version "2.5.7"
-  resolved "https://registry.yarnpkg.com/core-js/-/core-js-2.5.7.tgz#f972608ff0cead68b841a16a932d0b183791814e"
+  version "2.6.2"
+  resolved "https://registry.yarnpkg.com/core-js/-/core-js-2.6.2.tgz#267988d7268323b349e20b4588211655f0e83944"
 
 core-util-is@1.0.2, core-util-is@~1.0.0:
   version "1.0.2"
@@ -1966,6 +2013,13 @@ create-hmac@^1.1.0, create-hmac@^1.1.2, create-hmac@^1.1.4:
     safe-buffer "^5.0.1"
     sha.js "^2.4.8"
 
+cross-fetch@0.0.8:
+  version "0.0.8"
+  resolved "https://registry.yarnpkg.com/cross-fetch/-/cross-fetch-0.0.8.tgz#01ed94dc407df2c00f1807fde700a7cfa48a205c"
+  dependencies:
+    node-fetch "1.7.3"
+    whatwg-fetch "2.0.3"
+
 cross-spawn@5.1.0, cross-spawn@^5.0.1, cross-spawn@^5.1.0:
   version "5.1.0"
   resolved "https://registry.yarnpkg.com/cross-spawn/-/cross-spawn-5.1.0.tgz#e8bd0efee58fcff6f8f94510a0a554bbfa235449"
@@ -2027,8 +2081,8 @@ css-select@^1.1.0:
     nth-check "~1.0.1"
 
 css-selector-tokenizer@^0.7.0:
-  version "0.7.0"
-  resolved "https://registry.yarnpkg.com/css-selector-tokenizer/-/css-selector-tokenizer-0.7.0.tgz#e6988474ae8c953477bf5e7efecfceccd9cf4c86"
+  version "0.7.1"
+  resolved "https://registry.yarnpkg.com/css-selector-tokenizer/-/css-selector-tokenizer-0.7.1.tgz#a177271a8bca5019172f4f891fc6eed9cbf68d5d"
   dependencies:
     cssesc "^0.1.0"
     fastparse "^1.1.1"
@@ -2041,8 +2095,8 @@ css-vendor@^0.3.8:
     is-in-browser "^1.0.2"
 
 css-what@2.1:
-  version "2.1.0"
-  resolved "https://registry.yarnpkg.com/css-what/-/css-what-2.1.0.tgz#9467d032c38cfaefb9f2d79501253062f87fa1bd"
+  version "2.1.2"
+  resolved "https://registry.yarnpkg.com/css-what/-/css-what-2.1.2.tgz#c0876d9d0480927d7d4920dcd72af3595649554d"
 
 cssesc@^0.1.0:
   version "0.1.0"
@@ -2103,8 +2157,8 @@ cssom@0.3.x, "cssom@>= 0.3.2 < 0.4.0":
     cssom "0.3.x"
 
 csstype@^2.0.0, csstype@^2.2.0, csstype@^2.5.2:
-  version "2.5.6"
-  resolved "https://registry.yarnpkg.com/csstype/-/csstype-2.5.6.tgz#2ae1db2319642d8b80a668d2d025c6196071e788"
+  version "2.6.0"
+  resolved "https://registry.yarnpkg.com/csstype/-/csstype-2.6.0.tgz#6cf7b2fa7fc32aab3d746802c244d4eda71371a2"
 
 currently-unhandled@^0.4.1:
   version "0.4.1"
@@ -2142,12 +2196,18 @@ debug@2.6.9, debug@^2.1.2, debug@^2.2.0, debug@^2.3.3, debug@^2.6.0, debug@^2.6.
   dependencies:
     ms "2.0.0"
 
-debug@^3.0.1, debug@^3.1.0:
+debug@=3.1.0:
   version "3.1.0"
   resolved "https://registry.yarnpkg.com/debug/-/debug-3.1.0.tgz#5bb5a0672628b64149566ba16819e61518c67261"
   dependencies:
     ms "2.0.0"
 
+debug@^3.0.1, debug@^3.1.0:
+  version "3.2.6"
+  resolved "https://registry.yarnpkg.com/debug/-/debug-3.2.6.tgz#e83d17de16d8a7efb7717edbe5fb10135eee629b"
+  dependencies:
+    ms "^2.1.1"
+
 decamelize@^1.0.0, decamelize@^1.1.1, decamelize@^1.1.2:
   version "1.2.0"
   resolved "https://registry.yarnpkg.com/decamelize/-/decamelize-1.2.0.tgz#f6534d15148269b20352e7bee26f501f9a191290"
@@ -2160,6 +2220,10 @@ deep-equal@^1.0.1:
   version "1.0.1"
   resolved "https://registry.yarnpkg.com/deep-equal/-/deep-equal-1.0.1.tgz#f5d260292b660e084eff4cdbc9f08ad3247448b5"
 
+deep-extend@^0.5.1:
+  version "0.5.1"
+  resolved "https://registry.yarnpkg.com/deep-extend/-/deep-extend-0.5.1.tgz#b894a9dd90d3023fbf1c55a394fb858eb2066f1f"
+
 deep-extend@^0.6.0:
   version "0.6.0"
   resolved "https://registry.yarnpkg.com/deep-extend/-/deep-extend-0.6.0.tgz#c4fa7c95404a17a9c3e8ca7e1537312b736330ac"
@@ -2169,14 +2233,18 @@ deep-is@~0.1.3:
   resolved "https://registry.yarnpkg.com/deep-is/-/deep-is-0.1.3.tgz#b369d6fb5dbc13eecf524f91b070feedc357cf34"
 
 deepmerge@^2.0.1:
-  version "2.1.1"
-  resolved "https://registry.yarnpkg.com/deepmerge/-/deepmerge-2.1.1.tgz#e862b4e45ea0555072bf51e7fd0d9845170ae768"
+  version "2.2.1"
+  resolved "https://registry.yarnpkg.com/deepmerge/-/deepmerge-2.2.1.tgz#5d3ff22a01c00f645405a2fbc17d0778a1801170"
 
-default-require-extensions@^2.0.0:
-  version "2.0.0"
-  resolved "https://registry.yarnpkg.com/default-require-extensions/-/default-require-extensions-2.0.0.tgz#f5f8fbb18a7d6d50b21f641f649ebb522cfe24f7"
+deepmerge@^3.0.0:
+  version "3.0.0"
+  resolved "https://registry.yarnpkg.com/deepmerge/-/deepmerge-3.0.0.tgz#ca7903b34bfa1f8c2eab6779280775a411bfc6ba"
+
+default-require-extensions@^1.0.0:
+  version "1.0.0"
+  resolved "https://registry.yarnpkg.com/default-require-extensions/-/default-require-extensions-1.0.0.tgz#f37ea15d3e13ffd9b437d33e1a75b5fb97874cb8"
   dependencies:
-    strip-bom "^3.0.0"
+    strip-bom "^2.0.0"
 
 define-properties@^1.1.2:
   version "1.1.3"
@@ -2207,7 +2275,7 @@ defined@^1.0.0:
   version "1.0.0"
   resolved "https://registry.yarnpkg.com/defined/-/defined-1.0.0.tgz#c98d9bcef75674188e110969151199e39b1fa693"
 
-del@^2.0.2, del@^2.2.2:
+del@^2.2.2:
   version "2.2.2"
   resolved "https://registry.yarnpkg.com/del/-/del-2.2.2.tgz#c12c981d067846c84bcaf862cff930d907ffd1a8"
   dependencies:
@@ -2234,19 +2302,11 @@ delayed-stream@~1.0.0:
   version "1.0.0"
   resolved "https://registry.yarnpkg.com/delayed-stream/-/delayed-stream-1.0.0.tgz#df3ae199acadfb7d440aaae0b29e2272b24ec619"
 
-delegate@^3.1.2:
-  version "3.2.0"
-  resolved "https://registry.yarnpkg.com/delegate/-/delegate-3.2.0.tgz#b66b71c3158522e8ab5744f720d8ca0c2af59166"
-
 delegates@^1.0.0:
   version "1.0.0"
   resolved "https://registry.yarnpkg.com/delegates/-/delegates-1.0.0.tgz#84c6e159b81904fdca59a0ef44cd870d31250f9a"
 
-depd@1.1.1:
-  version "1.1.1"
-  resolved "https://registry.yarnpkg.com/depd/-/depd-1.1.1.tgz#5783b4e1c459f06fa5ca27f991f3d06e7a310359"
-
-depd@~1.1.1, depd@~1.1.2:
+depd@~1.1.2:
   version "1.1.2"
   resolved "https://registry.yarnpkg.com/depd/-/depd-1.1.2.tgz#9bcd52e14c097763e749b274c4346ed2e560b5a9"
 
@@ -2272,8 +2332,8 @@ detect-libc@^1.0.2:
   resolved "https://registry.yarnpkg.com/detect-libc/-/detect-libc-1.0.3.tgz#fa137c4bd698edf55cd5cd02ac559f91a4c4ba9b"
 
 detect-node@^2.0.3:
-  version "2.0.3"
-  resolved "https://registry.yarnpkg.com/detect-node/-/detect-node-2.0.3.tgz#a2033c09cc8e158d37748fbde7507832bd6ce127"
+  version "2.0.4"
+  resolved "https://registry.yarnpkg.com/detect-node/-/detect-node-2.0.4.tgz#014ee8f8f669c5c58023da64b8179c083a28c46c"
 
 detect-port-alt@1.1.6:
   version "1.1.6"
@@ -2324,15 +2384,17 @@ doctrine@^2.0.0, doctrine@^2.1.0:
   dependencies:
     esutils "^2.0.2"
 
-dom-converter@~0.1:
-  version "0.1.4"
-  resolved "https://registry.yarnpkg.com/dom-converter/-/dom-converter-0.1.4.tgz#a45ef5727b890c9bffe6d7c876e7b19cb0e17f3b"
+dom-converter@~0.2:
+  version "0.2.0"
+  resolved "https://registry.yarnpkg.com/dom-converter/-/dom-converter-0.2.0.tgz#6721a9daee2e293682955b6afe416771627bb768"
   dependencies:
-    utila "~0.3"
+    utila "~0.4"
 
 dom-helpers@^3.2.1, dom-helpers@^3.3.1:
-  version "3.3.1"
-  resolved "https://registry.yarnpkg.com/dom-helpers/-/dom-helpers-3.3.1.tgz#fc1a4e15ffdf60ddde03a480a9c0fece821dd4a6"
+  version "3.4.0"
+  resolved "https://registry.yarnpkg.com/dom-helpers/-/dom-helpers-3.4.0.tgz#e9b369700f959f62ecde5a6babde4bccd9169af8"
+  dependencies:
+    "@babel/runtime" "^7.1.2"
 
 dom-serializer@0:
   version "0.1.0"
@@ -2352,8 +2414,8 @@ domain-browser@^1.1.1:
   resolved "https://registry.yarnpkg.com/domain-browser/-/domain-browser-1.2.0.tgz#3d31f50191a6749dd1375a7f522e823d42e54eda"
 
 domelementtype@1, domelementtype@^1.3.0:
-  version "1.3.0"
-  resolved "https://registry.yarnpkg.com/domelementtype/-/domelementtype-1.3.0.tgz#b17aed82e8ab59e52dd9c19b1756e0fc187204c2"
+  version "1.3.1"
+  resolved "https://registry.yarnpkg.com/domelementtype/-/domelementtype-1.3.1.tgz#d048c44b37b0d10a7f2a3d5fee3f4333d790481f"
 
 domelementtype@~1.1.1:
   version "1.1.3"
@@ -2365,7 +2427,7 @@ domhandler@2.1:
   dependencies:
     domelementtype "1"
 
-domhandler@^2.3.0:
+domhandler@^2.3.0, domhandler@^2.4.2:
   version "2.4.2"
   resolved "https://registry.yarnpkg.com/domhandler/-/domhandler-2.4.2.tgz#8805097e933d65e85546f726d60f5eb88b44f803"
   dependencies:
@@ -2425,8 +2487,8 @@ ee-first@1.1.1:
   resolved "https://registry.yarnpkg.com/ee-first/-/ee-first-1.1.1.tgz#590c61156b0ae2f4f0255732a158b266bc56b21d"
 
 electron-to-chromium@^1.2.7, electron-to-chromium@^1.3.30:
-  version "1.3.59"
-  resolved "https://registry.yarnpkg.com/electron-to-chromium/-/electron-to-chromium-1.3.59.tgz#6377db04d8d3991d6286c72ed5c3fde6f4aaf112"
+  version "1.3.103"
+  resolved "https://registry.yarnpkg.com/electron-to-chromium/-/electron-to-chromium-1.3.103.tgz#a695777efdbc419cad6cbb0e58458251302cd52f"
 
 elliptic@^6.0.0:
   version "6.4.1"
@@ -2448,6 +2510,10 @@ emojis-list@^2.0.0:
   version "2.1.0"
   resolved "https://registry.yarnpkg.com/emojis-list/-/emojis-list-2.1.0.tgz#4daa4d9db00f9819880c79fa457ae5b09a1fd389"
 
+encode-3986@^1.0.0:
+  version "1.0.0"
+  resolved "https://registry.yarnpkg.com/encode-3986/-/encode-3986-1.0.0.tgz#940d51498f8741ade184b75ad1439b317c0c7a60"
+
 encodeurl@~1.0.2:
   version "1.0.2"
   resolved "https://registry.yarnpkg.com/encodeurl/-/encodeurl-1.0.2.tgz#ad3ff4c86ec2d029322f5a02c3a9a606c95b3f59"
@@ -2468,8 +2534,8 @@ enhanced-resolve@^3.4.0:
     tapable "^0.2.7"
 
 entities@^1.1.1, entities@~1.1.1:
-  version "1.1.1"
-  resolved "https://registry.yarnpkg.com/entities/-/entities-1.1.1.tgz#6e5c2d0a5621b5dadaecef80b90edfb5cd7772f0"
+  version "1.1.2"
+  resolved "https://registry.yarnpkg.com/entities/-/entities-1.1.2.tgz#bdfa735299664dfafd34529ed4f8522a275fea56"
 
 errno@^0.1.3, errno@~0.1.7:
   version "0.1.7"
@@ -2483,23 +2549,24 @@ error-ex@^1.2.0:
   dependencies:
     is-arrayish "^0.2.1"
 
-es-abstract@^1.7.0:
-  version "1.12.0"
-  resolved "https://registry.yarnpkg.com/es-abstract/-/es-abstract-1.12.0.tgz#9dbbdd27c6856f0001421ca18782d786bf8a6165"
+es-abstract@^1.11.0, es-abstract@^1.7.0:
+  version "1.13.0"
+  resolved "https://registry.yarnpkg.com/es-abstract/-/es-abstract-1.13.0.tgz#ac86145fdd5099d8dd49558ccba2eaf9b88e24e9"
   dependencies:
-    es-to-primitive "^1.1.1"
+    es-to-primitive "^1.2.0"
     function-bind "^1.1.1"
-    has "^1.0.1"
-    is-callable "^1.1.3"
+    has "^1.0.3"
+    is-callable "^1.1.4"
     is-regex "^1.0.4"
+    object-keys "^1.0.12"
 
-es-to-primitive@^1.1.1:
-  version "1.1.1"
-  resolved "https://registry.yarnpkg.com/es-to-primitive/-/es-to-primitive-1.1.1.tgz#45355248a88979034b6792e19bb81f2b7975dd0d"
+es-to-primitive@^1.2.0:
+  version "1.2.0"
+  resolved "https://registry.yarnpkg.com/es-to-primitive/-/es-to-primitive-1.2.0.tgz#edf72478033456e8dda8ef09e00ad9650707f377"
   dependencies:
-    is-callable "^1.1.1"
+    is-callable "^1.1.4"
     is-date-object "^1.0.1"
-    is-symbol "^1.0.1"
+    is-symbol "^1.0.2"
 
 es5-ext@^0.10.14, es5-ext@^0.10.35, es5-ext@^0.10.9, es5-ext@~0.10.14:
   version "0.10.46"
@@ -2529,8 +2596,8 @@ es6-map@^0.1.3:
     event-emitter "~0.3.5"
 
 es6-promise@^4.0.5:
-  version "4.2.4"
-  resolved "https://registry.yarnpkg.com/es6-promise/-/es6-promise-4.2.4.tgz#dc4221c2b16518760bd8c39a52d8f356fc00ed29"
+  version "4.2.5"
+  resolved "https://registry.yarnpkg.com/es6-promise/-/es6-promise-4.2.5.tgz#da6d0d5692efb461e082c14817fe2427d8f5d054"
 
 es6-set@~0.1.5:
   version "0.1.5"
@@ -2592,7 +2659,7 @@ eslint-config-react-app@^2.1.0:
 
 eslint-config-standard@^11.0.0:
   version "11.0.0"
-  resolved "http://registry.npmjs.org/eslint-config-standard/-/eslint-config-standard-11.0.0.tgz#87ee0d3c9d95382dc761958cbb23da9eea31e0ba"
+  resolved "https://registry.yarnpkg.com/eslint-config-standard/-/eslint-config-standard-11.0.0.tgz#87ee0d3c9d95382dc761958cbb23da9eea31e0ba"
 
 eslint-import-resolver-node@^0.3.1:
   version "0.3.2"
@@ -2618,6 +2685,13 @@ eslint-module-utils@^2.1.1, eslint-module-utils@^2.2.0:
     debug "^2.6.8"
     pkg-dir "^1.0.0"
 
+eslint-plugin-es@^1.3.1:
+  version "1.4.0"
+  resolved "https://registry.yarnpkg.com/eslint-plugin-es/-/eslint-plugin-es-1.4.0.tgz#475f65bb20c993fc10e8c8fe77d1d60068072da6"
+  dependencies:
+    eslint-utils "^1.3.0"
+    regexpp "^2.0.1"
+
 eslint-plugin-flowtype@2.39.1:
   version "2.39.1"
   resolved "https://registry.yarnpkg.com/eslint-plugin-flowtype/-/eslint-plugin-flowtype-2.39.1.tgz#b5624622a0388bcd969f4351131232dcb9649cd5"
@@ -2666,6 +2740,17 @@ eslint-plugin-jsx-a11y@5.1.1:
     emoji-regex "^6.1.0"
     jsx-ast-utils "^1.4.0"
 
+eslint-plugin-node@^8.0.1:
+  version "8.0.1"
+  resolved "https://registry.yarnpkg.com/eslint-plugin-node/-/eslint-plugin-node-8.0.1.tgz#55ae3560022863d141fa7a11799532340a685964"
+  dependencies:
+    eslint-plugin-es "^1.3.1"
+    eslint-utils "^1.3.1"
+    ignore "^5.0.2"
+    minimatch "^3.0.4"
+    resolve "^1.8.1"
+    semver "^5.5.0"
+
 eslint-plugin-promise@^3.7.0:
   version "3.8.0"
   resolved "https://registry.yarnpkg.com/eslint-plugin-promise/-/eslint-plugin-promise-3.8.0.tgz#65ebf27a845e3c1e9d6f6a5622ddd3801694b621"
@@ -2680,14 +2765,16 @@ eslint-plugin-react@7.4.0:
     prop-types "^15.5.10"
 
 eslint-plugin-react@^7.11.1:
-  version "7.11.1"
-  resolved "https://registry.yarnpkg.com/eslint-plugin-react/-/eslint-plugin-react-7.11.1.tgz#c01a7af6f17519457d6116aa94fc6d2ccad5443c"
+  version "7.12.3"
+  resolved "https://registry.yarnpkg.com/eslint-plugin-react/-/eslint-plugin-react-7.12.3.tgz#b9ca4cd7cd3f5d927db418a1950366a12d4568fd"
   dependencies:
     array-includes "^3.0.3"
     doctrine "^2.1.0"
     has "^1.0.3"
     jsx-ast-utils "^2.0.1"
+    object.fromentries "^2.0.0"
     prop-types "^15.6.2"
+    resolve "^1.9.0"
 
 eslint-plugin-standard@^3.1.0:
   version "3.1.0"
@@ -2707,6 +2794,10 @@ eslint-scope@^3.7.1:
     esrecurse "^4.1.0"
     estraverse "^4.1.1"
 
+eslint-utils@^1.3.0, eslint-utils@^1.3.1:
+  version "1.3.1"
+  resolved "https://registry.yarnpkg.com/eslint-utils/-/eslint-utils-1.3.1.tgz#9a851ba89ee7c460346f97cf8939c7298827e512"
+
 eslint-visitor-keys@^1.0.0:
   version "1.0.0"
   resolved "https://registry.yarnpkg.com/eslint-visitor-keys/-/eslint-visitor-keys-1.0.0.tgz#3f3180fb2e291017716acb4c9d6d5b5c34a6a81d"
@@ -2928,12 +3019,12 @@ expand-tilde@^2.0.0, expand-tilde@^2.0.2:
     homedir-polyfill "^1.0.1"
 
 express@^4.13.3:
-  version "4.16.3"
-  resolved "https://registry.yarnpkg.com/express/-/express-4.16.3.tgz#6af8a502350db3246ecc4becf6b5a34d22f7ed53"
+  version "4.16.4"
+  resolved "https://registry.yarnpkg.com/express/-/express-4.16.4.tgz#fddef61926109e24c515ea97fd2f1bdbf62df12e"
   dependencies:
     accepts "~1.3.5"
     array-flatten "1.1.1"
-    body-parser "1.18.2"
+    body-parser "1.18.3"
     content-disposition "0.5.2"
     content-type "~1.0.4"
     cookie "0.3.1"
@@ -2950,10 +3041,10 @@ express@^4.13.3:
     on-finished "~2.3.0"
     parseurl "~1.3.2"
     path-to-regexp "0.1.7"
-    proxy-addr "~2.0.3"
-    qs "6.5.1"
+    proxy-addr "~2.0.4"
+    qs "6.5.2"
     range-parser "~1.2.0"
-    safe-buffer "5.1.1"
+    safe-buffer "5.1.2"
     send "0.16.2"
     serve-static "1.13.2"
     setprototypeof "1.1.0"
@@ -3031,6 +3122,12 @@ fast-deep-equal@^2.0.1:
   version "2.0.1"
   resolved "https://registry.yarnpkg.com/fast-deep-equal/-/fast-deep-equal-2.0.1.tgz#7b05218ddf9667bf7f370bf7fdb2cb15fdd0aa49"
 
+fast-json-patch@^2.0.6:
+  version "2.0.7"
+  resolved "https://registry.yarnpkg.com/fast-json-patch/-/fast-json-patch-2.0.7.tgz#55864b08b1e50381d2f37fd472bb2e18fe54a733"
+  dependencies:
+    deep-equal "^1.0.1"
+
 fast-json-stable-stringify@^2.0.0:
   version "2.0.0"
   resolved "https://registry.yarnpkg.com/fast-json-stable-stringify/-/fast-json-stable-stringify-2.0.0.tgz#d5142c0caee6b1189f87d3a76111064f86c8bbf2"
@@ -3046,8 +3143,8 @@ fast-url-parser@1.1.3:
     punycode "^1.3.2"
 
 fastparse@^1.1.1:
-  version "1.1.1"
-  resolved "https://registry.yarnpkg.com/fastparse/-/fastparse-1.1.1.tgz#d1e2643b38a94d7583b479060e6c4affc94071f8"
+  version "1.1.2"
+  resolved "https://registry.yarnpkg.com/fastparse/-/fastparse-1.1.2.tgz#91728c5a5942eced8531283c79441ee4122c35a9"
 
 faye-websocket@^0.10.0:
   version "0.10.0"
@@ -3079,7 +3176,7 @@ fbemitter@^2.0.0:
   dependencies:
     fbjs "^0.8.4"
 
-fbjs@^0.8.0, fbjs@^0.8.1, fbjs@^0.8.16, fbjs@^0.8.4:
+fbjs@^0.8.0, fbjs@^0.8.1, fbjs@^0.8.4:
   version "0.8.17"
   resolved "https://registry.yarnpkg.com/fbjs/-/fbjs-0.8.17.tgz#c4d598ead6949112653d6588b01a5cdcd9f90fdd"
   dependencies:
@@ -3118,6 +3215,10 @@ file-loader@1.1.5:
     loader-utils "^1.0.2"
     schema-utils "^0.3.0"
 
+file-saver@^2.0.0:
+  version "2.0.0"
+  resolved "https://registry.yarnpkg.com/file-saver/-/file-saver-2.0.0.tgz#74eef7748159503b60008a15af2f1930fb5df7ab"
+
 filename-regex@^2.0.0:
   version "2.0.1"
   resolved "https://registry.yarnpkg.com/filename-regex/-/filename-regex-2.0.1.tgz#c1c4b9bee3e09725ddb106b75c1e301fe2f18b26"
@@ -3194,12 +3295,12 @@ find-up@^2.0.0, find-up@^2.1.0:
     locate-path "^2.0.0"
 
 flat-cache@^1.2.1:
-  version "1.3.0"
-  resolved "https://registry.yarnpkg.com/flat-cache/-/flat-cache-1.3.0.tgz#d3030b32b38154f4e3b7e9c709f490f7ef97c481"
+  version "1.3.4"
+  resolved "https://registry.yarnpkg.com/flat-cache/-/flat-cache-1.3.4.tgz#2c2ef77525cc2929007dfffa1dd314aa9c9dee6f"
   dependencies:
     circular-json "^0.3.1"
-    del "^2.0.2"
     graceful-fs "^4.1.2"
+    rimraf "~2.6.2"
     write "^0.2.1"
 
 flatten@^1.0.2:
@@ -3214,10 +3315,10 @@ flux@^3.1.3:
     fbjs "^0.8.0"
 
 follow-redirects@^1.0.0:
-  version "1.5.6"
-  resolved "https://registry.yarnpkg.com/follow-redirects/-/follow-redirects-1.5.6.tgz#44eb4fe1981dff25e2bd86b7d4033abcdb81e965"
+  version "1.6.1"
+  resolved "https://registry.yarnpkg.com/follow-redirects/-/follow-redirects-1.6.1.tgz#514973c44b5757368bad8bddfe52f81f015c94cb"
   dependencies:
-    debug "^3.1.0"
+    debug "=3.1.0"
 
 for-in@^1.0.1, for-in@^1.0.2:
   version "1.0.2"
@@ -3233,12 +3334,20 @@ forever-agent@~0.6.1:
   version "0.6.1"
   resolved "https://registry.yarnpkg.com/forever-agent/-/forever-agent-0.6.1.tgz#fbc71f0c41adeb37f96c577ad1ed42d8fdacca91"
 
+form-data@^1.0.0-rc3:
+  version "1.0.1"
+  resolved "https://registry.yarnpkg.com/form-data/-/form-data-1.0.1.tgz#ae315db9a4907fa065502304a66d7733475ee37c"
+  dependencies:
+    async "^2.0.1"
+    combined-stream "^1.0.5"
+    mime-types "^2.1.11"
+
 form-data@~2.3.2:
-  version "2.3.2"
-  resolved "https://registry.yarnpkg.com/form-data/-/form-data-2.3.2.tgz#4970498be604c20c005d4f5c23aecd21d6b49099"
+  version "2.3.3"
+  resolved "https://registry.yarnpkg.com/form-data/-/form-data-2.3.3.tgz#dcce52c05f644f298c6a7ab936bd724ceffbf3a6"
   dependencies:
     asynckit "^0.4.0"
-    combined-stream "1.0.6"
+    combined-stream "^1.0.6"
     mime-types "^2.1.12"
 
 forwarded@~0.1.2:
@@ -3284,8 +3393,8 @@ fs.realpath@^1.0.0:
   resolved "https://registry.yarnpkg.com/fs.realpath/-/fs.realpath-1.0.0.tgz#1504ad2523158caa40db4a2787cb01411994ea4f"
 
 fsevents@^1.0.0, fsevents@^1.1.3, fsevents@^1.2.2:
-  version "1.2.4"
-  resolved "https://registry.yarnpkg.com/fsevents/-/fsevents-1.2.4.tgz#f41dcb1af2582af3692da36fc55cbd8e1041c426"
+  version "1.2.6"
+  resolved "https://registry.yarnpkg.com/fsevents/-/fsevents-1.2.6.tgz#d3a1864a71876a2eb9b244e3bd8f606eb09568c0"
   dependencies:
     nan "^2.9.2"
     node-pre-gyp "^0.10.0"
@@ -3353,13 +3462,9 @@ glob-parent@^3.1.0:
     is-glob "^3.1.0"
     path-dirname "^1.0.0"
 
-glob-slash@1.0.0:
-  version "1.0.0"
-  resolved "https://registry.yarnpkg.com/glob-slash/-/glob-slash-1.0.0.tgz#fe52efa433233f74a2fe64c7abb9bc848202ab95"
-
-glob@^7.0.3, glob@^7.0.5, glob@^7.1.1, glob@^7.1.2:
-  version "7.1.2"
-  resolved "https://registry.yarnpkg.com/glob/-/glob-7.1.2.tgz#c19c9df9a028702d678612384a6552404c636d15"
+glob@^7.0.3, glob@^7.1.1, glob@^7.1.2, glob@^7.1.3:
+  version "7.1.3"
+  resolved "https://registry.yarnpkg.com/glob/-/glob-7.1.3.tgz#3960832d3f1574108342dafd3a67b332c0969df1"
   dependencies:
     fs.realpath "^1.0.0"
     inflight "^1.0.4"
@@ -3393,8 +3498,8 @@ global-prefix@^1.0.1:
     which "^1.2.14"
 
 globals@^11.0.1, globals@^11.1.0:
-  version "11.7.0"
-  resolved "https://registry.yarnpkg.com/globals/-/globals-11.7.0.tgz#a583faa43055b1aca771914bf68258e2fc125673"
+  version "11.10.0"
+  resolved "https://registry.yarnpkg.com/globals/-/globals-11.10.0.tgz#1e09776dffda5e01816b3bb4077c8b59c24eaa50"
 
 globals@^9.17.0, globals@^9.18.0:
   version "9.18.0"
@@ -3421,12 +3526,6 @@ globby@^6.1.0:
     pify "^2.0.0"
     pinkie-promise "^2.0.0"
 
-good-listener@^1.2.2:
-  version "1.2.2"
-  resolved "https://registry.yarnpkg.com/good-listener/-/good-listener-1.2.2.tgz#d53b30cdf9313dffb7dc9a0d477096aa6d145c50"
-  dependencies:
-    delegate "^3.1.2"
-
 got@^6.7.1:
   version "6.7.1"
   resolved "https://registry.yarnpkg.com/got/-/got-6.7.1.tgz#240cd05785a9a18e561dc1b44b41c763ef1e8db0"
@@ -3444,8 +3543,8 @@ got@^6.7.1:
     url-parse-lax "^1.0.0"
 
 graceful-fs@^4.1.11, graceful-fs@^4.1.2, graceful-fs@^4.1.6, graceful-fs@^4.1.9:
-  version "4.1.11"
-  resolved "https://registry.yarnpkg.com/graceful-fs/-/graceful-fs-4.1.11.tgz#0e8bdfe4d1ddb8854d64e04ea7c00e2a026e5658"
+  version "4.1.15"
+  resolved "https://registry.yarnpkg.com/graceful-fs/-/graceful-fs-4.1.15.tgz#ffb703e1066e8a0eeaa4c8b80ba9253eeefbfb00"
 
 growly@^1.3.0:
   version "1.3.0"
@@ -3462,24 +3561,24 @@ handle-thing@^1.2.5:
   resolved "https://registry.yarnpkg.com/handle-thing/-/handle-thing-1.2.5.tgz#fd7aad726bf1a5fd16dfc29b2f7a6601d27139c4"
 
 handlebars@^4.0.3:
-  version "4.0.11"
-  resolved "https://registry.yarnpkg.com/handlebars/-/handlebars-4.0.11.tgz#630a35dfe0294bc281edae6ffc5d329fc7982dcc"
+  version "4.0.12"
+  resolved "https://registry.yarnpkg.com/handlebars/-/handlebars-4.0.12.tgz#2c15c8a96d46da5e266700518ba8cb8d919d5bc5"
   dependencies:
-    async "^1.4.0"
+    async "^2.5.0"
     optimist "^0.6.1"
-    source-map "^0.4.4"
+    source-map "^0.6.1"
   optionalDependencies:
-    uglify-js "^2.6"
+    uglify-js "^3.1.4"
 
 har-schema@^2.0.0:
   version "2.0.0"
   resolved "https://registry.yarnpkg.com/har-schema/-/har-schema-2.0.0.tgz#a94c2224ebcac04782a0d9035521f24735b7ec92"
 
 har-validator@~5.1.0:
-  version "5.1.0"
-  resolved "https://registry.yarnpkg.com/har-validator/-/har-validator-5.1.0.tgz#44657f5688a22cfd4b72486e81b3a3fb11742c29"
+  version "5.1.3"
+  resolved "https://registry.yarnpkg.com/har-validator/-/har-validator-5.1.3.tgz#1ef89ebd3e4996557675eed9893110dc350fa080"
   dependencies:
-    ajv "^5.3.0"
+    ajv "^6.5.5"
     har-schema "^2.0.0"
 
 has-ansi@^2.0.0:
@@ -3500,6 +3599,10 @@ has-flag@^3.0.0:
   version "3.0.0"
   resolved "https://registry.yarnpkg.com/has-flag/-/has-flag-3.0.0.tgz#b5d454dc2199ae225699f3467e5a07f3b955bafd"
 
+has-symbols@^1.0.0:
+  version "1.0.0"
+  resolved "https://registry.yarnpkg.com/has-symbols/-/has-symbols-1.0.0.tgz#ba1a8f1af2a0fc39650f5c850367704122063b44"
+
 has-unicode@^2.0.0:
   version "2.0.1"
   resolved "https://registry.yarnpkg.com/has-unicode/-/has-unicode-2.0.1.tgz#e0e6fe6a28cf51138855e086d1691e771de2a8b9"
@@ -3545,19 +3648,19 @@ hash-base@^3.0.0:
     safe-buffer "^5.0.1"
 
 hash.js@^1.0.0, hash.js@^1.0.3:
-  version "1.1.5"
-  resolved "https://registry.yarnpkg.com/hash.js/-/hash.js-1.1.5.tgz#e38ab4b85dfb1e0c40fe9265c0e9b54854c23812"
+  version "1.1.7"
+  resolved "https://registry.yarnpkg.com/hash.js/-/hash.js-1.1.7.tgz#0babca538e8d4ee4a0f8988d68866537a003cf42"
   dependencies:
     inherits "^2.0.3"
     minimalistic-assert "^1.0.1"
 
-he@1.1.x:
-  version "1.1.1"
-  resolved "https://registry.yarnpkg.com/he/-/he-1.1.1.tgz#93410fd21b009735151f8868c2f271f3427e23fd"
+he@1.2.x:
+  version "1.2.0"
+  resolved "https://registry.yarnpkg.com/he/-/he-1.2.0.tgz#84ae65fa7eafb165fddb61566ae14baf05664f0f"
 
 highlight.js@^9.11.0:
-  version "9.12.0"
-  resolved "https://registry.yarnpkg.com/highlight.js/-/highlight.js-9.12.0.tgz#e6d9dbe57cbefe60751f02af336195870c90c01e"
+  version "9.13.1"
+  resolved "https://registry.yarnpkg.com/highlight.js/-/highlight.js-9.13.1.tgz#054586d53a6863311168488a0f58d6c505ce641e"
 
 history@^4.7.2:
   version "4.7.2"
@@ -3581,6 +3684,18 @@ hoist-non-react-statics@^2.3.1, hoist-non-react-statics@^2.5.0:
   version "2.5.5"
   resolved "https://registry.yarnpkg.com/hoist-non-react-statics/-/hoist-non-react-statics-2.5.5.tgz#c5903cf409c0dfd908f388e619d86b9c1174cb47"
 
+hoist-non-react-statics@^3.0.0:
+  version "3.3.0"
+  resolved "https://registry.yarnpkg.com/hoist-non-react-statics/-/hoist-non-react-statics-3.3.0.tgz#b09178f0122184fb95acf525daaecb4d8f45958b"
+  dependencies:
+    react-is "^16.7.0"
+
+hoist-non-react-statics@^3.2.1:
+  version "3.2.1"
+  resolved "https://registry.yarnpkg.com/hoist-non-react-statics/-/hoist-non-react-statics-3.2.1.tgz#c09c0555c84b38a7ede6912b61efddafd6e75e1e"
+  dependencies:
+    react-is "^16.3.2"
+
 home-or-tmp@^2.0.0:
   version "2.0.0"
   resolved "https://registry.yarnpkg.com/home-or-tmp/-/home-or-tmp-2.0.0.tgz#e36c3f2d2cae7d746a857e38d18d5f32a7882db8"
@@ -3608,8 +3723,8 @@ hpack.js@^2.1.6:
     wbuf "^1.1.0"
 
 html-comment-regex@^1.1.0:
-  version "1.1.1"
-  resolved "https://registry.yarnpkg.com/html-comment-regex/-/html-comment-regex-1.1.1.tgz#668b93776eaae55ebde8f3ad464b307a4963625e"
+  version "1.1.2"
+  resolved "https://registry.yarnpkg.com/html-comment-regex/-/html-comment-regex-1.1.2.tgz#97d4688aeb5c81886a364faa0cad1dda14d433a7"
 
 html-encoding-sniffer@^1.0.1:
   version "1.0.2"
@@ -3622,26 +3737,26 @@ html-entities@^1.2.0:
   resolved "https://registry.yarnpkg.com/html-entities/-/html-entities-1.2.1.tgz#0df29351f0721163515dfb9e5543e5f6eed5162f"
 
 html-minifier@^3.2.3:
-  version "3.5.20"
-  resolved "https://registry.yarnpkg.com/html-minifier/-/html-minifier-3.5.20.tgz#7b19fd3caa0cb79f7cde5ee5c3abdf8ecaa6bb14"
+  version "3.5.21"
+  resolved "https://registry.yarnpkg.com/html-minifier/-/html-minifier-3.5.21.tgz#d0040e054730e354db008463593194015212d20c"
   dependencies:
     camel-case "3.0.x"
     clean-css "4.2.x"
     commander "2.17.x"
-    he "1.1.x"
+    he "1.2.x"
     param-case "2.1.x"
     relateurl "0.2.x"
     uglify-js "3.4.x"
 
 html-to-react@^1.3.3:
-  version "1.3.3"
-  resolved "https://registry.yarnpkg.com/html-to-react/-/html-to-react-1.3.3.tgz#e41666a735f9997ed2372dcd21d8b0e42b334467"
+  version "1.3.4"
+  resolved "https://registry.yarnpkg.com/html-to-react/-/html-to-react-1.3.4.tgz#647b3a54fdec73a6461864b129fb0d1eec7d4589"
   dependencies:
-    domhandler "^2.3.0"
+    domhandler "^2.4.2"
     escape-string-regexp "^1.0.5"
-    htmlparser2 "^3.8.3"
-    ramda "^0.25.0"
-    underscore.string.fp "^1.0.4"
+    htmlparser2 "^3.10.0"
+    lodash.camelcase "^4.3.0"
+    ramda "^0.26"
 
 html-webpack-plugin@2.29.0:
   version "2.29.0"
@@ -3654,16 +3769,16 @@ html-webpack-plugin@2.29.0:
     pretty-error "^2.0.2"
     toposort "^1.0.0"
 
-htmlparser2@^3.8.3:
-  version "3.9.2"
-  resolved "https://registry.yarnpkg.com/htmlparser2/-/htmlparser2-3.9.2.tgz#1bdf87acca0f3f9e53fa4fcceb0f4b4cbb00b338"
+htmlparser2@^3.10.0:
+  version "3.10.0"
+  resolved "https://registry.yarnpkg.com/htmlparser2/-/htmlparser2-3.10.0.tgz#5f5e422dcf6119c0d983ed36260ce9ded0bee464"
   dependencies:
     domelementtype "^1.3.0"
     domhandler "^2.3.0"
     domutils "^1.5.1"
     entities "^1.1.1"
     inherits "^2.0.1"
-    readable-stream "^2.0.2"
+    readable-stream "^3.0.6"
 
 htmlparser2@~3.3.0:
   version "3.3.0"
@@ -3678,16 +3793,7 @@ http-deceiver@^1.2.7:
   version "1.2.7"
   resolved "https://registry.yarnpkg.com/http-deceiver/-/http-deceiver-1.2.7.tgz#fa7168944ab9a519d337cb0bec7284dc3e723d87"
 
-http-errors@1.6.2:
-  version "1.6.2"
-  resolved "https://registry.yarnpkg.com/http-errors/-/http-errors-1.6.2.tgz#0a002cc85707192a7e7946ceedc11155f60ec736"
-  dependencies:
-    depd "1.1.1"
-    inherits "2.0.3"
-    setprototypeof "1.0.3"
-    statuses ">= 1.3.1 < 2"
-
-http-errors@~1.6.2:
+http-errors@1.6.3, http-errors@~1.6.2, http-errors@~1.6.3:
   version "1.6.3"
   resolved "https://registry.yarnpkg.com/http-errors/-/http-errors-1.6.3.tgz#8b55680bb4be283a0b5bf4ea2e38580be1d9320d"
   dependencies:
@@ -3697,8 +3803,8 @@ http-errors@~1.6.2:
     statuses ">= 1.4.0 < 2"
 
 http-parser-js@>=0.4.0:
-  version "0.4.13"
-  resolved "https://registry.yarnpkg.com/http-parser-js/-/http-parser-js-0.4.13.tgz#3bd6d6fde6e3172c9334c3b33b6c193d80fe1137"
+  version "0.5.0"
+  resolved "https://registry.yarnpkg.com/http-parser-js/-/http-parser-js-0.5.0.tgz#d65edbede84349d0dc30320815a15d39cc3cbbd8"
 
 http-proxy-middleware@~0.17.4:
   version "0.17.4"
@@ -3733,16 +3839,18 @@ hyphenate-style-name@^1.0.2:
   version "1.0.2"
   resolved "https://registry.yarnpkg.com/hyphenate-style-name/-/hyphenate-style-name-1.0.2.tgz#31160a36930adaf1fc04c6074f7eb41465d4ec4b"
 
-iconv-lite@0.4.19:
-  version "0.4.19"
-  resolved "https://registry.yarnpkg.com/iconv-lite/-/iconv-lite-0.4.19.tgz#f7468f60135f5e5dad3399c0a81be9a1603a082b"
-
-iconv-lite@0.4.23, iconv-lite@^0.4.17, iconv-lite@^0.4.4, iconv-lite@~0.4.13:
+iconv-lite@0.4.23:
   version "0.4.23"
   resolved "https://registry.yarnpkg.com/iconv-lite/-/iconv-lite-0.4.23.tgz#297871f63be507adcfbfca715d0cd0eed84e9a63"
   dependencies:
     safer-buffer ">= 2.1.2 < 3"
 
+iconv-lite@0.4.24, iconv-lite@^0.4.17, iconv-lite@^0.4.4, iconv-lite@~0.4.13:
+  version "0.4.24"
+  resolved "https://registry.yarnpkg.com/iconv-lite/-/iconv-lite-0.4.24.tgz#2022b4b25fbddc21d2f524974a474aafe733908b"
+  dependencies:
+    safer-buffer ">= 2.1.2 < 3"
+
 icss-replace-symbols@^1.1.0:
   version "1.1.0"
   resolved "https://registry.yarnpkg.com/icss-replace-symbols/-/icss-replace-symbols-1.1.0.tgz#06ea6f83679a7749e386cfe1fe812ae5db223ded"
@@ -3767,6 +3875,10 @@ ignore@^3.3.3:
   version "3.3.10"
   resolved "https://registry.yarnpkg.com/ignore/-/ignore-3.3.10.tgz#0a97fb876986e8081c631160f8f9f389157f0043"
 
+ignore@^5.0.2:
+  version "5.0.4"
+  resolved "https://registry.yarnpkg.com/ignore/-/ignore-5.0.4.tgz#33168af4a21e99b00c5d41cbadb6a6cb49903a45"
+
 import-lazy@^2.1.0:
   version "2.1.0"
   resolved "https://registry.yarnpkg.com/import-lazy/-/import-lazy-2.1.0.tgz#05698e3d45c88e8d7e9d92cb0584e77f096f3e43"
@@ -3783,10 +3895,10 @@ imurmurhash@^0.1.4:
   resolved "https://registry.yarnpkg.com/imurmurhash/-/imurmurhash-0.1.4.tgz#9218b9b2b928a238b13dc4fb6b6d576f231453ea"
 
 indefinite-observable@^1.0.1:
-  version "1.0.1"
-  resolved "https://registry.yarnpkg.com/indefinite-observable/-/indefinite-observable-1.0.1.tgz#09915423cc8d6f7eb1cb7882ad134633c9a6edc3"
+  version "1.0.2"
+  resolved "https://registry.yarnpkg.com/indefinite-observable/-/indefinite-observable-1.0.2.tgz#0a328793ab2385d4b9dca23eaab4afe6936a73f8"
   dependencies:
-    symbol-observable "1.0.4"
+    symbol-observable "1.2.0"
 
 indent-string@^2.1.0:
   version "2.1.0"
@@ -3847,8 +3959,8 @@ internal-ip@1.2.0:
     meow "^3.3.0"
 
 interpret@^1.0.0:
-  version "1.1.0"
-  resolved "https://registry.yarnpkg.com/interpret/-/interpret-1.1.0.tgz#7ed1b1410c6a0e0f78cf95d3b8440c63f78b8614"
+  version "1.2.0"
+  resolved "https://registry.yarnpkg.com/interpret/-/interpret-1.2.0.tgz#d5061a6224be58e8083985f5014d844359576296"
 
 invariant@^2.2.0, invariant@^2.2.1, invariant@^2.2.2, invariant@^2.2.4:
   version "2.2.4"
@@ -3904,15 +4016,15 @@ is-builtin-module@^1.0.0:
   dependencies:
     builtin-modules "^1.0.0"
 
-is-callable@^1.1.1, is-callable@^1.1.3:
+is-callable@^1.1.4:
   version "1.1.4"
   resolved "https://registry.yarnpkg.com/is-callable/-/is-callable-1.1.4.tgz#1e1adf219e1eeb684d691f9d6a05ff0d30a24d75"
 
 is-ci@^1.0.10:
-  version "1.2.0"
-  resolved "https://registry.yarnpkg.com/is-ci/-/is-ci-1.2.0.tgz#3f4a08d6303a09882cef3f0fb97439c5f5ce2d53"
+  version "1.2.1"
+  resolved "https://registry.yarnpkg.com/is-ci/-/is-ci-1.2.1.tgz#e3779c8ee17fccf428488f6e281187f2e632841c"
   dependencies:
-    ci-info "^1.3.0"
+    ci-info "^1.5.0"
 
 is-data-descriptor@^0.1.4:
   version "0.1.4"
@@ -3994,10 +4106,6 @@ is-fullwidth-code-point@^2.0.0:
   version "2.0.0"
   resolved "https://registry.yarnpkg.com/is-fullwidth-code-point/-/is-fullwidth-code-point-2.0.0.tgz#a3b30a5c4f199183167aaab93beefae3ddfb654f"
 
-is-function@^1.0.1:
-  version "1.0.1"
-  resolved "https://registry.yarnpkg.com/is-function/-/is-function-1.0.1.tgz#12cfb98b65b57dd3d193a3121f5f6e2f437602b5"
-
 is-glob@^2.0.0, is-glob@^2.0.1:
   version "2.0.1"
   resolved "https://registry.yarnpkg.com/is-glob/-/is-glob-2.0.1.tgz#d096f926a3ded5600f3fdfd91198cb0888c2d863"
@@ -4121,9 +4229,11 @@ is-svg@^2.0.0:
   dependencies:
     html-comment-regex "^1.1.0"
 
-is-symbol@^1.0.1:
-  version "1.0.1"
-  resolved "https://registry.yarnpkg.com/is-symbol/-/is-symbol-1.0.1.tgz#3cc59f00025194b6ab2e38dbae6689256b660572"
+is-symbol@^1.0.2:
+  version "1.0.2"
+  resolved "https://registry.yarnpkg.com/is-symbol/-/is-symbol-1.0.2.tgz#a055f6ae57192caee329e7a860118b497a950f38"
+  dependencies:
+    has-symbols "^1.0.0"
 
 is-typedarray@~1.0.0:
   version "1.0.0"
@@ -4170,81 +4280,76 @@ isomorphic-fetch@^2.1.1:
     node-fetch "^1.0.1"
     whatwg-fetch ">=0.10.0"
 
+isomorphic-form-data@0.0.1:
+  version "0.0.1"
+  resolved "https://registry.yarnpkg.com/isomorphic-form-data/-/isomorphic-form-data-0.0.1.tgz#026f627e032b0cd8413ecc8755928b94a468b062"
+  dependencies:
+    form-data "^1.0.0-rc3"
+
 isstream@~0.1.2:
   version "0.1.2"
   resolved "https://registry.yarnpkg.com/isstream/-/isstream-0.1.2.tgz#47e63f7af55afa6f92e1500e690eb8b8529c099a"
 
 istanbul-api@^1.1.1:
-  version "1.3.1"
-  resolved "https://registry.yarnpkg.com/istanbul-api/-/istanbul-api-1.3.1.tgz#4c3b05d18c0016d1022e079b98dc82c40f488954"
+  version "1.3.7"
+  resolved "https://registry.yarnpkg.com/istanbul-api/-/istanbul-api-1.3.7.tgz#a86c770d2b03e11e3f778cd7aedd82d2722092aa"
   dependencies:
     async "^2.1.4"
-    compare-versions "^3.1.0"
     fileset "^2.0.2"
-    istanbul-lib-coverage "^1.2.0"
-    istanbul-lib-hook "^1.2.0"
-    istanbul-lib-instrument "^1.10.1"
-    istanbul-lib-report "^1.1.4"
-    istanbul-lib-source-maps "^1.2.4"
-    istanbul-reports "^1.3.0"
+    istanbul-lib-coverage "^1.2.1"
+    istanbul-lib-hook "^1.2.2"
+    istanbul-lib-instrument "^1.10.2"
+    istanbul-lib-report "^1.1.5"
+    istanbul-lib-source-maps "^1.2.6"
+    istanbul-reports "^1.5.1"
     js-yaml "^3.7.0"
     mkdirp "^0.5.1"
     once "^1.4.0"
 
-istanbul-lib-coverage@^1.0.1, istanbul-lib-coverage@^1.1.2, istanbul-lib-coverage@^1.2.0:
-  version "1.2.0"
-  resolved "https://registry.yarnpkg.com/istanbul-lib-coverage/-/istanbul-lib-coverage-1.2.0.tgz#f7d8f2e42b97e37fe796114cb0f9d68b5e3a4341"
-
-istanbul-lib-hook@^1.2.0:
+istanbul-lib-coverage@^1.0.1, istanbul-lib-coverage@^1.2.1:
   version "1.2.1"
-  resolved "https://registry.yarnpkg.com/istanbul-lib-hook/-/istanbul-lib-hook-1.2.1.tgz#f614ec45287b2a8fc4f07f5660af787575601805"
+  resolved "https://registry.yarnpkg.com/istanbul-lib-coverage/-/istanbul-lib-coverage-1.2.1.tgz#ccf7edcd0a0bb9b8f729feeb0930470f9af664f0"
+
+istanbul-lib-hook@^1.2.2:
+  version "1.2.2"
+  resolved "https://registry.yarnpkg.com/istanbul-lib-hook/-/istanbul-lib-hook-1.2.2.tgz#bc6bf07f12a641fbf1c85391d0daa8f0aea6bf86"
   dependencies:
-    append-transform "^1.0.0"
+    append-transform "^0.4.0"
 
-istanbul-lib-instrument@^1.10.1, istanbul-lib-instrument@^1.4.2:
-  version "1.10.1"
-  resolved "https://registry.yarnpkg.com/istanbul-lib-instrument/-/istanbul-lib-instrument-1.10.1.tgz#724b4b6caceba8692d3f1f9d0727e279c401af7b"
+istanbul-lib-instrument@^1.10.1, istanbul-lib-instrument@^1.10.2, istanbul-lib-instrument@^1.4.2:
+  version "1.10.2"
+  resolved "https://registry.yarnpkg.com/istanbul-lib-instrument/-/istanbul-lib-instrument-1.10.2.tgz#1f55ed10ac3c47f2bdddd5307935126754d0a9ca"
   dependencies:
     babel-generator "^6.18.0"
     babel-template "^6.16.0"
     babel-traverse "^6.18.0"
     babel-types "^6.18.0"
     babylon "^6.18.0"
-    istanbul-lib-coverage "^1.2.0"
+    istanbul-lib-coverage "^1.2.1"
     semver "^5.3.0"
 
-istanbul-lib-report@^1.1.4:
-  version "1.1.4"
-  resolved "https://registry.yarnpkg.com/istanbul-lib-report/-/istanbul-lib-report-1.1.4.tgz#e886cdf505c4ebbd8e099e4396a90d0a28e2acb5"
+istanbul-lib-report@^1.1.5:
+  version "1.1.5"
+  resolved "https://registry.yarnpkg.com/istanbul-lib-report/-/istanbul-lib-report-1.1.5.tgz#f2a657fc6282f96170aaf281eb30a458f7f4170c"
   dependencies:
-    istanbul-lib-coverage "^1.2.0"
+    istanbul-lib-coverage "^1.2.1"
     mkdirp "^0.5.1"
     path-parse "^1.0.5"
     supports-color "^3.1.2"
 
-istanbul-lib-source-maps@^1.1.0:
-  version "1.2.3"
-  resolved "https://registry.yarnpkg.com/istanbul-lib-source-maps/-/istanbul-lib-source-maps-1.2.3.tgz#20fb54b14e14b3fb6edb6aca3571fd2143db44e6"
-  dependencies:
-    debug "^3.1.0"
-    istanbul-lib-coverage "^1.1.2"
-    mkdirp "^0.5.1"
-    rimraf "^2.6.1"
-    source-map "^0.5.3"
-
-istanbul-lib-source-maps@^1.2.4:
-  version "1.2.5"
-  resolved "https://registry.yarnpkg.com/istanbul-lib-source-maps/-/istanbul-lib-source-maps-1.2.5.tgz#ffe6be4e7ab86d3603e4290d54990b14506fc9b1"
+istanbul-lib-source-maps@^1.1.0, istanbul-lib-source-maps@^1.2.6:
+  version "1.2.6"
+  resolved "https://registry.yarnpkg.com/istanbul-lib-source-maps/-/istanbul-lib-source-maps-1.2.6.tgz#37b9ff661580f8fca11232752ee42e08c6675d8f"
   dependencies:
     debug "^3.1.0"
-    istanbul-lib-coverage "^1.2.0"
+    istanbul-lib-coverage "^1.2.1"
     mkdirp "^0.5.1"
     rimraf "^2.6.1"
     source-map "^0.5.3"
 
-istanbul-reports@^1.3.0:
-  version "1.3.0"
-  resolved "https://registry.yarnpkg.com/istanbul-reports/-/istanbul-reports-1.3.0.tgz#2f322e81e1d9520767597dca3c20a0cce89a3554"
+istanbul-reports@^1.5.1:
+  version "1.5.1"
+  resolved "https://registry.yarnpkg.com/istanbul-reports/-/istanbul-reports-1.5.1.tgz#97e4dbf3b515e8c484caea15d6524eebd3ff4e1a"
   dependencies:
     handlebars "^4.0.3"
 
@@ -4460,8 +4565,8 @@ jest@20.0.4:
     jest-cli "^20.0.4"
 
 js-base64@^2.1.9:
-  version "2.4.8"
-  resolved "https://registry.yarnpkg.com/js-base64/-/js-base64-2.4.8.tgz#57a9b130888f956834aa40c5b165ba59c758f033"
+  version "2.5.0"
+  resolved "https://registry.yarnpkg.com/js-base64/-/js-base64-2.5.0.tgz#42255ba183ab67ce59a0dee640afdc00ab5ae93e"
 
 js-tokens@^3.0.0, js-tokens@^3.0.2:
   version "3.0.2"
@@ -4472,8 +4577,8 @@ js-tokens@^3.0.0, js-tokens@^3.0.2:
   resolved "https://registry.yarnpkg.com/js-tokens/-/js-tokens-4.0.0.tgz#19203fb59991df98e3a287050d4647cdeaf32499"
 
 js-yaml@^3.4.3, js-yaml@^3.7.0, js-yaml@^3.9.1:
-  version "3.12.0"
-  resolved "https://registry.yarnpkg.com/js-yaml/-/js-yaml-3.12.0.tgz#eaed656ec8344f10f527c6bfa1b6e2244de167d1"
+  version "3.12.1"
+  resolved "https://registry.yarnpkg.com/js-yaml/-/js-yaml-3.12.1.tgz#295c8632a18a23e054cf5c9d3cecafe678167600"
   dependencies:
     argparse "^1.0.7"
     esprima "^4.0.0"
@@ -4518,8 +4623,8 @@ jsesc@^1.3.0:
   resolved "https://registry.yarnpkg.com/jsesc/-/jsesc-1.3.0.tgz#46c3fec8c1892b12b0833db9bc7622176dbab34b"
 
 jsesc@^2.5.1:
-  version "2.5.1"
-  resolved "https://registry.yarnpkg.com/jsesc/-/jsesc-2.5.1.tgz#e421a2a8e20d6b0819df28908f782526b96dd1fe"
+  version "2.5.2"
+  resolved "https://registry.yarnpkg.com/jsesc/-/jsesc-2.5.2.tgz#80564d2e483dacf6e8ef209650a67df3f0c283a4"
 
 jsesc@~0.5.0:
   version "0.5.0"
@@ -4563,6 +4668,12 @@ json5@^0.5.0, json5@^0.5.1:
   version "0.5.1"
   resolved "https://registry.yarnpkg.com/json5/-/json5-0.5.1.tgz#1eade7acc012034ad84e2396767ead9fa5495821"
 
+json5@^1.0.1:
+  version "1.0.1"
+  resolved "https://registry.yarnpkg.com/json5/-/json5-1.0.1.tgz#779fb0018604fa854eacbf6252180d83543e3dbe"
+  dependencies:
+    minimist "^1.2.0"
+
 jsonfile@^2.1.0:
   version "2.4.0"
   resolved "https://registry.yarnpkg.com/jsonfile/-/jsonfile-2.4.0.tgz#3736a2b428b87bbda0cc83b53fa3d633a35c2ae8"
@@ -4588,32 +4699,16 @@ jsprim@^1.2.2:
     json-schema "0.2.3"
     verror "1.10.0"
 
-jss-camel-case@^6.0.0, jss-camel-case@^6.1.0:
+jss-camel-case@^6.0.0:
   version "6.1.0"
   resolved "https://registry.yarnpkg.com/jss-camel-case/-/jss-camel-case-6.1.0.tgz#ccb1ff8d6c701c02a1fed6fb6fb6b7896e11ce44"
   dependencies:
     hyphenate-style-name "^1.0.2"
 
-jss-compose@^5.0.0:
-  version "5.0.0"
-  resolved "https://registry.yarnpkg.com/jss-compose/-/jss-compose-5.0.0.tgz#ce01b2e4521d65c37ea42cf49116e5f7ab596484"
-  dependencies:
-    warning "^3.0.0"
-
 jss-default-unit@^8.0.2:
   version "8.0.2"
   resolved "https://registry.yarnpkg.com/jss-default-unit/-/jss-default-unit-8.0.2.tgz#cc1e889bae4c0b9419327b314ab1c8e2826890e6"
 
-jss-expand@^5.3.0:
-  version "5.3.0"
-  resolved "https://registry.yarnpkg.com/jss-expand/-/jss-expand-5.3.0.tgz#02be076efe650125c842f5bb6fb68786fe441ed6"
-
-jss-extend@^6.2.0:
-  version "6.2.0"
-  resolved "https://registry.yarnpkg.com/jss-extend/-/jss-extend-6.2.0.tgz#4af09d0b72fb98ee229970f8ca852fec1ca2a8dc"
-  dependencies:
-    warning "^3.0.0"
-
 jss-global@^3.0.0:
   version "3.0.0"
   resolved "https://registry.yarnpkg.com/jss-global/-/jss-global-3.0.0.tgz#e19e5c91ab2b96353c227e30aa2cbd938cdaafa2"
@@ -4624,38 +4719,17 @@ jss-nested@^6.0.1:
   dependencies:
     warning "^3.0.0"
 
-jss-preset-default@^4.3.0:
-  version "4.5.0"
-  resolved "https://registry.yarnpkg.com/jss-preset-default/-/jss-preset-default-4.5.0.tgz#d3a457012ccd7a551312014e394c23c4b301cadd"
-  dependencies:
-    jss-camel-case "^6.1.0"
-    jss-compose "^5.0.0"
-    jss-default-unit "^8.0.2"
-    jss-expand "^5.3.0"
-    jss-extend "^6.2.0"
-    jss-global "^3.0.0"
-    jss-nested "^6.0.1"
-    jss-props-sort "^6.0.0"
-    jss-template "^1.0.1"
-    jss-vendor-prefixer "^7.0.0"
-
 jss-props-sort@^6.0.0:
   version "6.0.0"
   resolved "https://registry.yarnpkg.com/jss-props-sort/-/jss-props-sort-6.0.0.tgz#9105101a3b5071fab61e2d85ea74cc22e9b16323"
 
-jss-template@^1.0.1:
-  version "1.0.1"
-  resolved "https://registry.yarnpkg.com/jss-template/-/jss-template-1.0.1.tgz#09aed9d86cc547b07f53ef355d7e1777f7da430a"
-  dependencies:
-    warning "^3.0.0"
-
 jss-vendor-prefixer@^7.0.0:
   version "7.0.0"
   resolved "https://registry.yarnpkg.com/jss-vendor-prefixer/-/jss-vendor-prefixer-7.0.0.tgz#0166729650015ef19d9f02437c73667231605c71"
   dependencies:
     css-vendor "^0.3.8"
 
-jss@^9.3.3, jss@^9.7.0:
+jss@^9.8.7:
   version "9.8.7"
   resolved "https://registry.yarnpkg.com/jss/-/jss-9.8.7.tgz#ed9763fc0f2f0260fc8260dac657af61e622ce05"
   dependencies:
@@ -4673,13 +4747,13 @@ jsx-ast-utils@^2.0.0, jsx-ast-utils@^2.0.1:
   dependencies:
     array-includes "^3.0.3"
 
-keycode@^2.1.9:
+keycode@^2.1.7, keycode@^2.1.9:
   version "2.2.0"
   resolved "https://registry.yarnpkg.com/keycode/-/keycode-2.2.0.tgz#3d0af56dc7b8b8e5cba8d0a97f107204eec22b04"
 
 killable@^1.0.0:
-  version "1.0.0"
-  resolved "https://registry.yarnpkg.com/killable/-/killable-1.0.0.tgz#da8b84bd47de5395878f95d64d02f2449fe05e6b"
+  version "1.0.1"
+  resolved "https://registry.yarnpkg.com/killable/-/killable-1.0.1.tgz#4c8ce441187a061c7474fb87ca08e2a638194892"
 
 kind-of@^3.0.2, kind-of@^3.0.3, kind-of@^3.2.0:
   version "3.2.2"
@@ -4761,8 +4835,8 @@ loader-fs-cache@^1.0.0:
     mkdirp "0.5.1"
 
 loader-runner@^2.3.0:
-  version "2.3.0"
-  resolved "https://registry.yarnpkg.com/loader-runner/-/loader-runner-2.3.0.tgz#f482aea82d543e07921700d5a46ef26fdac6b8a2"
+  version "2.4.0"
+  resolved "https://registry.yarnpkg.com/loader-runner/-/loader-runner-2.4.0.tgz#ed47066bfe534d7e84c4c7b9998c2a75607d9357"
 
 loader-utils@^0.2.16:
   version "0.2.17"
@@ -4774,12 +4848,12 @@ loader-utils@^0.2.16:
     object-assign "^4.0.1"
 
 loader-utils@^1.0.2, loader-utils@^1.1.0:
-  version "1.1.0"
-  resolved "https://registry.yarnpkg.com/loader-utils/-/loader-utils-1.1.0.tgz#c98aef488bcceda2ffb5e2de646d6a754429f5cd"
+  version "1.2.3"
+  resolved "https://registry.yarnpkg.com/loader-utils/-/loader-utils-1.2.3.tgz#1ff5dc6911c9f0a062531a4c04b609406108c2c7"
   dependencies:
-    big.js "^3.1.3"
+    big.js "^5.2.2"
     emojis-list "^2.0.0"
-    json5 "^0.5.0"
+    json5 "^1.0.1"
 
 locate-path@^2.0.0:
   version "2.0.0"
@@ -4837,9 +4911,9 @@ lodash.uniq@^4.5.0:
   version "4.5.0"
   resolved "https://registry.yarnpkg.com/lodash.uniq/-/lodash.uniq-4.5.0.tgz#d0225373aeb652adc1bc82e4945339a842754773"
 
-"lodash@>=3.5 <5", lodash@^4.15.0, lodash@^4.17.10, lodash@^4.17.2, lodash@^4.17.3, lodash@^4.17.4, lodash@^4.2.0, lodash@^4.3.0:
-  version "4.17.10"
-  resolved "https://registry.yarnpkg.com/lodash/-/lodash-4.17.10.tgz#1b7793cf7259ea38fb3661d4d38b3260af8ae4e7"
+"lodash@>=3.5 <5", lodash@^4.15.0, lodash@^4.16.2, lodash@^4.17.10, lodash@^4.17.2, lodash@^4.17.3, lodash@^4.17.4, lodash@^4.2.0, lodash@^4.3.0:
+  version "4.17.11"
+  resolved "https://registry.yarnpkg.com/lodash/-/lodash-4.17.11.tgz#b39ea6229ef607ecd89e2c8df12536891cac9b8d"
 
 loglevel@^1.4.1:
   version "1.6.1"
@@ -4849,7 +4923,7 @@ longest@^1.0.1:
   version "1.0.1"
   resolved "https://registry.yarnpkg.com/longest/-/longest-1.0.1.tgz#30a0b2da38f73770e8294a0d22e6625ed77d0097"
 
-loose-envify@^1.0.0, loose-envify@^1.1.0, loose-envify@^1.2.0, loose-envify@^1.3.1:
+loose-envify@^1.0.0, loose-envify@^1.1.0, loose-envify@^1.2.0, loose-envify@^1.3.1, loose-envify@^1.4.0:
   version "1.4.0"
   resolved "https://registry.yarnpkg.com/loose-envify/-/loose-envify-1.4.0.tgz#71ee51fa7be4caec1a63839f7e682d8132d30caf"
   dependencies:
@@ -4871,8 +4945,8 @@ lowercase-keys@^1.0.0:
   resolved "https://registry.yarnpkg.com/lowercase-keys/-/lowercase-keys-1.0.1.tgz#6f9e30b47084d971a7c820ff15a6c5167b74c26f"
 
 lru-cache@^4.0.1:
-  version "4.1.3"
-  resolved "https://registry.yarnpkg.com/lru-cache/-/lru-cache-4.1.3.tgz#a1175cf3496dfc8436c156c334b4955992bce69c"
+  version "4.1.5"
+  resolved "https://registry.yarnpkg.com/lru-cache/-/lru-cache-4.1.5.tgz#8bbe50ea85bed59bc9e33dcab8235ee9bcf443cd"
   dependencies:
     pseudomap "^1.0.2"
     yallist "^2.1.2"
@@ -4903,24 +4977,25 @@ map-visit@^1.0.0:
   dependencies:
     object-visit "^1.0.0"
 
-marked@^0.5.0:
-  version "0.5.0"
-  resolved "https://registry.yarnpkg.com/marked/-/marked-0.5.0.tgz#9e590bad31584a48ff405b33ab1c0dd25172288e"
+marked@^0.6.0:
+  version "0.6.0"
+  resolved "https://registry.yarnpkg.com/marked/-/marked-0.6.0.tgz#a18d01cfdcf8d15c3c455b71c8329e5e0f01faa1"
 
 math-expression-evaluator@^1.2.14:
   version "1.2.17"
   resolved "https://registry.yarnpkg.com/math-expression-evaluator/-/math-expression-evaluator-1.2.17.tgz#de819fdbcd84dccd8fae59c6aeb79615b9d266ac"
 
 math-random@^1.0.1:
-  version "1.0.1"
-  resolved "https://registry.yarnpkg.com/math-random/-/math-random-1.0.1.tgz#8b3aac588b8a66e4975e3cdea67f7bb329601fac"
+  version "1.0.4"
+  resolved "https://registry.yarnpkg.com/math-random/-/math-random-1.0.4.tgz#5dd6943c938548267016d4e34f057583080c514c"
 
 md5.js@^1.3.4:
-  version "1.3.4"
-  resolved "https://registry.yarnpkg.com/md5.js/-/md5.js-1.3.4.tgz#e9bdbde94a20a5ac18b04340fc5764d5b09d901d"
+  version "1.3.5"
+  resolved "https://registry.yarnpkg.com/md5.js/-/md5.js-1.3.5.tgz#b5d07b8e3216e3e27cd728d72f70d1e6a342005f"
   dependencies:
     hash-base "^3.0.0"
     inherits "^2.0.1"
+    safe-buffer "^5.1.2"
 
 media-typer@0.3.0:
   version "0.3.0"
@@ -4959,8 +5034,8 @@ merge-descriptors@1.0.1:
   resolved "https://registry.yarnpkg.com/merge-descriptors/-/merge-descriptors-1.0.1.tgz#b00aaa556dd8b44568150ec9d1b953f3f90cbb61"
 
 merge@^1.2.0:
-  version "1.2.0"
-  resolved "https://registry.yarnpkg.com/merge/-/merge-1.2.0.tgz#7531e39d4949c281a66b8c5a6e0265e8b05894da"
+  version "1.2.1"
+  resolved "https://registry.yarnpkg.com/merge/-/merge-1.2.1.tgz#38bebf80c3220a8a487b6fcfb3941bb11720c145"
 
 methods@~1.1.2:
   version "1.1.2"
@@ -4984,7 +5059,7 @@ micromatch@^2.1.5, micromatch@^2.3.11:
     parse-glob "^3.0.4"
     regex-cache "^0.4.2"
 
-micromatch@^3.1.4, micromatch@^3.1.8:
+micromatch@^3.1.10, micromatch@^3.1.4:
   version "3.1.10"
   resolved "https://registry.yarnpkg.com/micromatch/-/micromatch-3.1.10.tgz#70859bc95c9840952f359a068a3fc49f9ecfac23"
   dependencies:
@@ -5009,29 +5084,25 @@ miller-rabin@^4.0.0:
     bn.js "^4.0.0"
     brorand "^1.0.1"
 
-"mime-db@>= 1.34.0 < 2":
-  version "1.36.0"
-  resolved "https://registry.yarnpkg.com/mime-db/-/mime-db-1.36.0.tgz#5020478db3c7fe93aad7bbcc4dcf869c43363397"
+"mime-db@>= 1.36.0 < 2", mime-db@~1.37.0:
+  version "1.37.0"
+  resolved "https://registry.yarnpkg.com/mime-db/-/mime-db-1.37.0.tgz#0b6a0ce6fdbe9576e25f1f2d2fde8830dc0ad0d8"
 
 mime-db@~1.33.0:
   version "1.33.0"
   resolved "https://registry.yarnpkg.com/mime-db/-/mime-db-1.33.0.tgz#a3492050a5cb9b63450541e39d9788d2272783db"
 
-mime-db@~1.35.0:
-  version "1.35.0"
-  resolved "https://registry.yarnpkg.com/mime-db/-/mime-db-1.35.0.tgz#0569d657466491283709663ad379a99b90d9ab47"
-
 mime-types@2.1.18:
   version "2.1.18"
   resolved "https://registry.yarnpkg.com/mime-types/-/mime-types-2.1.18.tgz#6f323f60a83d11146f831ff11fd66e2fe5503bb8"
   dependencies:
     mime-db "~1.33.0"
 
-mime-types@^2.1.12, mime-types@~2.1.17, mime-types@~2.1.18, mime-types@~2.1.19:
-  version "2.1.19"
-  resolved "https://registry.yarnpkg.com/mime-types/-/mime-types-2.1.19.tgz#71e464537a7ef81c15f2db9d97e913fc0ff606f0"
+mime-types@^2.1.11, mime-types@^2.1.12, mime-types@~2.1.17, mime-types@~2.1.18, mime-types@~2.1.19:
+  version "2.1.21"
+  resolved "https://registry.yarnpkg.com/mime-types/-/mime-types-2.1.21.tgz#28995aa1ecb770742fe6ae7e58f9181c744b3f96"
   dependencies:
-    mime-db "~1.35.0"
+    mime-db "~1.37.0"
 
 mime@1.4.1:
   version "1.4.1"
@@ -5077,16 +5148,16 @@ minimist@~0.0.1:
   version "0.0.10"
   resolved "https://registry.yarnpkg.com/minimist/-/minimist-0.0.10.tgz#de3f98543dbf96082be48ad1a0c7cda836301dcf"
 
-minipass@^2.2.1, minipass@^2.3.3:
-  version "2.3.4"
-  resolved "https://registry.yarnpkg.com/minipass/-/minipass-2.3.4.tgz#4768d7605ed6194d6d576169b9e12ef71e9d9957"
+minipass@^2.2.1, minipass@^2.3.4:
+  version "2.3.5"
+  resolved "https://registry.yarnpkg.com/minipass/-/minipass-2.3.5.tgz#cacebe492022497f656b0f0f51e2682a9ed2d848"
   dependencies:
     safe-buffer "^5.1.2"
     yallist "^3.0.0"
 
-minizlib@^1.1.0:
-  version "1.1.0"
-  resolved "https://registry.yarnpkg.com/minizlib/-/minizlib-1.1.0.tgz#11e13658ce46bc3a70a267aac58359d1e0c29ceb"
+minizlib@^1.1.1:
+  version "1.2.1"
+  resolved "https://registry.yarnpkg.com/minizlib/-/minizlib-1.2.1.tgz#dd27ea6136243c7c880684e8672bb3a45fd9b614"
   dependencies:
     minipass "^2.2.1"
 
@@ -5107,6 +5178,10 @@ ms@2.0.0:
   version "2.0.0"
   resolved "https://registry.yarnpkg.com/ms/-/ms-2.0.0.tgz#5608aeadfc00be6c2901df5f9861788de0d597c8"
 
+ms@^2.1.1:
+  version "2.1.1"
+  resolved "https://registry.yarnpkg.com/ms/-/ms-2.1.1.tgz#30a5864eb3ebb0a66f2ebe6d727af06a09d86e0a"
+
 multicast-dns-service-types@^1.1.0:
   version "1.1.0"
   resolved "https://registry.yarnpkg.com/multicast-dns-service-types/-/multicast-dns-service-types-1.1.0.tgz#899f11d9686e5e05cb91b35d5f0e63b773cfc901"
@@ -5123,8 +5198,8 @@ mute-stream@0.0.7:
   resolved "https://registry.yarnpkg.com/mute-stream/-/mute-stream-0.0.7.tgz#3075ce93bc21b8fab43e1bc4da7e8115ed1e7bab"
 
 nan@^2.9.2:
-  version "2.10.0"
-  resolved "https://registry.yarnpkg.com/nan/-/nan-2.10.0.tgz#96d0cd610ebd58d4b4de9cc0c6828cda99c7548f"
+  version "2.12.1"
+  resolved "https://registry.yarnpkg.com/nan/-/nan-2.12.1.tgz#7b1aa193e9aa86057e3c7bbd0ac448e770925552"
 
 nanomatch@^1.2.9:
   version "1.2.13"
@@ -5147,8 +5222,8 @@ natural-compare@^1.4.0:
   resolved "https://registry.yarnpkg.com/natural-compare/-/natural-compare-1.4.0.tgz#4abebfeed7541f2c27acfb29bdbbd15c8d5ba4f7"
 
 needle@^2.2.1:
-  version "2.2.2"
-  resolved "https://registry.yarnpkg.com/needle/-/needle-2.2.2.tgz#1120ca4c41f2fcc6976fd28a8968afe239929418"
+  version "2.2.4"
+  resolved "https://registry.yarnpkg.com/needle/-/needle-2.2.4.tgz#51931bff82533b1928b7d1d69e01f1b00ffd2a4e"
   dependencies:
     debug "^2.1.2"
     iconv-lite "^0.4.4"
@@ -5159,8 +5234,8 @@ negotiator@0.6.1:
   resolved "https://registry.yarnpkg.com/negotiator/-/negotiator-0.6.1.tgz#2b327184e8992101177b28563fb5e7102acd0ca9"
 
 neo-async@^2.5.0:
-  version "2.5.2"
-  resolved "https://registry.yarnpkg.com/neo-async/-/neo-async-2.5.2.tgz#489105ce7bc54e709d736b195f82135048c50fcc"
+  version "2.6.0"
+  resolved "https://registry.yarnpkg.com/neo-async/-/neo-async-2.6.0.tgz#b9d15e4d71c6762908654b5183ed38b753340835"
 
 next-tick@1:
   version "1.0.0"
@@ -5172,7 +5247,7 @@ no-case@^2.2.0:
   dependencies:
     lower-case "^1.1.1"
 
-node-fetch@^1.0.1:
+node-fetch@1.7.3, node-fetch@^1.0.1:
   version "1.7.3"
   resolved "https://registry.yarnpkg.com/node-fetch/-/node-fetch-1.7.3.tgz#980f6f72d85211a5347c6b2bc18c5b84c3eb47ef"
   dependencies:
@@ -5216,11 +5291,11 @@ node-libs-browser@^2.0.0:
     vm-browserify "0.0.4"
 
 node-notifier@^5.0.2:
-  version "5.2.1"
-  resolved "https://registry.yarnpkg.com/node-notifier/-/node-notifier-5.2.1.tgz#fa313dd08f5517db0e2502e5758d664ac69f9dea"
+  version "5.3.0"
+  resolved "https://registry.yarnpkg.com/node-notifier/-/node-notifier-5.3.0.tgz#c77a4a7b84038733d5fb351aafd8a268bfe19a01"
   dependencies:
     growly "^1.3.0"
-    semver "^5.4.1"
+    semver "^5.5.0"
     shellwords "^0.1.1"
     which "^1.3.0"
 
@@ -5283,8 +5358,8 @@ npm-bundled@^1.0.1:
   resolved "https://registry.yarnpkg.com/npm-bundled/-/npm-bundled-1.0.5.tgz#3c1732b7ba936b3a10325aef616467c0ccbcc979"
 
 npm-packlist@^1.1.6:
-  version "1.1.11"
-  resolved "https://registry.yarnpkg.com/npm-packlist/-/npm-packlist-1.1.11.tgz#84e8c683cbe7867d34b1d357d893ce29e28a02de"
+  version "1.2.0"
+  resolved "https://registry.yarnpkg.com/npm-packlist/-/npm-packlist-1.2.0.tgz#55a60e793e272f00862c7089274439a4cc31fc7f"
   dependencies:
     ignore-walk "^3.0.1"
     npm-bundled "^1.0.1"
@@ -5304,13 +5379,9 @@ npmlog@^4.0.2:
     gauge "~2.7.3"
     set-blocking "~2.0.0"
 
-nprogress@^0.2.0:
-  version "0.2.0"
-  resolved "https://registry.yarnpkg.com/nprogress/-/nprogress-0.2.0.tgz#cb8f34c53213d895723fcbab907e9422adbcafb1"
-
 nth-check@~1.0.1:
-  version "1.0.1"
-  resolved "https://registry.yarnpkg.com/nth-check/-/nth-check-1.0.1.tgz#9929acdf628fc2c41098deab82ac580cf149aae4"
+  version "1.0.2"
+  resolved "https://registry.yarnpkg.com/nth-check/-/nth-check-1.0.2.tgz#b2bd295c37e3dd58a3bf0700376663ba4d9cf05c"
   dependencies:
     boolbase "~1.0.0"
 
@@ -5343,8 +5414,8 @@ object-copy@^0.1.0:
     kind-of "^3.0.3"
 
 object-hash@^1.1.4:
-  version "1.3.0"
-  resolved "https://registry.yarnpkg.com/object-hash/-/object-hash-1.3.0.tgz#76d9ba6ff113cf8efc0d996102851fe6723963e2"
+  version "1.3.1"
+  resolved "https://registry.yarnpkg.com/object-hash/-/object-hash-1.3.1.tgz#fde452098a951cb145f039bb7d455449ddc126df"
 
 object-keys@^1.0.12:
   version "1.0.12"
@@ -5356,6 +5427,15 @@ object-visit@^1.0.0:
   dependencies:
     isobject "^3.0.0"
 
+object.fromentries@^2.0.0:
+  version "2.0.0"
+  resolved "https://registry.yarnpkg.com/object.fromentries/-/object.fromentries-2.0.0.tgz#49a543d92151f8277b3ac9600f1e930b189d30ab"
+  dependencies:
+    define-properties "^1.1.2"
+    es-abstract "^1.11.0"
+    function-bind "^1.1.1"
+    has "^1.0.1"
+
 object.omit@^2.0.0:
   version "2.0.1"
   resolved "https://registry.yarnpkg.com/object.omit/-/object.omit-2.0.1.tgz#1a9c744829f39dbb858c76ca3579ae2a54ebd1fa"
@@ -5402,8 +5482,8 @@ opn@5.2.0:
     is-wsl "^1.1.0"
 
 opn@^5.1.0:
-  version "5.3.0"
-  resolved "https://registry.yarnpkg.com/opn/-/opn-5.3.0.tgz#64871565c863875f052cfdf53d3e3cb5adb53b1c"
+  version "5.4.0"
+  resolved "https://registry.yarnpkg.com/opn/-/opn-5.4.0.tgz#cb545e7aab78562beb11aa3bfabc7042e1761035"
   dependencies:
     is-wsl "^1.1.0"
 
@@ -5498,8 +5578,8 @@ package-json@^4.0.0:
     semver "^5.1.0"
 
 pako@~1.0.5:
-  version "1.0.6"
-  resolved "https://registry.yarnpkg.com/pako/-/pako-1.0.6.tgz#0101211baa70c4bca4a0f63f2206e97b7dfaf258"
+  version "1.0.8"
+  resolved "https://registry.yarnpkg.com/pako/-/pako-1.0.8.tgz#6844890aab9c635af868ad5fecc62e8acbba3ea4"
 
 param-case@2.1.x:
   version "2.1.1"
@@ -5578,7 +5658,7 @@ path-key@^2.0.0:
   version "2.0.1"
   resolved "https://registry.yarnpkg.com/path-key/-/path-key-2.0.1.tgz#411cadb574c5a140d3a4b1910d40d80cc9f40b40"
 
-path-parse@^1.0.5:
+path-parse@^1.0.5, path-parse@^1.0.6:
   version "1.0.6"
   resolved "https://registry.yarnpkg.com/path-parse/-/path-parse-1.0.6.tgz#d62dbb5679405d72c4737ec58600e9ddcf06d24c"
 
@@ -5611,8 +5691,8 @@ path-type@^2.0.0:
     pify "^2.0.0"
 
 pbkdf2@^3.0.3:
-  version "3.0.16"
-  resolved "https://registry.yarnpkg.com/pbkdf2/-/pbkdf2-3.0.16.tgz#7404208ec6b01b62d85bf83853a8064f8d9c2a5c"
+  version "3.0.17"
+  resolved "https://registry.yarnpkg.com/pbkdf2/-/pbkdf2-3.0.17.tgz#976c206530617b14ebb32114239f7b09336e93a6"
   dependencies:
     create-hash "^1.1.2"
     create-hmac "^1.1.4"
@@ -5659,12 +5739,12 @@ pluralize@^7.0.0:
   resolved "https://registry.yarnpkg.com/pluralize/-/pluralize-7.0.0.tgz#298b89df8b93b0221dbf421ad2b1b1ea23fc6777"
 
 popper.js@^1.14.1:
-  version "1.14.4"
-  resolved "https://registry.yarnpkg.com/popper.js/-/popper.js-1.14.4.tgz#8eec1d8ff02a5a3a152dd43414a15c7b79fd69b6"
+  version "1.14.6"
+  resolved "https://registry.yarnpkg.com/popper.js/-/popper.js-1.14.6.tgz#ab20dd4edf9288b8b3b6531c47c361107b60b4b0"
 
 portfinder@^1.0.9:
-  version "1.0.17"
-  resolved "https://registry.yarnpkg.com/portfinder/-/portfinder-1.0.17.tgz#a8a1691143e46c4735edefcf4fbcccedad26456a"
+  version "1.0.20"
+  resolved "https://registry.yarnpkg.com/portfinder/-/portfinder-1.0.20.tgz#bea68632e54b2e13ab7b0c4775e9b41bf270e44a"
   dependencies:
     async "^1.5.2"
     debug "^2.2.0"
@@ -5834,8 +5914,8 @@ postcss-minify-selectors@^2.0.4:
     postcss-selector-parser "^2.0.0"
 
 postcss-modules-extract-imports@^1.0.0:
-  version "1.1.0"
-  resolved "https://registry.yarnpkg.com/postcss-modules-extract-imports/-/postcss-modules-extract-imports-1.1.0.tgz#b614c9720be6816eaee35fb3a5faa1dba6a05ddb"
+  version "1.2.1"
+  resolved "https://registry.yarnpkg.com/postcss-modules-extract-imports/-/postcss-modules-extract-imports-1.2.1.tgz#dc87e34148ec7eab5f791f7cd5849833375b741a"
   dependencies:
     postcss "^6.0.1"
 
@@ -5929,8 +6009,8 @@ postcss-unique-selectors@^2.0.2:
     uniqs "^2.0.0"
 
 postcss-value-parser@^3.0.1, postcss-value-parser@^3.0.2, postcss-value-parser@^3.1.1, postcss-value-parser@^3.1.2, postcss-value-parser@^3.2.3, postcss-value-parser@^3.3.0:
-  version "3.3.0"
-  resolved "https://registry.yarnpkg.com/postcss-value-parser/-/postcss-value-parser-3.3.0.tgz#87f38f9f18f774a4ab4c8a232f5c5ce8872a9d15"
+  version "3.3.1"
+  resolved "https://registry.yarnpkg.com/postcss-value-parser/-/postcss-value-parser-3.3.1.tgz#9ff822547e2893213cf1c30efa51ac5fd1ba8281"
 
 postcss-zindex@^2.0.1:
   version "2.2.0"
@@ -5987,12 +6067,6 @@ pretty-format@^20.0.3:
     ansi-regex "^2.1.1"
     ansi-styles "^3.0.0"
 
-prismjs@^1.8.4:
-  version "1.15.0"
-  resolved "https://registry.yarnpkg.com/prismjs/-/prismjs-1.15.0.tgz#8801d332e472091ba8def94976c8877ad60398d9"
-  optionalDependencies:
-    clipboard "^2.0.0"
-
 private@^0.1.6, private@^0.1.7, private@^0.1.8:
   version "0.1.8"
   resolved "https://registry.yarnpkg.com/private/-/private-0.1.8.tgz#2381edb3689f7a53d653190060fcf822d2f368ff"
@@ -6006,8 +6080,8 @@ process@^0.11.10:
   resolved "https://registry.yarnpkg.com/process/-/process-0.11.10.tgz#7332300e840161bda3e69a1d1d91a7d4bc16f182"
 
 progress@^2.0.0:
-  version "2.0.0"
-  resolved "https://registry.yarnpkg.com/progress/-/progress-2.0.0.tgz#8a1be366bf8fc23db2bd23f10c6fe920b4389d1f"
+  version "2.0.3"
+  resolved "https://registry.yarnpkg.com/progress/-/progress-2.0.3.tgz#7e8cf8d8f5b8f239c1bc68beb4eb78567d572ef8"
 
 promise@8.0.1:
   version "8.0.1"
@@ -6021,14 +6095,14 @@ promise@^7.1.1:
   dependencies:
     asap "~2.0.3"
 
-prop-types@^15.5.10, prop-types@^15.5.7, prop-types@^15.5.8, prop-types@^15.6.0, prop-types@^15.6.1, prop-types@^15.6.2:
+prop-types@^15.5.10, prop-types@^15.5.4, prop-types@^15.5.8, prop-types@^15.6.0, prop-types@^15.6.1, prop-types@^15.6.2:
   version "15.6.2"
   resolved "https://registry.yarnpkg.com/prop-types/-/prop-types-15.6.2.tgz#05d5ca77b4453e985d60fc7ff8c859094a497102"
   dependencies:
     loose-envify "^1.3.1"
     object-assign "^4.1.1"
 
-proxy-addr@~2.0.3:
+proxy-addr@~2.0.4:
   version "2.0.4"
   resolved "https://registry.yarnpkg.com/proxy-addr/-/proxy-addr-2.0.4.tgz#ecfc733bf22ff8c6f407fa275327b9ab67e48b93"
   dependencies:
@@ -6043,19 +6117,20 @@ pseudomap@^1.0.2:
   version "1.0.2"
   resolved "https://registry.yarnpkg.com/pseudomap/-/pseudomap-1.0.2.tgz#f052a28da70e618917ef0a8ac34c1ae5a68286b3"
 
-psl@^1.1.24, psl@^1.1.7:
-  version "1.1.29"
-  resolved "https://registry.yarnpkg.com/psl/-/psl-1.1.29.tgz#60f580d360170bb722a797cc704411e6da850c67"
+psl@^1.1.24, psl@^1.1.28, psl@^1.1.7:
+  version "1.1.31"
+  resolved "https://registry.yarnpkg.com/psl/-/psl-1.1.31.tgz#e9aa86d0101b5b105cbe93ac6b784cd547276184"
 
 public-encrypt@^4.0.0:
-  version "4.0.2"
-  resolved "https://registry.yarnpkg.com/public-encrypt/-/public-encrypt-4.0.2.tgz#46eb9107206bf73489f8b85b69d91334c6610994"
+  version "4.0.3"
+  resolved "https://registry.yarnpkg.com/public-encrypt/-/public-encrypt-4.0.3.tgz#4fcc9d77a07e48ba7527e7cbe0de33d0701331e0"
   dependencies:
     bn.js "^4.1.0"
     browserify-rsa "^4.0.0"
     create-hash "^1.1.0"
     parse-asn1 "^5.0.0"
     randombytes "^2.0.1"
+    safe-buffer "^5.1.2"
 
 punycode@1.3.2:
   version "1.3.2"
@@ -6065,7 +6140,7 @@ punycode@^1.2.4, punycode@^1.3.2, punycode@^1.4.1:
   version "1.4.1"
   resolved "https://registry.yarnpkg.com/punycode/-/punycode-1.4.1.tgz#c0d5a63b2718800ad8e1eb0fa5269c84dd41845e"
 
-punycode@^2.1.0:
+punycode@^2.1.0, punycode@^2.1.1:
   version "2.1.1"
   resolved "https://registry.yarnpkg.com/punycode/-/punycode-2.1.1.tgz#b58b010ac40c22c5657616c8d2c2c02c7bf479ec"
 
@@ -6077,14 +6152,14 @@ q@^1.1.2:
   version "1.5.1"
   resolved "https://registry.yarnpkg.com/q/-/q-1.5.1.tgz#7e32f75b41381291d04611f1bf14109ac00651d7"
 
-qs@6.5.1:
-  version "6.5.1"
-  resolved "https://registry.yarnpkg.com/qs/-/qs-6.5.1.tgz#349cdf6eef89ec45c12d7d5eb3fc0c870343a6d8"
-
-qs@~6.5.2:
+qs@6.5.2, qs@~6.5.2:
   version "6.5.2"
   resolved "https://registry.yarnpkg.com/qs/-/qs-6.5.2.tgz#cb3ae806e8740444584ef154ce8ee98d403f3e36"
 
+qs@^6.3.0:
+  version "6.6.0"
+  resolved "https://registry.yarnpkg.com/qs/-/qs-6.6.0.tgz#a99c0f69a8d26bf7ef012f871cdabb0aee4424c2"
+
 query-string@^4.1.0:
   version "4.3.4"
   resolved "https://registry.yarnpkg.com/query-string/-/query-string-4.3.4.tgz#bbb693b9ca915c232515b228b1a02b609043dbeb"
@@ -6092,6 +6167,10 @@ query-string@^4.1.0:
     object-assign "^4.1.0"
     strict-uri-encode "^1.0.0"
 
+querystring-browser@^1.0.4:
+  version "1.0.4"
+  resolved "https://registry.yarnpkg.com/querystring-browser/-/querystring-browser-1.0.4.tgz#f2e35881840a819bc7b1bf597faf0979e6622dc6"
+
 querystring-es3@^0.2.0:
   version "0.2.1"
   resolved "https://registry.yarnpkg.com/querystring-es3/-/querystring-es3-0.2.1.tgz#9ec61f79049875707d69414596fd907a4d711e73"
@@ -6101,8 +6180,8 @@ querystring@0.2.0:
   resolved "https://registry.yarnpkg.com/querystring/-/querystring-0.2.0.tgz#b209849203bb25df820da756e747005878521620"
 
 querystringify@^2.0.0:
-  version "2.0.0"
-  resolved "https://registry.yarnpkg.com/querystringify/-/querystringify-2.0.0.tgz#fa3ed6e68eb15159457c89b37bc6472833195755"
+  version "2.1.0"
+  resolved "https://registry.yarnpkg.com/querystringify/-/querystringify-2.1.0.tgz#7ded8dfbf7879dcc60d0a644ac6754b283ad17ef"
 
 raf@3.4.0:
   version "3.4.0"
@@ -6110,13 +6189,13 @@ raf@3.4.0:
   dependencies:
     performance-now "^2.1.0"
 
-ramda@^0.25.0:
-  version "0.25.0"
-  resolved "https://registry.yarnpkg.com/ramda/-/ramda-0.25.0.tgz#8fdf68231cffa90bc2f9460390a0cb74a29b29a9"
+ramda@^0.26:
+  version "0.26.1"
+  resolved "https://registry.yarnpkg.com/ramda/-/ramda-0.26.1.tgz#8d41351eb8111c55353617fc3bbffad8e4d35d06"
 
 randomatic@^3.0.0:
-  version "3.1.0"
-  resolved "https://registry.yarnpkg.com/randomatic/-/randomatic-3.1.0.tgz#36f2ca708e9e567f5ed2ec01949026d50aa10116"
+  version "3.1.1"
+  resolved "https://registry.yarnpkg.com/randomatic/-/randomatic-3.1.1.tgz#b776efc59375984e36c537b2f51a1f0aff0da1ed"
   dependencies:
     is-number "^4.0.0"
     kind-of "^6.0.0"
@@ -6139,13 +6218,13 @@ range-parser@1.2.0, range-parser@^1.0.3, range-parser@~1.2.0:
   version "1.2.0"
   resolved "https://registry.yarnpkg.com/range-parser/-/range-parser-1.2.0.tgz#f49be6b487894ddc40dcc94a322f611092e00d5e"
 
-raw-body@2.3.2:
-  version "2.3.2"
-  resolved "https://registry.yarnpkg.com/raw-body/-/raw-body-2.3.2.tgz#bcd60c77d3eb93cde0050295c3f379389bc88f89"
+raw-body@2.3.3:
+  version "2.3.3"
+  resolved "https://registry.yarnpkg.com/raw-body/-/raw-body-2.3.3.tgz#1b324ece6b5706e153855bc1148c65bb7f6ea0c3"
   dependencies:
     bytes "3.0.0"
-    http-errors "1.6.2"
-    iconv-lite "0.4.19"
+    http-errors "1.6.3"
+    iconv-lite "0.4.23"
     unpipe "1.0.0"
 
 rc@^1.0.1, rc@^1.1.6, rc@^1.2.7:
@@ -6166,6 +6245,14 @@ react-base16-styling@^0.6.0:
     lodash.flow "^3.3.0"
     pure-color "^1.2.0"
 
+react-cookie@^3.0.8:
+  version "3.0.8"
+  resolved "https://registry.yarnpkg.com/react-cookie/-/react-cookie-3.0.8.tgz#fd413d9940d5f2397700548e3d1faed0330e8bfd"
+  dependencies:
+    "@types/hoist-non-react-statics" "^3.0.1"
+    hoist-non-react-statics "^3.0.0"
+    universal-cookie "^3.0.7"
+
 react-copy-to-clipboard@^5.0.1:
   version "5.0.1"
   resolved "https://registry.yarnpkg.com/react-copy-to-clipboard/-/react-copy-to-clipboard-5.0.1.tgz#8eae107bb400be73132ed3b6a7b4fb156090208e"
@@ -6174,8 +6261,8 @@ react-copy-to-clipboard@^5.0.1:
     prop-types "^15.5.8"
 
 react-dev-utils@^5.0.1:
-  version "5.0.1"
-  resolved "https://registry.yarnpkg.com/react-dev-utils/-/react-dev-utils-5.0.1.tgz#1f396e161fe44b595db1b186a40067289bf06613"
+  version "5.0.3"
+  resolved "https://registry.yarnpkg.com/react-dev-utils/-/react-dev-utils-5.0.3.tgz#92f97668f03deb09d7fa11ea288832a8c756e35e"
   dependencies:
     address "1.0.3"
     babel-code-frame "6.26.0"
@@ -6189,38 +6276,38 @@ react-dev-utils@^5.0.1:
     inquirer "3.3.0"
     is-root "1.0.0"
     opn "5.2.0"
-    react-error-overlay "^4.0.0"
+    react-error-overlay "^4.0.1"
     recursive-readdir "2.2.1"
     shell-quote "1.6.1"
-    sockjs-client "1.1.4"
+    sockjs-client "1.1.5"
     strip-ansi "3.0.1"
     text-table "0.2.0"
 
 react-dom@^16.4.2:
-  version "16.4.2"
-  resolved "https://registry.yarnpkg.com/react-dom/-/react-dom-16.4.2.tgz#4afed569689f2c561d2b8da0b819669c38a0bda4"
+  version "16.7.0"
+  resolved "https://registry.yarnpkg.com/react-dom/-/react-dom-16.7.0.tgz#a17b2a7ca89ee7390bc1ed5eb81783c7461748b8"
   dependencies:
-    fbjs "^0.8.16"
     loose-envify "^1.1.0"
     object-assign "^4.1.1"
-    prop-types "^15.6.0"
+    prop-types "^15.6.2"
+    scheduler "^0.12.0"
 
 react-dropzone@^5.0.1:
-  version "5.0.1"
-  resolved "https://registry.yarnpkg.com/react-dropzone/-/react-dropzone-5.0.1.tgz#3ed201215794c0f650c6f25a8311a9d96d35ebb6"
+  version "5.1.1"
+  resolved "https://registry.yarnpkg.com/react-dropzone/-/react-dropzone-5.1.1.tgz#b05613ea22f1ab71aa1f7cf5367df7b19468a2f3"
   dependencies:
     attr-accept "^1.1.3"
-    prop-types "^15.5.7"
+    prop-types "^15.6.2"
 
-react-error-overlay@^4.0.0:
-  version "4.0.0"
-  resolved "https://registry.yarnpkg.com/react-error-overlay/-/react-error-overlay-4.0.0.tgz#d198408a85b4070937a98667f500c832f86bd5d4"
+react-error-overlay@^4.0.1:
+  version "4.0.1"
+  resolved "https://registry.yarnpkg.com/react-error-overlay/-/react-error-overlay-4.0.1.tgz#417addb0814a90f3a7082eacba7cee588d00da89"
 
-react-event-listener@^0.6.2:
-  version "0.6.3"
-  resolved "https://registry.yarnpkg.com/react-event-listener/-/react-event-listener-0.6.3.tgz#8eab88129a76e095ed8aa684c29679eded1e843d"
+react-event-listener@^0.6.0, react-event-listener@^0.6.2:
+  version "0.6.5"
+  resolved "https://registry.yarnpkg.com/react-event-listener/-/react-event-listener-0.6.5.tgz#d374dbe5da485c9f9d4702f0e76971afbe9b6b2e"
   dependencies:
-    "@babel/runtime" "7.0.0-rc.1"
+    "@babel/runtime" "7.2.0"
     prop-types "^15.6.0"
     warning "^4.0.1"
 
@@ -6230,6 +6317,10 @@ react-highlight@^0.12.0:
   dependencies:
     highlight.js "^9.11.0"
 
+react-is@^16.3.2, react-is@^16.6.3, react-is@^16.7.0:
+  version "16.7.0"
+  resolved "https://registry.yarnpkg.com/react-is/-/react-is-16.7.0.tgz#c1bd21c64f1f1364c6f70695ec02d69392f41bfa"
+
 react-json-view@^1.19.1:
   version "1.19.1"
   resolved "https://registry.yarnpkg.com/react-json-view/-/react-json-view-1.19.1.tgz#95d8e59e024f08a25e5dc8f076ae304eed97cf5c"
@@ -6239,16 +6330,6 @@ react-json-view@^1.19.1:
     react-lifecycles-compat "^3.0.4"
     react-textarea-autosize "^6.1.0"
 
-react-jss@^8.1.0:
-  version "8.6.1"
-  resolved "https://registry.yarnpkg.com/react-jss/-/react-jss-8.6.1.tgz#a06e2e1d2c4d91b4d11befda865e6c07fbd75252"
-  dependencies:
-    hoist-non-react-statics "^2.5.0"
-    jss "^9.7.0"
-    jss-preset-default "^4.3.0"
-    prop-types "^15.6.0"
-    theming "^1.3.0"
-
 react-lifecycles-compat@^3.0.2, react-lifecycles-compat@^3.0.4:
   version "3.0.4"
   resolved "https://registry.yarnpkg.com/react-lifecycles-compat/-/react-lifecycles-compat-3.0.4.tgz#4f1a273afdfc8f3488a8c516bfda78f872352362"
@@ -6265,8 +6346,8 @@ react-router-dom@^4.3.1:
     warning "^4.0.1"
 
 react-router-hash-link@^1.2.0:
-  version "1.2.0"
-  resolved "https://registry.yarnpkg.com/react-router-hash-link/-/react-router-hash-link-1.2.0.tgz#ce824cc5f0502ce9b0686bb6dd9c08659b24094c"
+  version "1.2.1"
+  resolved "https://registry.yarnpkg.com/react-router-hash-link/-/react-router-hash-link-1.2.1.tgz#da3b6384e5bff90e9b2172d2e689a994646f2f45"
   dependencies:
     prop-types "^15.6.0"
 
@@ -6327,6 +6408,35 @@ react-scripts@1.1.4:
   optionalDependencies:
     fsevents "^1.1.3"
 
+react-swipeable-views-core@^0.13.0:
+  version "0.13.0"
+  resolved "https://registry.yarnpkg.com/react-swipeable-views-core/-/react-swipeable-views-core-0.13.0.tgz#6bf8a8132a756355444537672a14e84b1e3b53c2"
+  dependencies:
+    "@babel/runtime" "7.0.0"
+    warning "^4.0.1"
+
+react-swipeable-views-utils@^0.13.0:
+  version "0.13.0"
+  resolved "https://registry.yarnpkg.com/react-swipeable-views-utils/-/react-swipeable-views-utils-0.13.0.tgz#0ea17aa67f88a69d534c79d591f8d82ef98346a4"
+  dependencies:
+    "@babel/runtime" "7.0.0"
+    fbjs "^0.8.4"
+    keycode "^2.1.7"
+    prop-types "^15.6.0"
+    react-event-listener "^0.6.0"
+    react-swipeable-views-core "^0.13.0"
+
+react-swipeable-views@^0.13.0:
+  version "0.13.0"
+  resolved "https://registry.yarnpkg.com/react-swipeable-views/-/react-swipeable-views-0.13.0.tgz#a200cef1005d55af6a27b97048afe9a4056e0ab8"
+  dependencies:
+    "@babel/runtime" "7.0.0"
+    dom-helpers "^3.2.1"
+    prop-types "^15.5.4"
+    react-swipeable-views-core "^0.13.0"
+    react-swipeable-views-utils "^0.13.0"
+    warning "^4.0.1"
+
 react-textarea-autosize@^6.1.0:
   version "6.1.0"
   resolved "https://registry.yarnpkg.com/react-textarea-autosize/-/react-textarea-autosize-6.1.0.tgz#df91387f8a8f22020b77e3833c09829d706a09a5"
@@ -6334,22 +6444,22 @@ react-textarea-autosize@^6.1.0:
     prop-types "^15.6.0"
 
 react-transition-group@^2.2.1:
-  version "2.4.0"
-  resolved "https://registry.yarnpkg.com/react-transition-group/-/react-transition-group-2.4.0.tgz#1d9391fabfd82e016f26fabd1eec329dbd922b5a"
+  version "2.5.3"
+  resolved "https://registry.yarnpkg.com/react-transition-group/-/react-transition-group-2.5.3.tgz#26de363cab19e5c88ae5dbae105c706cf953bb92"
   dependencies:
     dom-helpers "^3.3.1"
-    loose-envify "^1.3.1"
+    loose-envify "^1.4.0"
     prop-types "^15.6.2"
     react-lifecycles-compat "^3.0.4"
 
 react@^16.4.2:
-  version "16.4.2"
-  resolved "https://registry.yarnpkg.com/react/-/react-16.4.2.tgz#2cd90154e3a9d9dd8da2991149fdca3c260e129f"
+  version "16.7.0"
+  resolved "https://registry.yarnpkg.com/react/-/react-16.7.0.tgz#b674ec396b0a5715873b350446f7ea0802ab6381"
   dependencies:
-    fbjs "^0.8.16"
     loose-envify "^1.1.0"
     object-assign "^4.1.1"
-    prop-types "^15.6.0"
+    prop-types "^15.6.2"
+    scheduler "^0.12.0"
 
 read-pkg-up@^1.0.1:
   version "1.0.1"
@@ -6402,16 +6512,34 @@ readable-stream@^2.0.1, readable-stream@^2.0.2, readable-stream@^2.0.6, readable
     string_decoder "~1.1.1"
     util-deprecate "~1.0.1"
 
+readable-stream@^3.0.6:
+  version "3.1.1"
+  resolved "https://registry.yarnpkg.com/readable-stream/-/readable-stream-3.1.1.tgz#ed6bbc6c5ba58b090039ff18ce670515795aeb06"
+  dependencies:
+    inherits "^2.0.3"
+    string_decoder "^1.1.1"
+    util-deprecate "^1.0.1"
+
 readdirp@^2.0.0:
-  version "2.1.0"
-  resolved "https://registry.yarnpkg.com/readdirp/-/readdirp-2.1.0.tgz#4ed0ad060df3073300c48440373f72d1cc642d78"
+  version "2.2.1"
+  resolved "https://registry.yarnpkg.com/readdirp/-/readdirp-2.2.1.tgz#0e87622a3325aa33e892285caf8b4e846529a525"
   dependencies:
-    graceful-fs "^4.1.2"
-    minimatch "^3.0.2"
+    graceful-fs "^4.1.11"
+    micromatch "^3.1.10"
     readable-stream "^2.0.2"
-    set-immediate-shim "^1.0.1"
 
-recompose@^0.28.0, recompose@^0.28.2:
+"recompose@0.28.0 - 0.30.0":
+  version "0.30.0"
+  resolved "https://registry.yarnpkg.com/recompose/-/recompose-0.30.0.tgz#82773641b3927e8c7d24a0d87d65aeeba18aabd0"
+  dependencies:
+    "@babel/runtime" "^7.0.0"
+    change-emitter "^0.1.2"
+    fbjs "^0.8.1"
+    hoist-non-react-statics "^2.3.1"
+    react-lifecycles-compat "^3.0.2"
+    symbol-observable "^1.0.4"
+
+recompose@^0.28.2:
   version "0.28.2"
   resolved "https://registry.yarnpkg.com/recompose/-/recompose-0.28.2.tgz#19e679227bdf979e0d31b73ffe7ae38c9194f4a7"
   dependencies:
@@ -6486,6 +6614,10 @@ regexpp@^1.0.1:
   version "1.1.0"
   resolved "https://registry.yarnpkg.com/regexpp/-/regexpp-1.1.0.tgz#0e3516dd0b7904f413d2d4193dce4618c3a689ab"
 
+regexpp@^2.0.1:
+  version "2.0.1"
+  resolved "https://registry.yarnpkg.com/regexpp/-/regexpp-2.0.1.tgz#8d19d31cf632482b589049f8281f93dbcba4d07f"
+
 regexpu-core@^1.0.0:
   version "1.0.0"
   resolved "https://registry.yarnpkg.com/regexpu-core/-/regexpu-core-1.0.0.tgz#86a763f58ee4d7c2f6b102e4764050de7ed90c6b"
@@ -6534,14 +6666,14 @@ remove-trailing-separator@^1.0.1:
   resolved "https://registry.yarnpkg.com/remove-trailing-separator/-/remove-trailing-separator-1.1.0.tgz#c24bce2a283adad5bc3f58e0d48249b92379d8ef"
 
 renderkid@^2.0.1:
-  version "2.0.1"
-  resolved "https://registry.yarnpkg.com/renderkid/-/renderkid-2.0.1.tgz#898cabfc8bede4b7b91135a3ffd323e58c0db319"
+  version "2.0.2"
+  resolved "https://registry.yarnpkg.com/renderkid/-/renderkid-2.0.2.tgz#12d310f255360c07ad8fde253f6c9e9de372d2aa"
   dependencies:
     css-select "^1.1.0"
-    dom-converter "~0.1"
+    dom-converter "~0.2"
     htmlparser2 "~3.3.0"
     strip-ansi "^3.0.0"
-    utila "~0.3"
+    utila "^0.4.0"
 
 repeat-element@^1.1.2:
   version "1.1.3"
@@ -6644,11 +6776,11 @@ resolve@1.6.0:
   dependencies:
     path-parse "^1.0.5"
 
-resolve@^1.3.2, resolve@^1.5.0, resolve@^1.6.0:
-  version "1.8.1"
-  resolved "https://registry.yarnpkg.com/resolve/-/resolve-1.8.1.tgz#82f1ec19a423ac1fbd080b0bab06ba36e84a7a26"
+resolve@^1.3.2, resolve@^1.5.0, resolve@^1.6.0, resolve@^1.8.1, resolve@^1.9.0:
+  version "1.9.0"
+  resolved "https://registry.yarnpkg.com/resolve/-/resolve-1.9.0.tgz#a14c6fdfa8f92a7df1d996cb7105fa744658ea06"
   dependencies:
-    path-parse "^1.0.5"
+    path-parse "^1.0.6"
 
 restore-cursor@^2.0.0:
   version "2.0.0"
@@ -6661,21 +6793,17 @@ ret@~0.1.10:
   version "0.1.15"
   resolved "https://registry.yarnpkg.com/ret/-/ret-0.1.15.tgz#b8a4825d5bdb1fc3f6f53c2bc33f81388681c7bc"
 
-reverse-arguments@1.0.0:
-  version "1.0.0"
-  resolved "https://registry.yarnpkg.com/reverse-arguments/-/reverse-arguments-1.0.0.tgz#c28095a3a921ac715d61834ddece9027992667cd"
-
 right-align@^0.1.1:
   version "0.1.3"
   resolved "https://registry.yarnpkg.com/right-align/-/right-align-0.1.3.tgz#61339b722fe6a3515689210d24e14c96148613ef"
   dependencies:
     align-text "^0.1.1"
 
-rimraf@^2.2.8, rimraf@^2.6.1:
-  version "2.6.2"
-  resolved "https://registry.yarnpkg.com/rimraf/-/rimraf-2.6.2.tgz#2ed8150d24a16ea8651e6d6ef0f47c4158ce7a36"
+rimraf@^2.2.8, rimraf@^2.6.1, rimraf@~2.6.2:
+  version "2.6.3"
+  resolved "https://registry.yarnpkg.com/rimraf/-/rimraf-2.6.3.tgz#b2d104fe0d8fb27cf9e0a1cda8262dd3833c6cab"
   dependencies:
-    glob "^7.0.5"
+    glob "^7.1.3"
 
 ripemd160@^2.0.0, ripemd160@^2.0.1:
   version "2.0.2"
@@ -6700,10 +6828,6 @@ rx-lite@*, rx-lite@^4.0.8:
   version "4.0.8"
   resolved "https://registry.yarnpkg.com/rx-lite/-/rx-lite-4.0.8.tgz#0b1e11af8bc44836f04a6407e92da42467b79444"
 
-safe-buffer@5.1.1:
-  version "5.1.1"
-  resolved "https://registry.yarnpkg.com/safe-buffer/-/safe-buffer-5.1.1.tgz#893312af69b2123def71f57889001671eeb2c853"
-
 safe-buffer@5.1.2, safe-buffer@^5.0.1, safe-buffer@^5.1.0, safe-buffer@^5.1.1, safe-buffer@^5.1.2, safe-buffer@~5.1.0, safe-buffer@~5.1.1:
   version "5.1.2"
   resolved "https://registry.yarnpkg.com/safe-buffer/-/safe-buffer-5.1.2.tgz#991ec69d296e0313747d59bdfd2b745c35f8828d"
@@ -6734,6 +6858,13 @@ sax@^1.2.1, sax@^1.2.4, sax@~1.2.1:
   version "1.2.4"
   resolved "https://registry.yarnpkg.com/sax/-/sax-1.2.4.tgz#2816234e2378bddc4e5354fab5caa895df7100d9"
 
+scheduler@^0.12.0:
+  version "0.12.0"
+  resolved "https://registry.yarnpkg.com/scheduler/-/scheduler-0.12.0.tgz#8ab17699939c0aedc5a196a657743c496538647b"
+  dependencies:
+    loose-envify "^1.1.0"
+    object-assign "^4.1.1"
+
 schema-utils@^0.3.0:
   version "0.3.0"
   resolved "https://registry.yarnpkg.com/schema-utils/-/schema-utils-0.3.0.tgz#f5877222ce3e931edae039f17eb3716e7137f8cf"
@@ -6744,13 +6875,9 @@ select-hose@^2.0.0:
   version "2.0.0"
   resolved "https://registry.yarnpkg.com/select-hose/-/select-hose-2.0.0.tgz#625d8658f865af43ec962bfc376a37359a4994ca"
 
-select@^1.1.2:
-  version "1.1.2"
-  resolved "https://registry.yarnpkg.com/select/-/select-1.1.2.tgz#0e7350acdec80b1108528786ec1d4418d11b396d"
-
 selfsigned@^1.9.1:
-  version "1.10.3"
-  resolved "https://registry.yarnpkg.com/selfsigned/-/selfsigned-1.10.3.tgz#d628ecf9e3735f84e8bafba936b3cf85bea43823"
+  version "1.10.4"
+  resolved "https://registry.yarnpkg.com/selfsigned/-/selfsigned-1.10.4.tgz#cdd7eccfca4ed7635d47a08bf2d5d3074092e2cd"
   dependencies:
     node-forge "0.7.5"
 
@@ -6760,9 +6887,9 @@ semver-diff@^2.0.0:
   dependencies:
     semver "^5.0.3"
 
-"semver@2 || 3 || 4 || 5", semver@^5.0.3, semver@^5.1.0, semver@^5.3.0, semver@^5.4.1:
-  version "5.5.1"
-  resolved "https://registry.yarnpkg.com/semver/-/semver-5.5.1.tgz#7dfdd8814bdb7cabc7be0fb1d734cfb66c940477"
+"semver@2 || 3 || 4 || 5", semver@^5.0.3, semver@^5.1.0, semver@^5.3.0, semver@^5.5.0:
+  version "5.6.0"
+  resolved "https://registry.yarnpkg.com/semver/-/semver-5.6.0.tgz#7e74256fbaa49c75aa7c7a205cc22799cac80004"
 
 send@0.16.2:
   version "0.16.2"
@@ -6782,14 +6909,13 @@ send@0.16.2:
     range-parser "~1.2.0"
     statuses "~1.4.0"
 
-serve-handler@5.0.0:
-  version "5.0.0"
-  resolved "https://registry.yarnpkg.com/serve-handler/-/serve-handler-5.0.0.tgz#96d6cb86b5be52ada878287696b23174c7902e95"
+serve-handler@5.0.7:
+  version "5.0.7"
+  resolved "https://registry.yarnpkg.com/serve-handler/-/serve-handler-5.0.7.tgz#317877420925913e99e4dc228e67f6e5774e5387"
   dependencies:
     bytes "3.0.0"
     content-disposition "0.5.2"
     fast-url-parser "1.1.3"
-    glob-slash "1.0.0"
     mime-types "2.1.18"
     minimatch "3.0.4"
     path-is-inside "1.0.2"
@@ -6818,16 +6944,17 @@ serve-static@1.13.2:
     send "0.16.2"
 
 serve@^10.0.0:
-  version "10.0.0"
-  resolved "https://registry.yarnpkg.com/serve/-/serve-10.0.0.tgz#4d640167c88f07f1f730a52bb992e94d2e4a174c"
+  version "10.1.1"
+  resolved "https://registry.yarnpkg.com/serve/-/serve-10.1.1.tgz#29a0210fc6fc2d9bbd67e977a0e487deb0e86789"
   dependencies:
-    "@zeit/schemas" "2.1.1"
+    "@zeit/schemas" "2.6.0"
     ajv "6.5.3"
     arg "2.0.0"
     boxen "1.3.0"
     chalk "2.4.1"
     clipboardy "1.2.3"
-    serve-handler "5.0.0"
+    compression "1.7.3"
+    serve-handler "5.0.7"
     update-check "1.5.2"
 
 serviceworker-cache-polyfill@^4.0.0:
@@ -6838,10 +6965,6 @@ set-blocking@^2.0.0, set-blocking@~2.0.0:
   version "2.0.0"
   resolved "https://registry.yarnpkg.com/set-blocking/-/set-blocking-2.0.0.tgz#045f9782d011ae9a6803ddd382b24392b3d890f7"
 
-set-immediate-shim@^1.0.1:
-  version "1.0.1"
-  resolved "https://registry.yarnpkg.com/set-immediate-shim/-/set-immediate-shim-1.0.1.tgz#4b2b1b27eb808a9f8dcc481a58e5e56f599f3f61"
-
 set-value@^0.4.3:
   version "0.4.3"
   resolved "https://registry.yarnpkg.com/set-value/-/set-value-0.4.3.tgz#7db08f9d3d22dc7f78e53af3c3bf4666ecdfccf1"
@@ -6864,10 +6987,6 @@ setimmediate@^1.0.4, setimmediate@^1.0.5:
   version "1.0.5"
   resolved "https://registry.yarnpkg.com/setimmediate/-/setimmediate-1.0.5.tgz#290cbb232e306942d7d7ea9b83732ab7856f8285"
 
-setprototypeof@1.0.3:
-  version "1.0.3"
-  resolved "https://registry.yarnpkg.com/setprototypeof/-/setprototypeof-1.0.3.tgz#66567e37043eeb4f04d91bd658c0cbefb55b8e04"
-
 setprototypeof@1.1.0:
   version "1.1.0"
   resolved "https://registry.yarnpkg.com/setprototypeof/-/setprototypeof-1.1.0.tgz#d0bd85536887b6fe7c0d818cb962d9d91c54e656"
@@ -6954,6 +7073,17 @@ sockjs-client@1.1.4:
     json3 "^3.3.2"
     url-parse "^1.1.8"
 
+sockjs-client@1.1.5:
+  version "1.1.5"
+  resolved "https://registry.yarnpkg.com/sockjs-client/-/sockjs-client-1.1.5.tgz#1bb7c0f7222c40f42adf14f4442cbd1269771a83"
+  dependencies:
+    debug "^2.6.6"
+    eventsource "0.1.6"
+    faye-websocket "~0.11.0"
+    inherits "^2.0.1"
+    json3 "^3.3.2"
+    url-parse "^1.1.8"
+
 sockjs@0.3.18:
   version "0.3.18"
   resolved "https://registry.yarnpkg.com/sockjs/-/sockjs-0.3.18.tgz#d9b289316ca7df77595ef299e075f0f937eb4207"
@@ -6968,8 +7098,8 @@ sort-keys@^1.0.0:
     is-plain-obj "^1.0.0"
 
 source-list-map@^2.0.0:
-  version "2.0.0"
-  resolved "https://registry.yarnpkg.com/source-list-map/-/source-list-map-2.0.0.tgz#aaa47403f7b245a92fbc97ea08f250d6087ed085"
+  version "2.0.1"
+  resolved "https://registry.yarnpkg.com/source-list-map/-/source-list-map-2.0.1.tgz#3993bd873bfc48479cca9ea3a547835c7c154b34"
 
 source-map-resolve@^0.5.0:
   version "0.5.2"
@@ -6991,12 +7121,6 @@ source-map-url@^0.4.0:
   version "0.4.0"
   resolved "https://registry.yarnpkg.com/source-map-url/-/source-map-url-0.4.0.tgz#3e935d7ddd73631b97659956d55128e87b5084a3"
 
-source-map@^0.4.4:
-  version "0.4.4"
-  resolved "https://registry.yarnpkg.com/source-map/-/source-map-0.4.4.tgz#eba4f5da9c0dc999de68032d8b4f76173652036b"
-  dependencies:
-    amdefine ">=0.0.4"
-
 source-map@^0.5.0, source-map@^0.5.3, source-map@^0.5.6, source-map@^0.5.7, source-map@~0.5.1:
   version "0.5.7"
   resolved "https://registry.yarnpkg.com/source-map/-/source-map-0.5.7.tgz#8a039d2d1021d22d1ea14c80d8ea468ba2ef3fcc"
@@ -7006,15 +7130,15 @@ source-map@^0.6.1, source-map@~0.6.0, source-map@~0.6.1:
   resolved "https://registry.yarnpkg.com/source-map/-/source-map-0.6.1.tgz#74722af32e9614e9c287a8d0bbde48b5e2f1a263"
 
 spdx-correct@^3.0.0:
-  version "3.0.0"
-  resolved "https://registry.yarnpkg.com/spdx-correct/-/spdx-correct-3.0.0.tgz#05a5b4d7153a195bc92c3c425b69f3b2a9524c82"
+  version "3.1.0"
+  resolved "https://registry.yarnpkg.com/spdx-correct/-/spdx-correct-3.1.0.tgz#fb83e504445268f154b074e218c87c003cd31df4"
   dependencies:
     spdx-expression-parse "^3.0.0"
     spdx-license-ids "^3.0.0"
 
 spdx-exceptions@^2.1.0:
-  version "2.1.0"
-  resolved "https://registry.yarnpkg.com/spdx-exceptions/-/spdx-exceptions-2.1.0.tgz#2c7ae61056c714a5b9b9b2b2af7d311ef5c78fe9"
+  version "2.2.0"
+  resolved "https://registry.yarnpkg.com/spdx-exceptions/-/spdx-exceptions-2.2.0.tgz#2ea450aee74f2a89bfb94519c07fcd6f41322977"
 
 spdx-expression-parse@^3.0.0:
   version "3.0.0"
@@ -7024,12 +7148,12 @@ spdx-expression-parse@^3.0.0:
     spdx-license-ids "^3.0.0"
 
 spdx-license-ids@^3.0.0:
-  version "3.0.0"
-  resolved "https://registry.yarnpkg.com/spdx-license-ids/-/spdx-license-ids-3.0.0.tgz#7a7cd28470cc6d3a1cfe6d66886f6bc430d3ac87"
+  version "3.0.3"
+  resolved "https://registry.yarnpkg.com/spdx-license-ids/-/spdx-license-ids-3.0.3.tgz#81c0ce8f21474756148bbb5f3bfc0f36bf15d76e"
 
 spdy-transport@^2.0.18:
-  version "2.1.0"
-  resolved "https://registry.yarnpkg.com/spdy-transport/-/spdy-transport-2.1.0.tgz#4bbb15aaffed0beefdd56ad61dbdc8ba3e2cb7a1"
+  version "2.1.1"
+  resolved "https://registry.yarnpkg.com/spdy-transport/-/spdy-transport-2.1.1.tgz#c54815d73858aadd06ce63001e7d25fa6441623b"
   dependencies:
     debug "^2.6.8"
     detect-node "^2.0.3"
@@ -7061,18 +7185,17 @@ sprintf-js@~1.0.2:
   resolved "https://registry.yarnpkg.com/sprintf-js/-/sprintf-js-1.0.3.tgz#04e6926f662895354f3dd015203633b857297e2c"
 
 sshpk@^1.7.0:
-  version "1.14.2"
-  resolved "https://registry.yarnpkg.com/sshpk/-/sshpk-1.14.2.tgz#c6fc61648a3d9c4e764fd3fcdf4ea105e492ba98"
+  version "1.16.0"
+  resolved "https://registry.yarnpkg.com/sshpk/-/sshpk-1.16.0.tgz#1d4963a2fbffe58050aa9084ca20be81741c07de"
   dependencies:
     asn1 "~0.2.3"
     assert-plus "^1.0.0"
-    dashdash "^1.12.0"
-    getpass "^0.1.1"
-    safer-buffer "^2.0.2"
-  optionalDependencies:
     bcrypt-pbkdf "^1.0.0"
+    dashdash "^1.12.0"
     ecc-jsbn "~0.1.1"
+    getpass "^0.1.1"
     jsbn "~0.1.0"
+    safer-buffer "^2.0.2"
     tweetnacl "~0.14.0"
 
 static-extend@^0.1.1:
@@ -7082,7 +7205,7 @@ static-extend@^0.1.1:
     define-property "^0.2.5"
     object-copy "^0.1.0"
 
-"statuses@>= 1.3.1 < 2", "statuses@>= 1.4.0 < 2":
+"statuses@>= 1.4.0 < 2":
   version "1.5.0"
   resolved "https://registry.yarnpkg.com/statuses/-/statuses-1.5.0.tgz#161c7dac177659fd9811f43771fa99381478628c"
 
@@ -7132,9 +7255,9 @@ string-width@^1.0.1, string-width@^1.0.2:
     is-fullwidth-code-point "^2.0.0"
     strip-ansi "^4.0.0"
 
-string_decoder@^1.0.0, string_decoder@~1.1.1:
-  version "1.1.1"
-  resolved "https://registry.yarnpkg.com/string_decoder/-/string_decoder-1.1.1.tgz#9cf1611ba62685d7030ae9e4ba34149c3af03fc8"
+string_decoder@^1.0.0, string_decoder@^1.1.1:
+  version "1.2.0"
+  resolved "https://registry.yarnpkg.com/string_decoder/-/string_decoder-1.2.0.tgz#fe86e738b19544afe70469243b2a1ee9240eae8d"
   dependencies:
     safe-buffer "~5.1.0"
 
@@ -7142,6 +7265,12 @@ string_decoder@~0.10.x:
   version "0.10.31"
   resolved "https://registry.yarnpkg.com/string_decoder/-/string_decoder-0.10.31.tgz#62e203bc41766c6c28c9fc84301dab1c5310fa94"
 
+string_decoder@~1.1.1:
+  version "1.1.1"
+  resolved "https://registry.yarnpkg.com/string_decoder/-/string_decoder-1.1.1.tgz#9cf1611ba62685d7030ae9e4ba34149c3af03fc8"
+  dependencies:
+    safe-buffer "~5.1.0"
+
 strip-ansi@3.0.1, strip-ansi@^3.0.0, strip-ansi@^3.0.1:
   version "3.0.1"
   resolved "https://registry.yarnpkg.com/strip-ansi/-/strip-ansi-3.0.1.tgz#6a385fb8853d952d5ff05d0e8aaf94278dc63dcf"
@@ -7249,11 +7378,29 @@ sw-toolbox@^3.4.0:
     path-to-regexp "^1.0.1"
     serviceworker-cache-polyfill "^4.0.0"
 
-symbol-observable@1.0.4:
-  version "1.0.4"
-  resolved "https://registry.yarnpkg.com/symbol-observable/-/symbol-observable-1.0.4.tgz#29bf615d4aa7121bdd898b22d4b3f9bc4e2aa03d"
+swagger-client@^3.8.22:
+  version "3.8.22"
+  resolved "https://registry.yarnpkg.com/swagger-client/-/swagger-client-3.8.22.tgz#934809e19acd09d5070fdb7ae48bd405d0a18b69"
+  dependencies:
+    "@kyleshockey/js-yaml" "^1.0.1"
+    "@kyleshockey/object-assign-deep" "^0.4.0"
+    babel-runtime "^6.26.0"
+    btoa "1.1.2"
+    buffer "^5.1.0"
+    cookie "^0.3.1"
+    cross-fetch "0.0.8"
+    deep-extend "^0.5.1"
+    encode-3986 "^1.0.0"
+    fast-json-patch "^2.0.6"
+    isomorphic-form-data "0.0.1"
+    lodash "^4.16.2"
+    qs "^6.3.0"
+    querystring-browser "^1.0.4"
+    url "^0.11.0"
+    utf8-bytes "0.0.1"
+    utfstring "^2.0.0"
 
-symbol-observable@^1.0.4, symbol-observable@^1.1.0:
+symbol-observable@1.2.0, symbol-observable@^1.0.4, symbol-observable@^1.1.0:
   version "1.2.0"
   resolved "https://registry.yarnpkg.com/symbol-observable/-/symbol-observable-1.2.0.tgz#c22688aed4eab3cdc2dfeacbb561660560a00804"
 
@@ -7284,17 +7431,17 @@ table@^4.0.1:
     string-width "^2.1.1"
 
 tapable@^0.2.7:
-  version "0.2.8"
-  resolved "https://registry.yarnpkg.com/tapable/-/tapable-0.2.8.tgz#99372a5c999bf2df160afc0d74bed4f47948cd22"
+  version "0.2.9"
+  resolved "https://registry.yarnpkg.com/tapable/-/tapable-0.2.9.tgz#af2d8bbc9b04f74ee17af2b4d9048f807acd18a8"
 
 tar@^4:
-  version "4.4.6"
-  resolved "https://registry.yarnpkg.com/tar/-/tar-4.4.6.tgz#63110f09c00b4e60ac8bcfe1bf3c8660235fbc9b"
+  version "4.4.8"
+  resolved "https://registry.yarnpkg.com/tar/-/tar-4.4.8.tgz#b19eec3fde2a96e64666df9fdb40c5ca1bc3747d"
   dependencies:
-    chownr "^1.0.1"
+    chownr "^1.1.1"
     fs-minipass "^1.2.5"
-    minipass "^2.3.3"
-    minizlib "^1.1.0"
+    minipass "^2.3.4"
+    minizlib "^1.1.1"
     mkdirp "^0.5.0"
     safe-buffer "^5.1.2"
     yallist "^3.0.2"
@@ -7306,11 +7453,11 @@ term-size@^1.2.0:
     execa "^0.7.0"
 
 test-exclude@^4.2.1:
-  version "4.2.1"
-  resolved "https://registry.yarnpkg.com/test-exclude/-/test-exclude-4.2.1.tgz#dfa222f03480bca69207ca728b37d74b45f724fa"
+  version "4.2.3"
+  resolved "https://registry.yarnpkg.com/test-exclude/-/test-exclude-4.2.3.tgz#a9a5e64474e4398339245a0a769ad7c2f4a97c20"
   dependencies:
     arrify "^1.0.1"
-    micromatch "^3.1.8"
+    micromatch "^2.3.11"
     object-assign "^4.1.0"
     read-pkg-up "^1.0.1"
     require-main-filename "^1.0.1"
@@ -7319,14 +7466,15 @@ text-table@0.2.0, text-table@~0.2.0:
   version "0.2.0"
   resolved "https://registry.yarnpkg.com/text-table/-/text-table-0.2.0.tgz#7f5ee823ae805207c00af2df4a84ec3fcfa570b4"
 
-theming@^1.3.0:
-  version "1.3.0"
-  resolved "https://registry.yarnpkg.com/theming/-/theming-1.3.0.tgz#286d5bae80be890d0adc645e5ca0498723725bdc"
+three.js@^0.77.1:
+  version "0.77.1"
+  resolved "https://registry.yarnpkg.com/three.js/-/three.js-0.77.1.tgz#07858a8749e4b627a31083252a2d987776cca32a"
   dependencies:
-    brcast "^3.0.1"
-    is-function "^1.0.1"
-    is-plain-object "^2.0.1"
-    prop-types "^15.5.8"
+    three "0.77.0"
+
+three@0.77.0:
+  version "0.77.0"
+  resolved "https://registry.yarnpkg.com/three/-/three-0.77.0.tgz#8b0b1b00047689ceaa020d480a562ca60d87ff37"
 
 throat@^3.0.0:
   version "3.2.0"
@@ -7337,12 +7485,12 @@ through@^2.3.6:
   resolved "https://registry.yarnpkg.com/through/-/through-2.3.8.tgz#0dd4c9ffaabc357960b1b724115d7e0e86a2e1f5"
 
 thunky@^1.0.2:
-  version "1.0.2"
-  resolved "https://registry.yarnpkg.com/thunky/-/thunky-1.0.2.tgz#a862e018e3fb1ea2ec3fce5d55605cf57f247371"
+  version "1.0.3"
+  resolved "https://registry.yarnpkg.com/thunky/-/thunky-1.0.3.tgz#f5df732453407b09191dae73e2a8cc73f381a826"
 
 time-stamp@^2.0.0:
-  version "2.0.1"
-  resolved "https://registry.yarnpkg.com/time-stamp/-/time-stamp-2.0.1.tgz#708a89359c1fc50bd5e7b1c8aa750d08c9172232"
+  version "2.2.0"
+  resolved "https://registry.yarnpkg.com/time-stamp/-/time-stamp-2.2.0.tgz#917e0a66905688790ec7bbbde04046259af83f57"
 
 timed-out@^4.0.0:
   version "4.0.1"
@@ -7354,10 +7502,6 @@ timers-browserify@^2.0.4:
   dependencies:
     setimmediate "^1.0.4"
 
-tiny-emitter@^2.0.0:
-  version "2.0.2"
-  resolved "https://registry.yarnpkg.com/tiny-emitter/-/tiny-emitter-2.0.2.tgz#82d27468aca5ade8e5fd1e6d22b57dd43ebdfb7c"
-
 tmp@^0.0.33:
   version "0.0.33"
   resolved "https://registry.yarnpkg.com/tmp/-/tmp-0.0.33.tgz#6d34335889768d21b2bcda0aa277ced3b1bfadf9"
@@ -7410,7 +7554,14 @@ toposort@^1.0.0:
   version "1.0.7"
   resolved "https://registry.yarnpkg.com/toposort/-/toposort-1.0.7.tgz#2e68442d9f64ec720b8cc89e6443ac6caa950029"
 
-tough-cookie@^2.3.2, tough-cookie@~2.4.3:
+tough-cookie@^2.3.2:
+  version "2.5.0"
+  resolved "https://registry.yarnpkg.com/tough-cookie/-/tough-cookie-2.5.0.tgz#cd9fb2a0aa1d5a12b473bd9fb96fa3dcff65ade2"
+  dependencies:
+    psl "^1.1.28"
+    punycode "^2.1.1"
+
+tough-cookie@~2.4.3:
   version "2.4.3"
   resolved "https://registry.yarnpkg.com/tough-cookie/-/tough-cookie-2.4.3.tgz#53f36da3f47783b0925afa06ff9f3b165280f781"
   dependencies:
@@ -7449,7 +7600,7 @@ type-check@~0.3.2:
   dependencies:
     prelude-ls "~1.1.2"
 
-type-is@~1.6.15, type-is@~1.6.16:
+type-is@~1.6.16:
   version "1.6.16"
   resolved "https://registry.yarnpkg.com/type-is/-/type-is-1.6.16.tgz#f89ce341541c672b25ee7ae3c73dee3b2be50194"
   dependencies:
@@ -7461,17 +7612,17 @@ typedarray@^0.0.6:
   resolved "https://registry.yarnpkg.com/typedarray/-/typedarray-0.0.6.tgz#867ac74e3864187b1d3d47d996a78ec5c8830777"
 
 ua-parser-js@^0.7.18:
-  version "0.7.18"
-  resolved "https://registry.yarnpkg.com/ua-parser-js/-/ua-parser-js-0.7.18.tgz#a7bfd92f56edfb117083b69e31d2aa8882d4b1ed"
+  version "0.7.19"
+  resolved "https://registry.yarnpkg.com/ua-parser-js/-/ua-parser-js-0.7.19.tgz#94151be4c0a7fb1d001af7022fdaca4642659e4b"
 
-uglify-js@3.4.x, uglify-js@^3.0.13:
-  version "3.4.7"
-  resolved "https://registry.yarnpkg.com/uglify-js/-/uglify-js-3.4.7.tgz#4df6b92e54789aa921a254cb1e33704d6ec12b89"
+uglify-js@3.4.x, uglify-js@^3.0.13, uglify-js@^3.1.4:
+  version "3.4.9"
+  resolved "https://registry.yarnpkg.com/uglify-js/-/uglify-js-3.4.9.tgz#af02f180c1207d76432e473ed24a28f4a782bae3"
   dependencies:
-    commander "~2.16.0"
+    commander "~2.17.1"
     source-map "~0.6.1"
 
-uglify-js@^2.6, uglify-js@^2.8.29:
+uglify-js@^2.8.29:
   version "2.8.29"
   resolved "https://registry.yarnpkg.com/uglify-js/-/uglify-js-2.8.29.tgz#29c5733148057bb4e1f75df35b7a9cb72e6a59dd"
   dependencies:
@@ -7492,19 +7643,6 @@ uglifyjs-webpack-plugin@^0.4.6:
     uglify-js "^2.8.29"
     webpack-sources "^1.0.1"
 
-underscore.string.fp@^1.0.4:
-  version "1.0.4"
-  resolved "https://registry.yarnpkg.com/underscore.string.fp/-/underscore.string.fp-1.0.4.tgz#054b3f1843bcae561286c87de5e8879b4fc98364"
-  dependencies:
-    chickencurry "1.1.1"
-    compose-function "^2.0.0"
-    reverse-arguments "1.0.0"
-    underscore.string "3.0.3"
-
-underscore.string@3.0.3:
-  version "3.0.3"
-  resolved "https://registry.yarnpkg.com/underscore.string/-/underscore.string-3.0.3.tgz#4617b8c1a250cf6e5064fbbb363d0fa96cf14552"
-
 union-value@^1.0.0:
   version "1.0.0"
   resolved "https://registry.yarnpkg.com/union-value/-/union-value-1.0.0.tgz#5c71c34cb5bad5dcebe3ea0cd08207ba5aa1aea4"
@@ -7528,6 +7666,15 @@ unique-string@^1.0.0:
   dependencies:
     crypto-random-string "^1.0.0"
 
+universal-cookie@^3.0.7:
+  version "3.0.7"
+  resolved "https://registry.yarnpkg.com/universal-cookie/-/universal-cookie-3.0.7.tgz#722e8bc455bb33ed3c74988344fad9d9bb88278e"
+  dependencies:
+    "@types/cookie" "^0.3.1"
+    "@types/object-assign" "^4.0.30"
+    cookie "^0.3.1"
+    object-assign "^4.1.0"
+
 universalify@^0.1.0:
   version "0.1.2"
   resolved "https://registry.yarnpkg.com/universalify/-/universalify-0.1.2.tgz#b646f69be3942dabcecc9d6639c80dc105efaa66"
@@ -7606,8 +7753,8 @@ url-parse-lax@^1.0.0:
     prepend-http "^1.0.1"
 
 url-parse@^1.1.8, url-parse@^1.4.3:
-  version "1.4.3"
-  resolved "https://registry.yarnpkg.com/url-parse/-/url-parse-1.4.3.tgz#bfaee455c889023219d757e045fa6a684ec36c15"
+  version "1.4.4"
+  resolved "https://registry.yarnpkg.com/url-parse/-/url-parse-1.4.4.tgz#cac1556e95faa0303691fec5cf9d5a1bc34648f8"
   dependencies:
     querystringify "^2.0.0"
     requires-port "^1.0.0"
@@ -7623,7 +7770,15 @@ use@^3.1.0:
   version "3.1.1"
   resolved "https://registry.yarnpkg.com/use/-/use-3.1.1.tgz#d50c8cac79a19fbc20f2911f56eb973f4e10070f"
 
-util-deprecate@~1.0.1:
+utf8-bytes@0.0.1:
+  version "0.0.1"
+  resolved "https://registry.yarnpkg.com/utf8-bytes/-/utf8-bytes-0.0.1.tgz#116b025448c9b500081cdfbf1f4d6c6c37d8837d"
+
+utfstring@^2.0.0:
+  version "2.0.0"
+  resolved "https://registry.yarnpkg.com/utfstring/-/utfstring-2.0.0.tgz#b331f7351e9be1c46334cc7518826cda3b44242a"
+
+util-deprecate@^1.0.1, util-deprecate@~1.0.1:
   version "1.0.2"
   resolved "https://registry.yarnpkg.com/util-deprecate/-/util-deprecate-1.0.2.tgz#450d4dc9fa70de732762fbd2d4a28981419a0ccf"
 
@@ -7639,11 +7794,7 @@ util@^0.10.3:
   dependencies:
     inherits "2.0.3"
 
-utila@~0.3:
-  version "0.3.3"
-  resolved "https://registry.yarnpkg.com/utila/-/utila-0.3.3.tgz#d7e8e7d7e309107092b05f8d9688824d633a4226"
-
-utila@~0.4:
+utila@^0.4.0, utila@~0.4:
   version "0.4.0"
   resolved "https://registry.yarnpkg.com/utila/-/utila-0.4.0.tgz#8a16a05d445657a3aea5eecc5b12a4fa5379772c"
 
@@ -7786,8 +7937,8 @@ webpack-manifest-plugin@1.3.2:
     lodash ">=3.5 <5"
 
 webpack-sources@^1.0.1:
-  version "1.1.0"
-  resolved "https://registry.yarnpkg.com/webpack-sources/-/webpack-sources-1.1.0.tgz#a101ebae59d6507354d71d8013950a3a8b7a5a54"
+  version "1.3.0"
+  resolved "https://registry.yarnpkg.com/webpack-sources/-/webpack-sources-1.3.0.tgz#2a28dcb9f1f45fe960d8f1493252b5ee6530fa85"
   dependencies:
     source-list-map "^2.0.0"
     source-map "~0.6.1"
@@ -7831,18 +7982,18 @@ websocket-extensions@>=0.1.1:
   resolved "https://registry.yarnpkg.com/websocket-extensions/-/websocket-extensions-0.1.3.tgz#5d2ff22977003ec687a4b87073dfbbac146ccf29"
 
 whatwg-encoding@^1.0.1:
-  version "1.0.4"
-  resolved "https://registry.yarnpkg.com/whatwg-encoding/-/whatwg-encoding-1.0.4.tgz#63fb016b7435b795d9025632c086a5209dbd2621"
+  version "1.0.5"
+  resolved "https://registry.yarnpkg.com/whatwg-encoding/-/whatwg-encoding-1.0.5.tgz#5abacf777c32166a51d085d6b4f3e7d27113ddb0"
   dependencies:
-    iconv-lite "0.4.23"
+    iconv-lite "0.4.24"
 
 whatwg-fetch@2.0.3:
   version "2.0.3"
   resolved "https://registry.yarnpkg.com/whatwg-fetch/-/whatwg-fetch-2.0.3.tgz#9c84ec2dcf68187ff00bc64e1274b442176e1c84"
 
 whatwg-fetch@>=0.10.0:
-  version "2.0.4"
-  resolved "https://registry.yarnpkg.com/whatwg-fetch/-/whatwg-fetch-2.0.4.tgz#dde6a5df315f9d39991aa17621853d720b85566f"
+  version "3.0.0"
+  resolved "https://registry.yarnpkg.com/whatwg-fetch/-/whatwg-fetch-3.0.0.tgz#fc804e458cc460009b1a2b966bc8817d2578aefb"
 
 whatwg-url@^4.3.0:
   version "4.8.0"
@@ -7876,8 +8027,8 @@ wide-align@^1.1.0:
     string-width "^1.0.2 || 2"
 
 widest-line@^2.0.0:
-  version "2.0.0"
-  resolved "https://registry.yarnpkg.com/widest-line/-/widest-line-2.0.0.tgz#0142a4e8a243f8882c0233aa0e0281aa76152273"
+  version "2.0.1"
+  resolved "https://registry.yarnpkg.com/widest-line/-/widest-line-2.0.1.tgz#7438764730ec7ef4381ce4df82fb98a53142a3fc"
   dependencies:
     string-width "^2.1.1"
 
@@ -7949,8 +8100,8 @@ yallist@^2.1.2:
   resolved "https://registry.yarnpkg.com/yallist/-/yallist-2.1.2.tgz#1c11f9218f076089a47dd512f93c6699a6a81d52"
 
 yallist@^3.0.0, yallist@^3.0.2:
-  version "3.0.2"
-  resolved "https://registry.yarnpkg.com/yallist/-/yallist-3.0.2.tgz#8452b4bb7e83c7c188d8041c1a837c773d6d8bb9"
+  version "3.0.3"
+  resolved "https://registry.yarnpkg.com/yallist/-/yallist-3.0.3.tgz#b4b049e314be545e3ce802236d6cd22cd91c3de9"
 
 yargs-parser@^4.2.0:
   version "4.2.1"
diff --git a/nomad/api/__init__.py b/nomad/api/__init__.py
index e42758024b8462efde23a509fd2ab736a772983f..7a2055790569e82a62020903af2fbf01b32b84ca 100644
--- a/nomad/api/__init__.py
+++ b/nomad/api/__init__.py
@@ -24,12 +24,12 @@ There is a separate documentation for the API endpoints from a client perspectiv
 .. automodule:: nomad.api.app
 .. automodule:: nomad.api.auth
 .. automodule:: nomad.api.upload
-.. automodule:: nomad.api.repository
+.. automodule:: nomad.api.repo
 .. automodule:: nomad.api.archive
 .. automodule:: nomad.api.admin
 """
 from .app import app
-from . import auth, admin, upload, repository, archive, raw
+from . import auth, admin, upload, repo, archive, raw
 
 
 @app.before_first_request
diff --git a/nomad/api/__main__.py b/nomad/api/__main__.py
index 9113d57b0570829413e960eaeadb53c0c26ac716..3598823e3f94854985ac3ab0657fa1ad961f3024 100644
--- a/nomad/api/__main__.py
+++ b/nomad/api/__main__.py
@@ -12,8 +12,12 @@
 # See the License for the specific language governing permissions and
 # limitations under the License.
 
-import nomad.api
+from nomad.api import app
+
+
+def run_dev_server(*args, **kwargs):
+    app.run(*args, **kwargs)
 
 
 if __name__ == '__main__':
-    nomad.api.app.run(debug=True, port=8000)
+    run_dev_server(debug=True, port=8000)
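+
+
+# Sketch: since this is nomad/api/__main__.py, the development server can be
+# started by running the module directly:
+#
+#     python -m nomad.api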
diff --git a/nomad/api/admin.py b/nomad/api/admin.py
index e5f60efdaaad231f455cd91d7f367e1203b645cf..0627414c23b2c2e46d844598d3d26bdb5b0d8523 100644
--- a/nomad/api/admin.py
+++ b/nomad/api/admin.py
@@ -12,35 +12,61 @@
 # See the License for the specific language governing permissions and
 # limitations under the License.
 
-from flask_restful import abort
+from flask import g
+from flask_restplus import abort, Resource
 
-from nomad import infrastructure
-from nomad.processing import Upload
+from nomad import infrastructure, config
 
-from .app import app, base_path
+from .app import api
+from .auth import login_really_required
 
 
-# TODO in production this requires authorization
-@app.route('%s/admin/<string:operation>' % base_path, methods=['POST'])
-def call_admin_operation(operation):
-    """
-    Allows to perform administrative operations on the nomad services. The possible
-    operations are *repair_uploads*
-    (cleans incomplete or otherwise unexpectedly failed uploads), *reset* (clears all
-    databases and resets nomad).
+ns = api.namespace('admin', description='Administrative operations')
 
-    .. :quickref: Allows to perform administrative operations on the nomad services.
 
-    :param string operation: the operation to perform
-    :status 400: unknown operation
-    :status 200: operation successfully started
-    :returns: an authentication token that is valid for 10 minutes.
-    """
-    if operation == 'repair_uploads':
-        Upload.repair_all()
-    if operation == 'reset':
+@ns.route('/reset')
+class AdminResetResource(Resource):
+    @api.doc('exec_reset_command')
+    @api.response(200, 'Reset performed')
+    @api.response(400, 'Reset not available/disabled')
+    @login_really_required
+    def post(self):
+        """
+        The ``reset`` command will attempt to clear the contents of all databases and
+        indices.
+
+        Nomad can be configured to disable reset and the command might not be available.
+        """
+        if not g.user.is_admin:
+            abort(401, message='Only the admin user can perform reset.')
+
+        if config.services.disable_reset:
+            abort(400, message='Operation is disabled')
+
         infrastructure.reset()
-    else:
-        abort(400, message='Unknown operation %s' % operation)
 
-    return 'done', 200
+        return dict(message='Reset performed.'), 200
+
+
+@ns.route('/remove')
+class AdminRemoveResource(Resource):
+    @api.doc('exec_remove_command')
+    @api.response(200, 'Remove performed')
+    @api.response(400, 'Remove not available/disabled')
+    @login_really_required
+    def post(self):
+        """
+        The ``remove`` command will attempt to remove all databases. Expect the
+        API to stop functioning after this request.
+
+        Nomad can be configured to disable remove and the command might not be available.
+        """
+        if not g.user.is_admin:
+            abort(401, message='Only the admin user can perform remove.')
+
+        if config.services.disable_reset:
+            abort(400, message='Operation is disabled')
+
+        infrastructure.remove()
+
+        return dict(message='Remove performed.'), 200
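+
+
+# Usage sketch (hypothetical credentials): both commands require the admin user
+# and can be triggered via HTTP Basic auth, e.g.:
+#
+#     curl -X POST -u admin:<password> <api-url>/admin/reset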
diff --git a/nomad/api/app.py b/nomad/api/app.py
index 95451fc6ad334e6a0183e9f9956464c4b0cc3160..69bc14a22487ab3d262b5b7d818e49a09afb046c 100644
--- a/nomad/api/app.py
+++ b/nomad/api/app.py
@@ -17,33 +17,59 @@ All APIs are served by one Flask app (:py:mod:`nomad.api.app`) under different p
 """
 
 from flask import Flask, jsonify
-from flask_restful import Api
+from flask_restplus import Api
 from flask_cors import CORS
 from werkzeug.exceptions import HTTPException
+from werkzeug.wsgi import DispatcherMiddleware
 import os.path
+import inspect
 
-from nomad import config
+from nomad import config, utils
 
 base_path = config.services.api_base_path
 """ Provides the root path of the nomad APIs. """
 
 app = Flask(
     __name__,
-    static_url_path='%s/docs' % base_path,
+    static_url_path='/docs',
     static_folder=os.path.abspath(os.path.join(os.path.dirname(__file__), '../../docs/.build/html')))
 """ The Flask app that serves all APIs. """
 
+app.config['APPLICATION_ROOT'] = base_path
+app.config['RESTPLUS_MASK_HEADER'] = False
+app.config['RESTPLUS_MASK_SWAGGER'] = False
+app.config['SWAGGER_UI_OPERATION_ID'] = True
+app.config['SWAGGER_UI_REQUEST_DURATION'] = True
+
+
+def api_base_path_response(env, resp):
+    resp('200 OK', [('Content-Type', 'text/plain')])
+    return [
+        ('Development nomad API server. The API is served under %s/.' %
+            config.services.api_base_path).encode('utf-8')]
+
+
+app.wsgi_app = DispatcherMiddleware(
+    api_base_path_response, {config.services.api_base_path: app.wsgi_app})
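+# With this middleware, requests outside the base path receive the plain text
+# hint above, while the actual app is served under config.services.api_base_path.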
+
+
 CORS(app)
 
-api = Api(app)
-""" Provides the flask restful api instance """
+api = Api(
+    app, version='1.0', title='nomad@FAIRDI API',
+    description='Official API for nomad@FAIRDI services.',
+    validate=True)
+""" Provides the flask restplust api instance """
 
 
-@app.errorhandler(HTTPException)
-def handle(error):
+@app.errorhandler(Exception)
+@api.errorhandler
+def handle(error: Exception):
     status_code = getattr(error, 'code', 500)
+    if not isinstance(status_code, int):
+        status_code = 500
     name = getattr(error, 'name', 'Internal Server Error')
-    description = getattr(error, 'description', None)
+    description = getattr(error, 'description', 'No description available')
     data = dict(
         code=status_code,
         name=name,
@@ -51,4 +77,40 @@ def handle(error):
     data.update(getattr(error, 'data', []))
     response = jsonify(data)
     response.status_code = status_code
+    if status_code == 500:
+        utils.get_logger(__name__).error('internal server error', exc_info=error)
     return response
+
+
+def with_logger(func):
+    """
+    Decorator for endpoint implementations that provides a pre configured logger and
+    automatically logs errors on all 500 responses.
+    """
+    signature = inspect.signature(func)
+    has_logger = 'logger' in signature.parameters
+    wrapper_signature = signature.replace(parameters=tuple(
+        param for param in signature.parameters.values()
+        if param.name != 'logger'
+    ))
+
+    def wrapper(*args, **kwargs):
+        logger = utils.get_logger(__name__)
+        if has_logger:
+            # flask passes URL parameters (e.g. upload_id, calc_id) as kwargs
+            logger_args = {
+                k: v for k, v in kwargs.items()
+                if k in ['upload_id', 'calc_id']}
+            logger = utils.get_logger(__name__, **logger_args)
+            kwargs.update(logger=logger)
+        try:
+            return func(*args, **kwargs)
+        except HTTPException as e:
+            if getattr(e, 'code', None) == 500:
+                logger.error('Internal server error', exc_info=e)
+            raise e
+        except Exception as e:
+            logger.error('Internal server error', exc_info=e)
+            raise e
+
+    wrapper.__signature__ = wrapper_signature
+    return wrapper
diff --git a/nomad/api/archive.py b/nomad/api/archive.py
index 725e6c6038dbf48cb514827cdb4e629519ece5ea..d4ff5a289adf229ebcc9ca024cddddb897837ccb 100644
--- a/nomad/api/archive.py
+++ b/nomad/api/archive.py
@@ -20,109 +20,116 @@ The archive API of the nomad@FAIRDI APIs. This API is about serving processed
 import os.path
 
 from flask import send_file
-from flask_restful import abort
-
-from nomad import config
-from nomad.files import ArchiveFile, ArchiveLogFile
-from nomad.utils import get_logger
-
-from .app import app, base_path
-
-
-@app.route('%s/logs/<string:upload_hash>/<string:calc_hash>' % base_path, methods=['GET'])
-def get_calc_proc_log(upload_hash, calc_hash):
-    """
-    Get calculation processing log. Calcs are references via *upload_hash*, *calc_hash*
-    pairs.
-
-    .. :quickref: archive; Get calculation processing logs.
-
-    **Example request**:
-
-    .. sourcecode:: http
-
-        GET /nomad/api/logs/W36aqCzAKxOCfIiMFsBJh3nHPb4a/7ddvtfRfZAvc3Crr7jOJ8UH0T34I HTTP/1.1
-        Accept: application/json
-
-    :param string upload_hash: the hash of the upload (from uploaded file contents)
-    :param string calc_hash: the hash of the calculation (from mainfile)
-    :resheader Content-Type: application/json
-    :status 200: calc successfully retrieved
-    :status 404: calc with given hashes does not exist
-    :returns: the log data, a line by line sequence of structured logs
-    """
-    archive_id = '%s/%s' % (upload_hash, calc_hash)
-
-    try:
-        archive = ArchiveLogFile(archive_id)
-        if not archive.exists():
-            raise FileNotFoundError()
-
-        archive_path = archive.os_path
-
-        rv = send_file(
-            archive_path,
-            mimetype='application/text',
-            as_attachment=True,
-            attachment_filename=os.path.basename(archive_path))
-
-        return rv
-    except FileNotFoundError:
-        abort(404, message='Archive/calculation %s does not exist.' % archive_id)
-    except Exception as e:
-        logger = get_logger(
-            __name__, endpoint='logs', action='get',
-            upload_hash=upload_hash, calc_hash=calc_hash)
-        logger.error('Exception on accessing calc proc log', exc_info=e)
-        abort(500, message='Could not accessing the logs.')
-
-
-@app.route('%s/archive/<string:upload_hash>/<string:calc_hash>' % base_path, methods=['GET'])
-def get_calc(upload_hash, calc_hash):
-    """
-    Get calculation data in archive form. Calcs are references via *upload_hash*, *calc_hash*
-    pairs.
-
-    .. :quickref: archive; Get calculation data in archive form.
-
-    **Example request**:
-
-    .. sourcecode:: http
-
-        GET /nomad/api/archive/W36aqCzAKxOCfIiMFsBJh3nHPb4a/7ddvtfRfZAvc3Crr7jOJ8UH0T34I HTTP/1.1
-        Accept: application/json
-
-    :param string upload_hash: the hash of the upload (from uploaded file contents)
-    :param string calc_hash: the hash of the calculation (from mainfile)
-    :resheader Content-Type: application/json
-    :status 200: calc successfully retrieved
-    :status 404: calc with given hashes does not exist
-    :returns: the metainfo formated JSON data of the requested calculation
-    """
-    archive_id = '%s/%s' % (upload_hash, calc_hash)
-
-    try:
-        archive = ArchiveFile(archive_id)
-        if not archive.exists():
-            raise FileNotFoundError()
-
-        archive_path = archive.os_path
-
-        rv = send_file(
-            archive_path,
-            mimetype='application/json',
-            as_attachment=True,
-            attachment_filename=os.path.basename(archive_path))
-
-        if config.files.compress_archive:
-            rv.headers['Content-Encoding'] = 'gzip'
-
-        return rv
-    except FileNotFoundError:
-        abort(404, message='Archive %s does not exist.' % archive_id)
-    except Exception as e:
-        logger = get_logger(
-            __name__, endpoint='archive', action='get',
-            upload_hash=upload_hash, calc_hash=calc_hash)
-        logger.error('Exception on accessing archive', exc_info=e)
-        abort(500, message='Could not accessing the archive.')
+from flask_restplus import abort, Resource
+
+import nomad_meta_info
+
+from nomad.files import UploadFiles, Restricted
+
+from .app import api
+from .auth import login_if_available, create_authorization_predicate, \
+    signature_token_argument, with_signature_token
+from .common import calc_route
+
+ns = api.namespace(
+    'archive',
+    description='Access archive data and archive processing logs.')
+
+
+archive_file_request_parser = api.parser()
+archive_file_request_parser.add_argument(**signature_token_argument)
+
+
+@calc_route(ns, '/logs')
+class ArchiveCalcLogResource(Resource):
+    @api.doc('get_archive_logs')
+    @api.response(404, 'The upload or calculation does not exist')
+    @api.response(401, 'Not authorized to access the data.')
+    @api.response(200, 'Archive data sent', headers={'Content-Type': 'text/plain'})
+    @api.expect(archive_file_request_parser, validate=True)
+    @login_if_available
+    @with_signature_token
+    def get(self, upload_id, calc_id):
+        """
+        Get calculation processing log.
+
+        Calcs are referenced via *upload_id*, *calc_id* pairs.
+        """
+        archive_id = '%s/%s' % (upload_id, calc_id)
+
+        upload_files = UploadFiles.get(
+            upload_id, is_authorized=create_authorization_predicate(upload_id, calc_id))
+
+        if upload_files is None:
+            abort(404, message='Upload %s does not exist.' % upload_id)
+
+        try:
+            return send_file(
+                upload_files.archive_log_file(calc_id, 'rb'),
+                mimetype='text/plain',
+                as_attachment=True,
+                attachment_filename='%s.log' % archive_id)
+        except Restricted:
+            abort(401, message='Not authorized to access %s/%s.' % (upload_id, calc_id))
+        except KeyError:
+            abort(404, message='Calculation %s does not exist.' % archive_id)
+
+
+@calc_route(ns)
+class ArchiveCalcResource(Resource):
+    @api.doc('get_archive_calc')
+    @api.response(404, 'The upload or calculation does not exist')
+    @api.response(401, 'Not authorized to access the data.')
+    @api.response(200, 'Archive data sent')
+    @api.expect(archive_file_request_parser, validate=True)
+    @login_if_available
+    @with_signature_token
+    def get(self, upload_id, calc_id):
+        """
+        Get calculation data in archive form.
+
+        Calcs are referenced via *upload_id*, *calc_id* pairs.
+        """
+        archive_id = '%s/%s' % (upload_id, calc_id)
+
+        upload_file = UploadFiles.get(
+            upload_id, is_authorized=create_authorization_predicate(upload_id, calc_id))
+
+        if upload_file is None:
+            abort(404, message='Upload %s does not exist.' % upload_id)
+
+        try:
+            return send_file(
+                upload_file.archive_file(calc_id, 'rb'),
+                mimetype='application/json',
+                as_attachment=True,
+                attachment_filename='%s.json' % archive_id)
+        except Restricted:
+            abort(401, message='Not authorized to access %s/%s.' % (upload_id, calc_id))
+        except KeyError:
+            abort(404, message='Calculation %s does not exist.' % archive_id)
+
+
+@ns.route('/metainfo/<string:metainfo_path>')
+@api.doc(params=dict(metainfo_path='A path or metainfo definition file name.'))
+class MetainfoResource(Resource):
+    @api.doc('get_metainfo')
+    @api.response(404, 'The metainfo does not exist')
+    @api.response(200, 'Metainfo data sent')
+    def get(self, metainfo_path):
+        """
+        Get a metainfo definition file.
+        """
+        try:
+            file_dir = os.path.dirname(os.path.abspath(nomad_meta_info.__file__))
+            metainfo_file = os.path.normpath(os.path.join(file_dir, metainfo_path.strip()))
+
+            rv = send_file(
+                metainfo_file,
+                mimetype='application/json',
+                as_attachment=True,
+                attachment_filename=os.path.basename(metainfo_file))
+
+            return rv
+        except FileNotFoundError:
+            abort(404, message='The metainfo %s does not exist.' % metainfo_path)
diff --git a/nomad/api/auth.py b/nomad/api/auth.py
index c9a721e72d1b99a4f05855c8eedd46e2ea39b4d4..bc4ad5b4588c16578998cd4524336efd3dd0a973 100644
--- a/nomad/api/auth.py
+++ b/nomad/api/auth.py
@@ -36,33 +36,53 @@ authenticated user information for authorization or otherwise.
 """
 
 from flask import g, request
-from flask_restful import abort
+from flask_restplus import abort, Resource, fields
 from flask_httpauth import HTTPBasicAuth
 
-from nomad import config
-from nomad.coe_repo import User
+from nomad import config, processing, files, utils, coe_repo
+from nomad.coe_repo import User, LoginException
 
-from .app import app, base_path
+from .app import app, api
 
 app.config['SECRET_KEY'] = config.services.api_secret
 auth = HTTPBasicAuth()
 
 
+# Authentication scheme definitions, for swagger only.
+api.authorizations = {
+    'HTTP Basic': {
+        'type': 'basic'
+    },
+    'X-Token': {
+        'type': 'apiKey',
+        'in': 'header',
+        'name': 'X-Token'
+    }
+}
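+
+# Sketch: clients authenticate either via HTTP Basic or by sending the
+# "X-Token" header, e.g. with curl (hypothetical token value):
+#
+#     curl -H 'X-Token: <access-token>' <api-url>/auth/user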
+
+
 @auth.verify_password
 def verify_password(username_or_token, password):
-    # first try to authenticate by token
-    g.user = User.verify_auth_token(username_or_token)
-    if not g.user:
-        # try to authenticate with username/password
+    if username_or_token is None or username_or_token == '':
+        g.user = None
+        return True
+
+    if password is None or password == '':
+        g.user = User.verify_auth_token(username_or_token)
+        return g.user is not None
+    else:
         try:
             g.user = User.verify_user_password(username_or_token, password)
-        except Exception:
+        except Exception as e:
+            utils.get_logger(__name__).error('could not verify password', exc_info=e)
             return False
 
-    if not g.user:
-        return True  # anonymous access
+        return g.user is not None
+
 
-    return True
+@auth.error_handler
+def auth_error_handler():
+    abort(401, 'Could not authenticate user, bad credentials')
 
 
 def login_if_available(func):
@@ -70,6 +90,8 @@ def login_if_available(func):
     A decorator for API endpoint implementations that might authenticate users, but
     provide limited functionality even without users.
     """
+    @api.response(401, 'Not authorized, some data require authentication and authorization')
+    @api.doc(security=list(api.authorizations.keys()))
     @auth.login_required
     def wrapper(*args, **kwargs):
         # TODO the cutom X-Token based authentication should be replaced by a real
@@ -78,7 +100,7 @@ def login_if_available(func):
             token = request.headers['X-Token']
             g.user = User.verify_auth_token(token)
             if not g.user:
-                abort(401, message='Provided access token is not valid or does not exist.')
+                abort(401, message='Not authorized, some data require authentication and authorization')
 
         return func(*args, **kwargs)
 
@@ -92,10 +114,12 @@ def login_really_required(func):
     A decorator for API endpoint implementations that forces user authentication on
     endpoints.
     """
+    @api.response(401, 'Authentication required or not authorized to access requested data')
+    @api.doc(security=list(api.authorizations.keys()))
     @login_if_available
     def wrapper(*args, **kwargs):
         if g.user is None:
-            abort(401, message='Anonymous access is forbidden, authorization required')
+            abort(401, message='Authentication required or not authorized to access requested data')
         else:
             return func(*args, **kwargs)
     wrapper.__name__ = func.__name__
@@ -103,20 +127,117 @@ def login_really_required(func):
     return wrapper
 
 
-@app.route('%s/token' % base_path)
-@login_really_required
-def get_auth_token():
+ns = api.namespace(
+    'auth',
+    description='Authentication related endpoints.')
+
+
+user_model = api.model('User', {
+    'first_name': fields.String(description='The user\'s first name'),
+    'last_name': fields.String(description='The user\'s last name'),
+    'email': fields.String(description='The user\'s email'),
+    'affiliation': fields.String(description='The user\'s affiliation'),
+    'token': fields.String(
+        description='The access token that authenticates the user with the API. '
+        'User the HTTP header "X-Token" to provide it in API requests.')
+})
+
+
+@ns.route('/user')
+class UserResource(Resource):
+    @api.doc('get_user')
+    @api.marshal_with(user_model, skip_none=True, code=200, description='User data sent')
+    @login_really_required
+    def get(self):
+        """
+        Get user information including a long term access token for the authenticated user.
+
+        You can use basic authentication to access this endpoint and receive a
+        token for further api access. This token will expire at some point and presents
+        a more secure method of authentication.
+        """
+        try:
+            return g.user
+        except LoginException:
+            abort(
+                401,
+                message='User not logged in, provide credentials via Basic HTTP authentication.')
+
+
+token_model = api.model('Token', {
+    'user': fields.Nested(user_model),
+    'token': fields.String(description='The short term token to sign URLs'),
+    'expires_at': fields.DateTime(description='The time when the token expires')
+})
+
+
+signature_token_argument = dict(
+    name='token', type=str, help='Token that signs the URL and authenticates the user',
+    location='args')
+
+
+@ns.route('/token')
+class TokenResource(Resource):
+    @api.doc('get_token')
+    @api.marshal_with(token_model, skip_none=True, code=200, description='Token sent')
+    @login_really_required
+    def get(self):
+        """
+        Generates a short-term (10s) JWT token that can be used to authenticate the user
+        in URLs for most API GET requests, e.g. for file downloads on the raw or
+        archive API endpoints. Use the token query parameter to sign URLs.
+        """
+        token, expires_at = g.user.get_signature_token()
+        return {
+            'user': g.user,
+            'token': token,
+            'expires_at': expires_at.isoformat()
+        }
+
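+# Usage sketch (hypothetical ids): sign a download URL by appending the
+# short-term token from GET /auth/token as a query parameter:
+#
+#     GET <api-url>/raw/<upload_id>/<path>?token=<signature-token>
+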
+
+def with_signature_token(func):
     """
-    Get a token for authenticated users. This is currently disabled and all authentication
-    matters are solved by the NOMAD-coe repository GUI.
+    A decorator for API endpoint implementations that validates signed URLs.
+    """
+    @api.response(401, 'Invalid or expired signature token')
+    def wrapper(*args, **kwargs):
+        token = request.args.get('token', None)
+        if token is not None:
+            try:
+                g.user = coe_repo.User.verify_signature_token(token)
+            except LoginException:
+                abort(401, 'Invalid or expired signature token')
 
-    .. :quickref: Get a token to authenticate the user in follow up requests.
+        return func(*args, **kwargs)
+    wrapper.__name__ = func.__name__
+    wrapper.__doc__ = func.__doc__
+    return wrapper
 
-    :resheader Content-Type: application/json
-    :status 200: calc successfully retrieved
-    :returns: an authentication token that is valid for 10 minutes.
+
+def create_authorization_predicate(upload_id, calc_id=None):
+    """
+    Returns a predicate that determines if the logged in user has the authorization
+    to access the given upload and calculation.
     """
-    assert False, 'All authorization is none via NOMAD-coe repository GUI'
-    # TODO all authorization is done via NOMAD-coe repository GUI
-    # token = g.user.generate_auth_token(600)
-    # return jsonify({'token': token.decode('ascii'), 'duration': 600})
+    def func():
+        if g.user is None:
+            # guest users don't have authorized access to anything
+            return False
+
+        # look in repository
+        upload = coe_repo.Upload.from_upload_id(upload_id)
+        if upload is not None:
+            return upload.user_id == g.user.user_id
+
+        # look in staging
+        staging_upload = processing.Upload.get(upload_id)
+        if staging_upload is not None:
+            return str(g.user.user_id) == str(staging_upload.user_id)
+
+        # There are no db entries for the given resource
+        if files.UploadFiles.get(upload_id) is not None:
+            logger = utils.get_logger(__name__, upload_id=upload_id, calc_id=calc_id)
+            logger.error('Upload files without respective db entry')
+
+        raise KeyError
+    return func
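+
+
+# Usage (as in the raw and archive endpoints): pass the predicate to
+# UploadFiles.get, so restricted data raises Restricted for unauthorized users:
+#
+#     upload_files = UploadFiles.get(
+#         upload_id, is_authorized=create_authorization_predicate(upload_id))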
diff --git a/nomad/api/common.py b/nomad/api/common.py
new file mode 100644
index 0000000000000000000000000000000000000000..9dbd8ad565c1e82f97bd888aabff43b37b5b7ece
--- /dev/null
+++ b/nomad/api/common.py
@@ -0,0 +1,54 @@
+# Copyright 2018 Markus Scheidgen
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#   http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an"AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+"""
+Common data, variables, decorators, models used throughout the API.
+"""
+
+from flask_restplus import fields
+
+from .app import api
+
+
+pagination_model = api.model('Pagination', {
+    'total': fields.Integer,
+    'page': fields.Integer,
+    'per_page': fields.Integer,
+})
+""" Model used in responsed with pagination. """
+
+
+pagination_request_parser = api.parser()
+""" Parser used for requests with pagination. """
+
+pagination_request_parser.add_argument(
+    'page', type=int, help='The page, starting with 1.', location='args')
+pagination_request_parser.add_argument(
+    'per_page', type=int, help='Desired calcs per page.', location='args')
+pagination_request_parser.add_argument(
+    'order_by', type=str, help='The field to sort by.', location='args')
+pagination_request_parser.add_argument(
+    'order', type=int, help='Use -1 for descending and 1 for ascending order.', location='args')
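+
+# Sketch: inside an endpoint, the parser returns the query parameters as a
+# dict-like object:
+#
+#     args = pagination_request_parser.parse_args()
+#     page, per_page = args.get('page', 1), args.get('per_page', 10)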
+
+
+def calc_route(ns, prefix: str = ''):
+    """ A resource decorator for /<upload>/<calc> based routes. """
+    def decorator(func):
+        ns.route('%s/<string:upload_id>/<string:calc_id>' % prefix)(
+            api.doc(params={
+                'upload_id': 'The unique id for the requested upload.',
+                'calc_id': 'The unique id for the requested calculation.'
+            })(func)
+        )
+    return decorator
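+
+
+# Usage (as in the archive API): decorate a Resource to mount it under
+# /<upload_id>/<calc_id> routes on the given namespace:
+#
+#     @calc_route(ns, '/logs')
+#     class ArchiveCalcLogResource(Resource):
+#         ...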
diff --git a/nomad/api/raw.py b/nomad/api/raw.py
index 3f244959395bcf07f98b406c1f8db041e76b82f5..9d1a76f5387e0c70ee3bacc0098ad0cd4b969450 100644
--- a/nomad/api/raw.py
+++ b/nomad/api/raw.py
@@ -16,197 +16,174 @@
 The raw API of the nomad@FAIRDI APIs. Can be used to retrieve raw calculation files.
 """
 
-# TODO implement restrictions based on user, permissions, and upload/calc metadata
-
 import os.path
 from zipfile import ZIP_DEFLATED, ZIP_STORED
 
 import zipstream
-from flask import Response, request, send_file
-from flask_restful import abort
-from werkzeug.exceptions import HTTPException
-
-from nomad.files import RepositoryFile
-from nomad.utils import get_logger
-
-from .app import app, base_path
-
-
-def fix_file_paths(path):
-    """ Removed the leading data from file paths that where given in mainfile uris. """
-    # TODO, mainfile URI's should change or this implementation should change
-    return path[5:]
-
-
-@app.route('%s/raw/<string:upload_hash>/<path:upload_filepath>' % base_path, methods=['GET'])
-def get_raw_file(upload_hash, upload_filepath):
-    """
-    Get a single raw calculation file from a given upload (or many files via wildcard).
-
-    .. :quickref: raw; Get single raw calculation file.
-
-    **Example request**:
-
-    .. sourcecode:: http
-
-        GET /nomad/api/raw/W36aqCzAKxOCfIiMFsBJh3nHPb4a/Si/si.out HTTP/1.1
-        Accept: application/gz
-
-    :param string upload_hash: the hash based identifier of the upload
-    :param path upload_filepath: the path to the desired file within the upload;
-        can also contain a wildcard * at the end to denote all files with path as prefix
-    :qparam compress: any value to use compression for wildcard downloads, default is no compression
-    :resheader Content-Type: application/gz
-    :status 200: calc raw data successfully retrieved
-    :status 404: upload with given hash does not exist or the given file does not exist
-    :returns: the gzipped raw data in the body or a zip file when wildcard was used
-    """
-    upload_filepath = fix_file_paths(upload_filepath)
-
-    repository_file = RepositoryFile(upload_hash)
-    if not repository_file.exists():
-        abort(404, message='The upload with hash %s does not exist.' % upload_hash)
-
-    if upload_filepath[-1:] == '*':
-        upload_filepath = upload_filepath[0:-1]
-        files = list(
-            file for file in repository_file.manifest
-            if file.startswith(upload_filepath))
-        if len(files) == 0:
-            abort(404, message='There are no files for %s.' % upload_filepath)
-        else:
-            compress = request.args.get('compress', None) is not None
-            return respond_to_get_raw_files(upload_hash, files, compress)
-
-    try:
-        the_file = repository_file.get_file(upload_filepath)
-        with the_file.open() as f:
-            rv = send_file(
-                f,
+from flask import Response, request, send_file, stream_with_context
+from flask_restplus import abort, Resource, fields
+
+from nomad.files import UploadFiles, Restricted
+
+from .app import api
+from .auth import login_if_available, create_authorization_predicate, \
+    signature_token_argument, with_signature_token
+
+ns = api.namespace('raw', description='Downloading raw data files.')
+
+
+raw_file_compress_argument = dict(
+    name='compress', type=bool, help='Use compression on .zip files, default is no compression.',
+    location='args')
+raw_file_from_path_parser = api.parser()
+raw_file_from_path_parser.add_argument(**raw_file_compress_argument)
+raw_file_from_path_parser.add_argument(**signature_token_argument)
+
+
+@ns.route('/<string:upload_id>/<path:path>')
+@api.doc(params={
+    'upload_id': 'The unique id for the requested upload.',
+    'path': 'The path to a file or directory.'
+})
+@api.header('Content-Type', 'application/gz')
+class RawFileFromPathResource(Resource):
+    @api.doc('get')
+    @api.response(404, 'The upload or path does not exist')
+    @api.response(401, 'Not authorized to access the data.')
+    @api.response(200, 'File(s) sent', headers={'Content-Type': 'application/gz'})
+    @api.expect(raw_file_from_path_parser, validate=True)
+    @login_if_available
+    @with_signature_token
+    def get(self, upload_id: str, path: str):
+        """
+        Get a single raw calculation file or whole directory from a given upload.
+
+        If the given path points to a file, the file is provided. If the given path
+        points to a directory, the directory and all its contents are provided as a
+        .zip file. Zip files are streamed; instead of 401 errors, the zip file will
+        just not contain any files that the user is not authorized to access.
+        """
+        upload_filepath = path
+
+        upload_files = UploadFiles.get(
+            upload_id, create_authorization_predicate(upload_id))
+        if upload_files is None:
+            abort(404, message='The upload with id %s does not exist.' % upload_id)
+
+        if upload_filepath[-1:] == '*':
+            upload_filepath = upload_filepath[0:-1]
+            files = list(upload_files.raw_file_manifest(path_prefix=upload_filepath))
+            if len(files) == 0:
+                abort(404, message='There are no files for %s.' % upload_filepath)
+            else:
+                compress = request.args.get('compress', None) is not None
+                return respond_to_get_raw_files(upload_id, files, compress)
+
+        try:
+            return send_file(
+                upload_files.raw_file(upload_filepath, 'rb'),
                 mimetype='application/octet-stream',
                 as_attachment=True,
                 attachment_filename=os.path.basename(upload_filepath))
-            return rv
-    except KeyError:
-        files = list(file for file in repository_file.manifest if file.startswith(upload_filepath))
-        if len(files) == 0:
-            abort(404, message='The file %s does not exist.' % upload_filepath)
-        else:
-            abort(404, message='The file %s does not exist, but there are files with matching paths' % upload_filepath, files=files)
-    except HTTPException as e:
-        raise e
-    except Exception as e:
-        logger = get_logger(
-            __name__, endpoint='raw', action='get',
-            upload_hash=upload_hash, upload_filepath=upload_filepath)
-        logger.error('Exception on accessing raw data', exc_info=e)
-        abort(500, message='Could not accessing the raw data.')
-
-
-@app.route('%s/raw/<string:upload_hash>' % base_path, methods=['GET'])
-def get_raw_files(upload_hash):
-    """
-    Get multiple raw calculation files.
-
-    .. :quickref: raw; Get multiple raw calculation files.
-
-    **Example request**:
-
-    .. sourcecode:: http
-
-        GET /nomad/api/raw/W36aqCzAKxOCfIiMFsBJh3nHPb4a?files=Si/si.out,Si/aux.txt HTTP/1.1
-        Accept: application/gz
-
-    :param string upload_hash: the hash based identifier of the upload
-    :qparam string files: a comma separated list of file path
-    :qparam compress: any value to use compression, default is no compression
-    :resheader Content-Type: application/json
-    :status 200: calc raw data successfully retrieved
-    :status 404: calc with given hash does not exist or one of the given files does not exist
-    :returns: a streamed .zip archive with the raw data
-    """
-    files_str = request.args.get('files', None)
-    compress = request.args.get('compress', None) is not None
-
-    if files_str is None:
-        abort(400, message="No files argument given.")
-    files = [fix_file_paths(file.strip()) for file in files_str.split(',')]
-
-    return respond_to_get_raw_files(upload_hash, files, compress)
-
-
-@app.route('%s/raw/<string:upload_hash>' % base_path, methods=['POST'])
-def get_raw_files_post(upload_hash):
-    """
-    Get multiple raw calculation files.
-
-    .. :quickref: raw; Get multiple raw calculation files.
-
-    **Example request**:
-
-    .. sourcecode:: http
-
-        POST /nomad/api/raw/W36aqCzAKxOCfIiMFsBJh3nHPb4a HTTP/1.1
-        Accept: application/gz
-        Content-Type: application/json
-
-        {
-            "files": ["Si/si.out", "Si/aux.txt"]
-        }
-
-    :param string upload_hash: the hash based identifier of the upload
-    :jsonparam files: a comma separated list of file paths
-    :jsonparam compress: boolean to enable compression (true), default is not compression (false)
-    :resheader Content-Type: application/json
-    :status 200: calc raw data successfully retrieved
-    :status 404: calc with given hash does not exist or one of the given files does not exist
-    :returns: a streamed .zip archive with the raw data
-    """
-    json_data = request.get_json()
-    if json_data is None:
-        json_data = {}
-
-    if 'files' not in json_data:
-        abort(400, message='No files given, use key "files" in json body to provide file paths.')
-    compress = json_data.get('compress', False)
-    if not isinstance(compress, bool):
-        abort(400, message='Compress value %s is not a bool.' % str(compress))
-    files = [fix_file_paths(file.strip()) for file in json_data['files']]
-
-    return respond_to_get_raw_files(upload_hash, files, compress)
-
-
-def respond_to_get_raw_files(upload_hash, files, compress=False):
-    logger = get_logger(__name__, endpoint='raw', action='get files', upload_hash=upload_hash)
-
-    repository_file = RepositoryFile(upload_hash)
-    if not repository_file.exists():
-        abort(404, message='The upload with hash %s does not exist.' % upload_hash)
+        except Restricted:
+            abort(401, message='Not authorized to access upload %s.' % upload_id)
+        except KeyError:
+            files = list(upload_files.raw_file_manifest(path_prefix=upload_filepath))
+            if len(files) == 0:
+                abort(404, message='The file %s does not exist.' % upload_filepath)
+            else:
+                abort(404, message='The file %s does not exist, but there are files with matching paths' % upload_filepath, files=files)
+
+
+raw_files_request_model = api.model('RawFilesRequest', {
+    'files': fields.List(
+        fields.String, default=[], description='List of files to download.'),
+    'compress': fields.Boolean(
+        default=False,
+        description='Enable compression, default is no compression.')
+})
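+
+# Example request body for the POST endpoint below (paths are hypothetical):
+#
+#     {"files": ["Si/si.out", "Si/aux.txt"], "compress": true}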
+
+raw_files_request_parser = api.parser()
+raw_files_request_parser.add_argument(
+    'files', required=True, type=str, help='Comma separated list of files to download.', location='args')
+raw_files_request_parser.add_argument(**raw_file_compress_argument)
+raw_files_request_parser.add_argument(**signature_token_argument)
+
+
+@ns.route('/<string:upload_id>')
+@api.doc(params={
+    'upload_id': 'The unique id for the requested upload.'
+})
+class RawFilesResource(Resource):
+    @api.doc('get_files')
+    @api.response(404, 'The upload or path does not exist')
+    @api.response(200, 'File(s) sent', headers={'Content-Type': 'application/gz'})
+    @api.expect(raw_files_request_model, validate=True)
+    @login_if_available
+    def post(self, upload_id):
+        """
+        Download multiple raw calculation files in a .zip file.
+        Zip files are streamed; instead of 401 errors, the zip file will just not contain
+        any files that the user is not authorized to access.
+        """
+        json_data = request.get_json()
+        compress = json_data.get('compress', False)
+        files = [file.strip() for file in json_data['files']]
+
+        return respond_to_get_raw_files(upload_id, files, compress)
+
+    @api.doc('get_files_alternate')
+    @api.response(404, 'The upload or path does not exist')
+    @api.response(200, 'File(s) sent', headers={'Content-Type': 'application/gz'})
+    @api.expect(raw_files_request_parser, validate=True)
+    @login_if_available
+    @with_signature_token
+    def get(self, upload_id):
+        """
+        Download multiple raw calculation files in a .zip file.
+        Zip files are streamed; instead of 401 errors, the zip file will just not contain
+        any files that the user is not authorized to access.
+        """
+        files_str = request.args.get('files', None)
+        compress = request.args.get('compress', 'false') == 'true'
+
+        if files_str is None:
+            abort(400, message="No files argument given.")
+        files = [file.strip() for file in files_str.split(',')]
+
+        return respond_to_get_raw_files(upload_id, files, compress)
+
+
+def respond_to_get_raw_files(upload_id, files, compress=False):
+    upload_files = UploadFiles.get(
+        upload_id, create_authorization_predicate(upload_id))
+    if upload_files is None:
+        abort(404, message='The upload with id %s does not exist.' % upload_id)
 
     def generator():
         """ Stream a zip file with all files using zipstream. """
         def iterator():
             """ Replace the directory based iter of zipstream with an iter over all given files. """
-            try:
-                with repository_file.zipped_container.zip_file() as zf:
-                    for filename in files:
-                        # Write a file to the zipstream.
-                        try:
-                            with zf.open(repository_file.zipped_container.get_zip_path(filename)) as f:
-                                def iter_content():
-                                    while True:
-                                        data = f.read(100000)
-                                        if not data:
-                                            break
-                                        yield data
-
-                                yield dict(arcname=filename, iterable=iter_content())
-                        except KeyError as e:
-                            # files that are not found, will not be returned
-                            pass
-
-            except Exception as e:
-                logger.error('Exception while accessing files.', exc_info=e)
+            for filename in files:
+                # Write a file to the zipstream.
+                try:
+                    with upload_files.raw_file(filename, 'rb') as f:
+                        def iter_content():
+                            while True:
+                                data = f.read(100000)
+                                if not data:
+                                    break
+                                yield data
+
+                        yield dict(arcname=filename, iterable=iter_content())
+                except KeyError:
+                    # files that are not found will not be returned
+                    pass
+                except Restricted:
+                    # due to the streaming nature, we cannot raise 401 here
+                    # we just leave it out in the download
+                    pass
 
         compression = ZIP_DEFLATED if compress else ZIP_STORED
         zip_stream = zipstream.ZipFile(mode='w', compression=compression, allowZip64=True)
@@ -215,6 +192,6 @@ def respond_to_get_raw_files(upload_hash, files, compress=False):
         for chunk in zip_stream:
             yield chunk
 
-    response = Response(generator(), mimetype='application/zip')
-    response.headers['Content-Disposition'] = 'attachment; filename={}'.format('%s.zip' % upload_hash)
+    response = Response(stream_with_context(generator()), mimetype='application/zip')
+    response.headers['Content-Disposition'] = 'attachment; filename={}'.format('%s.zip' % upload_id)
     return response
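
For orientation, a minimal client-side sketch of the new multi-file endpoint. The base URL, upload id, and file names are placeholders; the request body follows the `RawFilesRequest` model above, and the response is a streamed zip, so unauthorized files are silently omitted rather than producing a 401:

```python
import requests

API_BASE = 'http://localhost/nomad/api'  # placeholder base URL
upload_id = '<upload_id>'                # placeholder

# POST a RawFilesRequest body and stream the resulting .zip.
response = requests.post(
    '%s/raw/%s' % (API_BASE, upload_id),
    json=dict(files=['vasprun.xml', 'OUTCAR'], compress=True),
    stream=True)
response.raise_for_status()

# Write the zip to disk chunk by chunk instead of buffering it in memory.
with open('%s.zip' % upload_id, 'wb') as f:
    for chunk in response.iter_content(chunk_size=1024 * 1024):
        if chunk:
            f.write(chunk)
```
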
diff --git a/nomad/api/repo.py b/nomad/api/repo.py
new file mode 100644
index 0000000000000000000000000000000000000000..7131715ee52d1f937bb8df188b7fa34412ca3d3b
--- /dev/null
+++ b/nomad/api/repo.py
@@ -0,0 +1,117 @@
+# Copyright 2018 Markus Scheidgen
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#   http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an"AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+"""
+The repository API of the nomad@FAIRDI APIs. Currently allows resolving repository
+metadata.
+"""
+
+from flask_restplus import Resource, abort, fields
+
+from nomad.files import UploadFiles, Restricted
+
+from .app import api
+from .auth import login_if_available, create_authorization_predicate
+from .common import pagination_model, pagination_request_parser, calc_route
+
+ns = api.namespace('repo', description='Access repository metadata.')
+
+
+@calc_route(ns)
+class RepoCalcResource(Resource):
+    @api.response(404, 'The upload or calculation does not exist')
+    @api.response(401, 'Not authorized to access the calculation')
+    @api.response(200, 'Metadata sent', fields.Raw)
+    @api.doc('get_repo_calc')
+    @login_if_available
+    def get(self, upload_id, calc_id):
+        """
+        Get calculation metadata in repository form.
+
+        Repository metadata only entails the quantities shown in the repository.
+        Calcs are referenced via *upload_id*, *calc_id* pairs.
+        """
+        # TODO use elastic search instead of the files
+        # TODO add missing user metadata (from elastic or repo db)
+        upload_files = UploadFiles.get(upload_id, create_authorization_predicate(upload_id, calc_id))
+        if upload_files is None:
+            abort(404, message='There is no upload %s' % upload_id)
+
+        try:
+            return upload_files.metadata.get(calc_id), 200
+        except Restricted:
+            abort(401, message='Not authorized to access %s/%s.' % (upload_id, calc_id))
+        except KeyError:
+            abort(404, message='There is no calculation for %s/%s' % (upload_id, calc_id))
+
+
+repo_calcs_model = api.model('RepoCalculations', {
+    'pagination': fields.Nested(pagination_model),
+    'results': fields.List(fields.Raw)
+})
+
+repo_request_parser = pagination_request_parser.copy()
+repo_request_parser.add_argument(
+    'owner', type=str,
+    help='Specify which calcs to return: ``all``, ``user``, ``staging``, default is ``all``')
+
+
+@ns.route('/')
+class RepoCalcsResource(Resource):
+    @api.doc('get_calcs')
+    @api.response(400, 'Invalid requests, e.g. wrong owner type')
+    @api.expect(repo_request_parser, validate=True)
+    @api.marshal_with(repo_calcs_model, skip_none=True, code=200, description='Metadata sent')
+    @login_if_available
+    def get(self):
+        """
+        Get *'all'* calculations in repository form, paginated.
+
+        This is currently not implemented!
+        """
+        return dict(pagination=dict(total=0, page=1, per_page=10), results=[]), 200
+        # page = int(request.args.get('page', 1))
+        # per_page = int(request.args.get('per_page', 10))
+        # owner = request.args.get('owner', 'all')
+
+        # try:
+        #     assert page >= 1
+        #     assert per_page > 0
+        # except AssertionError:
+        #     abort(400, message='invalid pagination')
+
+        # if owner == 'all':
+        #     search = RepoCalc.search().query('match_all')
+        # elif owner == 'user':
+        #     if g.user is None:
+        #         abort(401, message='Authentication required for owner value user.')
+        #     search = RepoCalc.search().query('match_all')
+        #     search = search.filter('term', user_id=str(g.user.user_id))
+        # elif owner == 'staging':
+        #     if g.user is None:
+        #         abort(401, message='Authentication required for owner value user.')
+        #     search = RepoCalc.search().query('match_all')
+        #     search = search.filter('term', user_id=str(g.user.user_id)).filter('term', staging=True)
+        # else:
+        #     abort(400, message='Invalid owner value. Valid values are all|user|staging, default is all')
+
+        # search = search[(page - 1) * per_page: page * per_page]
+        # return {
+        #     'pagination': {
+        #         'total': search.count(),
+        #         'page': page,
+        #         'per_page': per_page
+        #     },
+        #     'results': [result.json_dict for result in search]
+        # }, 200
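
A corresponding client sketch for the new repo endpoint; the base URL and ids are placeholders, and `RepoCalcResource` answers 200/401/404 as documented above:

```python
import requests

API_BASE = 'http://localhost/nomad/api'  # placeholder base URL

# Calcs are referenced via upload_id/calc_id pairs.
response = requests.get('%s/repo/%s/%s' % (API_BASE, '<upload_id>', '<calc_id>'))
if response.status_code == 200:
    print(response.json())  # the repository metadata for this calc
elif response.status_code == 401:
    print('not authorized to access this calculation')
else:
    print(response.json().get('message', 'error'))
```
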
diff --git a/nomad/api/repository.py b/nomad/api/repository.py
deleted file mode 100644
index 252490e4d35a0ab4bb9a27aa57591f4b4ef1b708..0000000000000000000000000000000000000000
--- a/nomad/api/repository.py
+++ /dev/null
@@ -1,193 +0,0 @@
-# Copyright 2018 Markus Scheidgen
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-#   http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an"AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-
-"""
-The repository API of the nomad@FAIRDI APIs. Currently allows to resolve repository
-meta-data.
-"""
-
-from elasticsearch.exceptions import NotFoundError
-from flask import g, request
-from flask_restful import Resource, abort
-
-from nomad.repo import RepoCalc
-
-from .app import api, base_path
-from .auth import login_if_available
-
-
-class RepoCalcRes(Resource):
-    def get(self, upload_hash, calc_hash):
-        """
-        Get calculation data in repository form, which only entails the quanties shown
-        in the repository. This is basically the elastic search index entry for the
-        requested calculations. Calcs are references via *upload_hash*, *calc_hash*
-        pairs.
-
-        .. :quickref: repo; Get calculation data in repository form.
-
-        **Example request**:
-
-        .. sourcecode:: http
-
-            GET /nomad/api/repo/W36aqCzAKxOCfIiMFsBJh3nHPb4a/7ddvtfRfZAvc3Crr7jOJ8UH0T34I HTTP/1.1
-            Accept: application/json
-
-        **Example response**:
-
-        .. sourcecode:: http
-
-            HTTP/1.1 200 OK
-            Vary: Accept
-            Content-Type: application/json
-
-            {
-                "calc_hash":"7ddvtfRfZAvc3Crr7jOJ8UH0T34I",
-                "upload_time":"2018-08-30T08:41:51.771367",
-                "upload_id":"5b87adb813a441000a70a968",
-                "upload_hash":"W36aqCzAKxOCfIiMFsBJh3nHPb4a",
-                "mainfile":"RopD3Mo8oMV_-E5bh8uW5PiiCRkH1/data/BrK_svSi/TFCC010.CAB/vasprun.xml.relax1",
-                "program_name":"VASP",
-                "program_version":"4.6.35  3Apr08 complex  parallel LinuxIFC",
-                "chemical_composition":"BrKSi2",
-                "basis_set_type":"plane waves",
-                "atom_species":[
-                    35,
-                    19,
-                    14,
-                    14
-                ],
-                "system_type":"Bulk",
-                "crystal_system":"orthorhombic",
-                "space_group_number":47,
-                "configuration_raw_gid":"sq6wTJjRKb2VTajoDLVWDxHCgyN6i",
-                "XC_functional_name":"GGA_X_PBE"
-            }
-
-        :param string upload_hash: the hash of the upload (from uploaded file contents)
-        :param string calc_hash: the hash of the calculation (from mainfile)
-        :resheader Content-Type: application/json
-        :status 200: calc successfully retrieved
-        :status 404: calc with given hashes does not exist
-        :returns: the repository calculation entry
-        """
-        try:
-            return RepoCalc.get(id='%s/%s' % (upload_hash, calc_hash)).json_dict, 200
-        except NotFoundError:
-            abort(404, message='There is no calculation for %s/%s' % (upload_hash, calc_hash))
-        except Exception as e:
-            abort(500, message=str(e))
-
-
-class RepoCalcsRes(Resource):
-    @login_if_available
-    def get(self):
-        """
-        Get *'all'* calculations in repository from, paginated.
-
-        .. :quickref: repo; Get *'all'* calculations in repository from, paginated.
-
-        **Example request**:
-
-        .. sourcecode:: http
-
-            GET /nomad/api/repo?page=1&per_page=25 HTTP/1.1
-            Accept: application/json
-
-        **Example response**:
-
-        .. sourcecode:: http
-
-            HTTP/1.1 200 OK
-            Vary: Accept
-            Content-Type: application/json
-
-            {
-                "pagination":{
-                    "total":1,
-                    "page":1,
-                    "per_page":25
-                },
-                "results":[
-                    {
-                        "calc_hash":"7ddvtfRfZAvc3Crr7jOJ8UH0T34I",
-                        "upload_time":"2018-08-30T08:41:51.771367",
-                        "upload_id":"5b87adb813a441000a70a968",
-                        "upload_hash":"W36aqCzAKxOCfIiMFsBJh3nHPb4a",
-                        "mainfile":"RopD3Mo8oMV_-E5bh8uW5PiiCRkH1/data/BrK_svSi/TFCC010.CAB/vasprun.xml.relax1",
-                        "program_name":"VASP",
-                        "program_version":"4.6.35  3Apr08 complex  parallel LinuxIFC",
-                        "chemical_composition":"BrKSi2",
-                        "basis_set_type":"plane waves",
-                        "atom_species":[
-                            35,
-                            19,
-                            14,
-                            14
-                        ],
-                        "system_type":"Bulk",
-                        "crystal_system":"orthorhombic",
-                        "space_group_number":47,
-                        "configuration_raw_gid":"sq6wTJjRKb2VTajoDLVWDxHCgyN6i",
-                        "XC_functional_name":"GGA_X_PBE"
-                    }
-                ]
-            }
-
-        :qparam int page: the page starting with 1
-        :qparam int per_page: desired calcs per page
-        :qparam string owner: specifies which cals to return: all|user|staging, default is all
-        :resheader Content-Type: application/json
-        :status 200: calcs successfully retrieved
-        :returns: a list of repository entries in ``results`` and pagination info
-        """
-        # TODO use argparse? bad request reponse an bad params, pagination as decorator
-        page = int(request.args.get('page', 1))
-        per_page = int(request.args.get('per_page', 10))
-        owner = request.args.get('owner', 'all')
-
-        try:
-            assert page >= 1
-            assert per_page > 0
-        except AssertionError:
-            abort(400, message='invalid pagination')
-
-        if owner == 'all':
-            search = RepoCalc.search().query('match_all')
-        elif owner == 'user':
-            if g.user is None:
-                abort(401, message='Authentication required for owner value user.')
-            search = RepoCalc.search().query('match_all')
-            search = search.filter('term', user_id=str(g.user.user_id))
-        elif owner == 'staging':
-            if g.user is None:
-                abort(401, message='Authentication required for owner value user.')
-            search = RepoCalc.search().query('match_all')
-            search = search.filter('term', user_id=str(g.user.user_id)).filter('term', staging=True)
-        else:
-            abort(400, message='Invalid owner value. Valid values are all|user|staging, default is all')
-
-        search = search[(page - 1) * per_page: page * per_page]
-        return {
-            'pagination': {
-                'total': search.count(),
-                'page': page,
-                'per_page': per_page
-            },
-            'results': [result.json_dict for result in search]
-        }
-
-
-api.add_resource(RepoCalcsRes, '%s/repo' % base_path)
-api.add_resource(RepoCalcRes, '%s/repo/<string:upload_hash>/<string:calc_hash>' % base_path)
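
The pagination contract (`page`, `per_page`, `owner`) documented in the removed module carries over to the new `/repo` endpoint; a client-side sketch against that contract, assuming a placeholder base URL and noting that the server currently returns an empty stub:

```python
import requests

API_BASE = 'http://localhost/nomad/api'  # placeholder base URL

# page starts at 1; owner is one of all|user|staging, default is all.
response = requests.get(
    '%s/repo/' % API_BASE,
    params=dict(page=1, per_page=25, owner='all'))
data = response.json()

print('total results: %d' % data['pagination']['total'])
for entry in data['results']:
    print(entry.get('mainfile'))
```
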
diff --git a/nomad/api/upload.py b/nomad/api/upload.py
index ec387fc4623f80d88b13dab59eb296124ad49142..17ab525ac4a66c7deb77d57e1fee85d08be55715 100644
--- a/nomad/api/upload.py
+++ b/nomad/api/upload.py
@@ -12,156 +12,124 @@
 # See the License for the specific language governing permissions and
 # limitations under the License.
 
-from datetime import datetime
+"""
+The upload API of the nomad@FAIRDI APIs. Provides endpoints to upload files and
+get the processing status of uploads.
+"""
 
 from flask import g, request
-from flask_restful import Resource, abort
+from flask_restplus import Resource, fields, abort
+from datetime import datetime
+from werkzeug.datastructures import FileStorage
+import os.path
 
-from nomad.files import UploadFile
-from nomad.processing import NotAllowedDuringProcessing, Upload
-from nomad.utils import get_logger
+from nomad import config
+from nomad.processing import Upload, FAILURE
+from nomad.processing import ProcessAlreadyRunning
+from nomad.files import ArchiveBasedStagingUploadFiles
 
-from .app import api, base_path
+from .app import api, with_logger
 from .auth import login_really_required
-
-"""
-The upload API of the nomad@FAIRDI APIs. Provides endpoints to create uploads, upload
-files, and retrieve the processing status of uploads.
-"""
-
-
-class UploadsRes(Resource):
-    """ Uploads """
+from .common import pagination_request_parser, pagination_model
+
+
+ns = api.namespace(
+    'uploads',
+    description='Uploading data and tracking the uploaded data and its processing.')
+
+
+proc_model = api.model('Processing', {
+    'tasks': fields.List(fields.String),
+    'current_task': fields.String,
+    'tasks_running': fields.Boolean,
+    'tasks_status': fields.String,
+    'errors': fields.List(fields.String),
+    'warnings': fields.List(fields.String),
+    'create_time': fields.DateTime(dt_format='iso8601'),
+    'complete_time': fields.DateTime(dt_format='iso8601'),
+    'current_process': fields.String,
+    'process_running': fields.Boolean,
+})
+
+metadata_model = api.model('MetaData', {
+    'with_embargo': fields.Boolean(default=False, description='Data with embargo is only visible to the uploader until the embargo period has ended.'),
+    'comment': fields.String(description='The comment is shown in the repository for each calculation.'),
+    'references': fields.List(fields.String, description='References allow linking calculations to external sources, e.g. URLs.'),
+    'coauthors': fields.List(fields.String, description='A list of co-authors given by user_id.'),
+    'shared_with': fields.List(fields.String, description='A list of users to share calculations with given by user_id.'),
+    '_upload_time': fields.List(fields.DateTime(dt_format='iso8601'), description='Override the upload time.'),
+    '_uploader': fields.List(fields.String, description='Override the uploader with the given user id.')
+})
+
+calc_metadata_model = api.inherit('CalcMetaData', metadata_model, {
+    'mainfile': fields.String(description='The calculation main output file is used to identify the calculation in the upload.'),
+    '_checksum': fields.String(description='Override the calculation checksum'),
+    '_pid': fields.String(description='Assign a specific pid. It must be unique.')
+})
+
+upload_metadata_model = api.inherit('UploadMetaData', metadata_model, {
+    'calculations': fields.List(fields.Nested(model=calc_metadata_model), description='Specific per calculation data that will override the upload data.')
+})
+
+upload_model = api.inherit('UploadProcessing', proc_model, {
+    'name': fields.String(
+        description='The name of the upload. This can be provided during upload '
+                    'using the name query parameter.'),
+    'upload_id': fields.String(
+        description='The unique id for the upload.'),
+    'metadata': fields.Nested(model=upload_metadata_model, description='Additional upload and calculation meta data.'),
+    'local_path': fields.String,
+    'upload_time': fields.DateTime(dt_format='iso8601'),
+})
+
+calc_model = api.inherit('UploadCalculationProcessing', proc_model, {
+    'calc_id': fields.String,
+    'mainfile': fields.String,
+    'upload_id': fields.String,
+    'parser': fields.String
+})
+
+upload_with_calcs_model = api.inherit('UploadWithPaginatedCalculations', upload_model, {
+    'processed_calcs': fields.Integer,
+    'total_calcs': fields.Integer,
+    'failed_calcs': fields.Integer,
+    'pending_calcs': fields.Integer,
+    'calcs': fields.Nested(model=api.model('UploadPaginatedCalculations', {
+        'pagination': fields.Nested(model=api.inherit('UploadCalculationPagination', pagination_model, {
+            'successes': fields.Integer,
+            'failures': fields.Integer,
+        })),
+        'results': fields.List(fields.Nested(model=calc_model))
+    }))
+})
+
+upload_command_model = api.model('UploadCommand', {
+    'command': fields.String(description='Currently commit is the only command.'),
+    'metadata': fields.Nested(model=upload_metadata_model, description='Additional upload and calculation meta data. Will replace previously given metadata.')
+})
+
+
+upload_metadata_parser = api.parser()
+upload_metadata_parser.add_argument('name', type=str, help='An optional name for the upload.', location='args')
+upload_metadata_parser.add_argument('local_path', type=str, help='Use a local file on the server.', location='args')
+upload_metadata_parser.add_argument('file', type=FileStorage, help='The file to upload.', location='files')
+
+
+@ns.route('/')
+class UploadListResource(Resource):
+    @api.doc('get_uploads')
+    @api.marshal_list_with(upload_model, skip_none=True, code=200, description='Uploads sent')
     @login_really_required
     def get(self):
-        """
-        Get a list of current users uploads.
-
-        .. :quickref: upload; Get a list of current users uploads.
-
-        **Example request**:
-
-        .. sourcecode:: http
-
-            GET /nomad/api/uploads HTTP/1.1
-            Accept: application/json
-
-        **Example response**:
-
-        .. sourcecode:: http
-
-            HTTP/1.1 200 OK
-            Vary: Accept
-            Content-Type: application/json
-
-            [
-                {
-                    "name": "examples_vasp_6.zip",
-                    "upload_id": "5b89469e0d80d40008077dbc",
-                    "presigned_url": "http://minio:9000/uploads/5b89469e0d80d40008077dbc?X-Amz-Algorithm=AWS4-...",
-                    "create_time": "2018-08-31T13:46:06.781000",
-                    "upload_time": "2018-08-31T13:46:07.531000",
-                    "is_stale": false,
-                    "completed": true,
-                    "status": "SUCCESS",
-                    "current_task": "cleanup",
-                    "tasks": ["uploading", "extracting", "parse_all", "cleanup"]
-                    "errors": [],
-                    "warnings": []
-                }
-            ]
-
-        :resheader Content-Type: application/json
-        :status 200: uploads successfully provided
-        :returns: list of :class:`nomad.data.Upload`
-        """
-        return [upload.json_dict for upload in Upload.user_uploads(g.user)], 200
+        """ Get the list of all uploads from the authenticated user. """
+        return [upload for upload in Upload.user_uploads(g.user)], 200
 
+    @api.doc('upload')
+    @api.marshal_with(upload_model, skip_none=True, code=200, description='Upload received')
+    @api.expect(upload_metadata_parser)
     @login_really_required
-    def post(self):
-        """
-        Create a new upload. Creating an upload on its own wont do much, but provide
-        a *presigned* upload URL. PUT a file to this URL to do the actual upload and
-        initiate the processing.
-
-        .. :quickref: upload; Create a new upload.
-
-        **Example request**:
-
-        .. sourcecode:: http
-
-            POST /nomad/api/uploads HTTP/1.1
-            Accept: application/json
-            Content-Type: application/json
-
-            {
-                "name": "vasp_data.zip"
-            }
-
-        **Example response**:
-
-        .. sourcecode:: http
-
-            HTTP/1.1 200 OK
-            Vary: Accept
-            Content-Type: application/json
-
-            {
-                "name": "vasp_data.zip",
-                "upload_id": "5b89469e0d80d40008077dbc",
-                "presigned_url": "http://minio:9000/uploads/5b89469e0d80d40008077dbc?X-Amz-Algorithm=AWS4-...",
-                "create_time": "2018-08-31T13:46:06.781000",
-                "upload_time": "2018-08-31T13:46:07.531000",
-                "is_stale": false,
-                "completed": true,
-                "status": "SUCCESS",
-                "current_task": "cleanup",
-                "tasks": ["uploading", "extracting", "parse_all", "cleanup"]
-                "errors": [],
-                "warnings": [],
-                "calcs": [
-                    {
-                        "current_task": "archiving",
-                        "tasks": ["parsing", "normalizing", "archiving"]
-                        "status": "SUCCESS",
-                        "errors": [],
-                        "warnings": [],
-                        "parser": "parsers/vasp",
-                        "mainfile": "Si.xml"
-                    }
-                ]
-            }
-
-        :jsonparam string name: An optional name for the upload.
-        :jsonparem string local_path: An optional path the a file that is already on the server.
-            In this case, uploading a file won't be possible, the local file is processed
-            immediatly as if it was uploaded.
-        :reqheader Content-Type: application/json
-        :resheader Content-Type: application/json
-        :status 200: upload successfully created
-        :returns: a new instance of :class:`nomad.data.Upload`
-        """
-        json_data = request.get_json()
-        if json_data is None:
-            json_data = {}
-
-        upload = Upload.create(
-            user=g.user,
-            name=json_data.get('name'),
-            local_path=json_data.get('local_path'))
-
-        if upload.local_path is not None:
-            logger = get_logger(
-                __name__, endpoint='uploads', action='post', upload_id=upload.upload_id)
-            logger.info('file uploaded offline')
-            upload.upload_time = datetime.now()
-            upload.process()
-            logger.info('initiated processing')
-
-        return upload.json_dict, 200
-
-    @login_really_required
-    def put(self):
+    @with_logger
+    def put(self, logger):
         """
         Upload a file and automatically create a new upload in the process.
         Can be used to upload files via browser or other http clients like curl.
@@ -172,137 +140,99 @@ class UploadsRes(Resource):
         filename or other meta-data. If a filename is available, it will become the
         name of the upload.
 
-        .. :quickref: upload; Upload a file directly and create an upload.
-
-        **Curl examples for both approaches**:
-
-        .. sourcecode:: sh
+        Example commands:
 
-            curl -X put "/nomad/api/uploads/" -F file=@local_file
-            curl "/nomad/api/uploads/" --upload-file local_file
-
-        :qparam name: an optional name for the upload
-        :status 200: upload successfully received.
-        :returns: the upload (see GET /uploads/<upload_id>)
+            curl -X PUT ".../nomad/api/uploads/" -F file=@local_file
+            curl ".../nomad/api/uploads/" --upload-file local_file
         """
+        local_path = request.args.get('local_path')
+        if local_path:
+            if not os.path.exists(local_path):
+                abort(404, message='The given local_path was not found.')
+
         # create upload
         upload = Upload.create(
             user=g.user,
-            name=request.args.get('name'))
-
-        logger = get_logger(__name__, endpoint='upload', action='put', upload_id=upload.upload_id)
-        logger.info('upload created')
+            name=request.args.get('name'),
+            local_path=local_path)
 
-        uploadFile = UploadFile(upload.upload_id)
-
-        if request.mimetype == 'application/multipart-formdata':
-            # multipart formdata, e.g. with curl -X put "url" -F file=@local_file
-            # might have performance issues for large files: https://github.com/pallets/flask/issues/2086
-            if 'file' in request.files:
-                abort(400, message='Bad multipart-formdata, there is no file part.')
-            file = request.files['file']
-            if upload.name is '':
-                upload.name = file.filename
-
-            file.save(uploadFile.os_path)
-        else:
-            # simple streaming data in HTTP body, e.g. with curl "url" -T local_file
-            try:
-                with uploadFile.open('wb') as f:
-                    while not request.stream.is_exhausted:
-                        f.write(request.stream.read(1024))
+        logger.info('upload created', upload_id=upload.upload_id)
 
-            except Exception as e:
-                logger.error('Error on streaming upload', exc_info=e)
-                abort(400, message='Some IO went wrong, download probably aborted/disrupted.')
-
-        if not uploadFile.is_valid:
-            uploadFile.delete()
+        try:
+            if local_path:
+                # file is already there and does not need to be received
+                upload_files = ArchiveBasedStagingUploadFiles(
+                    upload.upload_id, create=True, local_path=local_path)
+            elif request.mimetype == 'application/multipart-formdata':
+                # multipart formdata, e.g. with curl -X put "url" -F file=@local_file
+                # might have performance issues for large files: https://github.com/pallets/flask/issues/2086
+                if 'file' not in request.files:
+                    abort(400, message='Bad multipart-formdata, there is no file part.')
+                file = request.files['file']
+                if upload.name is None or upload.name == '':
+                    upload.name = file.filename
+
+                upload_files = ArchiveBasedStagingUploadFiles(upload.upload_id, create=True)
+
+                file.save(upload_files.upload_file_os_path)
+            else:
+                # simple streaming data in HTTP body, e.g. with curl "url" -T local_file
+
+                upload_files = ArchiveBasedStagingUploadFiles(upload.upload_id, create=True)
+
+                try:
+                    with open(upload_files.upload_file_os_path, 'wb') as f:
+                        while not request.stream.is_exhausted:
+                            f.write(request.stream.read(1024))
+
+                except Exception as e:
+                    logger.warning('Error on streaming upload', exc_info=e)
+                    abort(400, message='Some IO went wrong, upload probably aborted/disrupted.')
+        except Exception as e:
+            upload_files.delete()
             upload.delete()
-            abort(400, message='Bad file format, excpected %s.' % ", ".join(UploadFile.formats))
+            logger.info('Invalid or aborted upload')
+            raise e
 
         logger.info('received uploaded file')
         upload.upload_time = datetime.now()
-        upload.process()
+        upload.process_upload()
         logger.info('initiated processing')
 
-        return upload.json_dict, 200
+        return upload, 200
+
+
+class ProxyUpload:
+    def __init__(self, upload, calcs):
+        self.upload = upload
+        self.calcs = calcs
+
+    def __getattr__(self, name):
+        return self.upload.__getattribute__(name)
 
 
-class UploadRes(Resource):
-    """ Uploads """
+@ns.route('/<string:upload_id>')
+@api.doc(params={'upload_id': 'The unique id for the requested upload.'})
+class UploadResource(Resource):
+    @api.doc('get_upload')
+    @api.response(404, 'Upload does not exist')
+    @api.response(400, 'Invalid parameters')
+    @api.marshal_with(upload_with_calcs_model, skip_none=True, code=200, description='Upload sent')
+    @api.expect(pagination_request_parser)
     @login_really_required
-    def get(self, upload_id):
+    def get(self, upload_id: str):
         """
-        Get an update on an existing upload. Will not only return the upload, but
-        also its calculations paginated. Use the pagination params to determine
-        the page.
-
-        .. :quickref: upload; Get an update for an existing upload.
-
-        **Example request**:
-
-        .. sourcecode:: http
-
-            GET /nomad/api/uploads/5b89469e0d80d40008077dbc HTTP/1.1
-            Accept: application/json
-
-        **Example response**:
-
-        .. sourcecode:: http
-
-            HTTP/1.1 200 OK
-            Vary: Accept
-            Content-Type: application/json
-
-            {
-                "name": "vasp_data.zip",
-                "upload_id": "5b89469e0d80d40008077dbc",
-                "presigned_url": "http://minio:9000/uploads/5b89469e0d80d40008077dbc?X-Amz-Algorithm=AWS4-...",
-                "create_time": "2018-08-31T13:46:06.781000",
-                "upload_time": "2018-08-31T13:46:07.531000",
-                "is_stale": false,
-                "completed": true,
-                "status": "SUCCESS",
-                "current_task": "cleanup",
-                "tasks": ["uploading", "extracting", "parse_all", "cleanup"]
-                "errors": [],
-                "warnings": [],
-                "calcs": {
-                    "pagination": {
-                        "total": 1,
-                        "page": 1,
-                        "per_page": 25
-                    },
-                    "results": [
-                        {
-                            "current_task": "archiving",
-                            "tasks": ["parsing", "normalizing", "archiving"]
-                            "status": "SUCCESS",
-                            "errors": [],
-                            "warnings": [],
-                            "parser": "parsers/vasp",
-                            "mainfile": "Si.xml"
-                        }
-                    ]
-                }
-            }
-
-        :param string upload_id: the id for the upload
-        :qparam int page: the page starting with 1
-        :qparam int per_page: desired calcs per page
-        :qparam str order_by: the field to sort the calcs by, use [status,mainfile]
-        :resheader Content-Type: application/json
-        :status 200: upload successfully updated and retrieved
-        :status 404: upload with id does not exist
-        :returns: the :class:`nomad.data.Upload` instance
+        Get an update for an existing upload.
+
+        Will not only return the upload, but also its calculations paginated.
+        Use the pagination params to determine the page.
         """
         try:
             upload = Upload.get(upload_id)
         except KeyError:
             abort(404, message='Upload with id %s does not exist.' % upload_id)
 
-        if upload.user_id != str(g.user.user_id):
+        if upload.user_id != str(g.user.user_id) and not g.user.is_admin:
             abort(404, message='Upload with id %s does not exist.' % upload_id)
 
         try:
@@ -319,181 +249,131 @@ class UploadRes(Resource):
         except AssertionError:
             abort(400, message='invalid pagination')
 
-        if order_by not in ['mainfile', 'status', 'parser']:
+        if order_by not in ['mainfile', 'tasks_status', 'parser']:
             abort(400, message='invalid order_by field %s' % order_by)
 
         order_by = ('-%s' if order == -1 else '+%s') % order_by
 
         calcs = upload.all_calcs((page - 1) * per_page, page * per_page, order_by)
         failed_calcs = upload.failed_calcs
-        result = upload.json_dict
-        result['calcs'] = {
+        result = ProxyUpload(upload, {
             'pagination': dict(
                 total=upload.total_calcs, page=page, per_page=per_page,
                 successes=upload.processed_calcs - failed_calcs, failures=failed_calcs),
-            'results': [calc.json_dict for calc in calcs]
-        }
+            'results': [calc for calc in calcs]
+        })
 
         return result, 200
 
+    @api.doc('delete_upload')
+    @api.response(404, 'Upload does not exist')
+    @api.response(401, 'Upload does not belong to authenticated user.')
+    @api.response(400, 'The upload is still/already processed')
+    @api.marshal_with(upload_model, skip_none=True, code=200, description='Upload deleted')
     @login_really_required
-    def post(self, upload_id):
+    @with_logger
+    def delete(self, upload_id: str, logger):
         """
-        Move an upload out of the staging area. This changes the visibility of the upload.
-        Clients can specify, if the calcs should be restricted.
-
-        .. :quickref: upload; Move an upload out of the staging area.
-
-        **Example request**:
-
-        .. sourcecode:: http
-
-            POST /nomad/api/uploads HTTP/1.1
-            Accept: application/json
-            Content-Type: application/json
+        Delete an existing upload.
 
-            {
-                "operation": "unstage"
-            }
-
-
-        :param string upload_id: the upload id
-        :resheader Content-Type: application/json
-        :status 200: upload unstaged successfully
-        :status 404: upload could not be found
-        :status 400: if the operation is not supported
-        :returns: the upload record
+        Only uploads that are still in staging, not already deleted, not still uploading,
+        and not currently being processed can be deleted.
         """
         try:
             upload = Upload.get(upload_id)
         except KeyError:
             abort(404, message='Upload with id %s does not exist.' % upload_id)
 
-        if upload.user_id != str(g.user.user_id):
-            abort(404, message='Upload with id %s does not exist.' % upload_id)
-
-        json_data = request.get_json()
-        if json_data is None:
-            json_data = {}
-
-        operation = json_data.get('operation')
-        if operation == 'unstage':
-            upload.unstage()
-            return upload.json_dict, 200
+        if upload.user_id != str(g.user.user_id) and not g.user.is_admin:
+            abort(401, message='Upload with id %s does not belong to you.' % upload_id)
 
-        abort(400, message='Unsuported operation %s.' % operation)
+        if upload.tasks_running:
+            abort(400, message='The upload is not processed yet')
 
+        try:
+            upload.delete_upload()
+        except ProcessAlreadyRunning:
+            abort(400, message='The upload is still being processed')
+        except Exception as e:
+            logger.error('could not delete processing upload', exc_info=e)
+            raise e
+
+        return upload, 200
+
+    @api.doc('exec_upload_command')
+    @api.response(404, 'Upload does not exist or not in staging')
+    @api.response(400, 'Operation is not supported or the upload is still/already processed')
+    @api.response(401, 'If the command is not allowed for the current user')
+    @api.marshal_with(upload_model, skip_none=True, code=200, description='Upload committed successfully')
+    @api.expect(upload_command_model)
     @login_really_required
-    def delete(self, upload_id):
+    def post(self, upload_id):
         """
-        Deletes an existing upload. Only ``is_ready`` or ``is_stale`` uploads
-        can be deleted. Deleting an upload in processing is not allowed.
-
-        .. :quickref: upload; Delete an existing upload.
+        Execute an upload command. Available commands: ``commit``
 
-        **Example request**:
+        Commit accepts further metadata that allows providing coauthors, comments,
+        external references, etc. See the model for details. The fields that start with
+        an underscore (``_``) are only available for users with administrative privileges.
 
-        .. sourcecode:: http
-
-            DELETE /nomad/api/uploads/5b89469e0d80d40008077dbc HTTP/1.1
-            Accept: application/json
-
-        :param string upload_id: the id for the upload
-        :resheader Content-Type: application/json
-        :status 200: upload successfully deleted
-        :status 400: upload cannot be deleted
-        :status 404: upload with id does not exist
-        :returns: the :class:`nomad.data.Upload` instance with the latest processing state
+        Committing changes the visibility of the upload. Clients can specify the
+        visibility via metadata.
         """
         try:
             upload = Upload.get(upload_id)
         except KeyError:
             abort(404, message='Upload with id %s does not exist.' % upload_id)
 
-        if upload.user_id != str(g.user.user_id):
+        if upload.user_id != str(g.user.user_id) and not g.user.is_admin:
             abort(404, message='Upload with id %s does not exist.' % upload_id)
 
-        try:
-            upload.delete()
-            return upload.json_dict, 200
-        except NotAllowedDuringProcessing:
-            abort(400, message='You must not delete an upload during processing.')
-
-
-class UploadFileRes(Resource):
-    """
-    Upload a file to an existing upload. Can be used to upload files via bowser
-    or other http clients like curl. This will start the processing of the upload.
-
-    There are two basic ways to upload a file: multipart-formdata or simply streaming
-    the file data. Both are supported. The later one does not allow to transfer a
-    filename or other meta-data. If a filename is available, it will become the
-    name of the upload.
-
-    .. :quickref: upload; Upload a file to an existing upload.
-
-    **Curl examples for both approaches**:
-
-    .. sourcecode:: sh
-
-        curl -X put "/nomad/api/uploads/5b89469e0d80d40008077dbc/file" -F file=@local_file
-        curl "/nomad/api/uploads/5b89469e0d80d40008077dbc/file" --upload-file local_file
-
-    :param string upload_id: the upload_id of the upload
-    :resheader Content-Type: application/json
-    :status 200: upload successfully received.
-    :status 404: upload with given id does not exist
-    :status 400: if the fileformat is not supported or the form data is different than expected.
-    :returns: the upload (see GET /uploads/<upload_id>)
-    """
-    @login_really_required
-    def put(self, upload_id):
-        logger = get_logger(__name__, endpoint='upload', action='put', upload_id=upload_id)
+        json_data = request.get_json()
+        if json_data is None:
+            json_data = {}
 
-        try:
-            upload = Upload.get(upload_id)
-        except KeyError:
-            abort(404, message='Upload with id %s does not exist.' % upload_id)
+        command = json_data.get('command')
 
-        if upload.upload_time is not None:
-            abort(400, message='A file was already uploaded to this uploade before.')
+        metadata = json_data.get('metadata', {})
+        for key in metadata:
+            if key.startswith('_'):
+                if not g.user.is_admin:
+                    abort(401, message='Only admin users can use _metadata_keys.')
+                break
 
-        uploadFile = UploadFile(upload_id)
+        if command == 'commit':
+            if upload.tasks_running:
+                abort(400, message='The upload is not processed yet')
+            if upload.tasks_status == FAILURE:
+                abort(400, message='Cannot commit an upload that failed processing')
+            try:
+                upload.metadata = metadata
+                upload.commit_upload()
+            except ProcessAlreadyRunning:
+                abort(400, message='The upload is still/already being processed')
 
-        if request.mimetype == 'application/multipart-formdata':
-            # multipart formdata, e.g. with curl -X put "url" -F file=@local_file
-            # might have performance issues for large files: https://github.com/pallets/flask/issues/2086
-            if 'file' in request.files:
-                abort(400, message='Bad multipart-formdata, there is no file part.')
-            file = request.files['file']
-            if upload.name is '':
-                upload.name = file.filename
+            return upload, 200
 
-            file.save(uploadFile.os_path)
-        else:
-            # simple streaming data in HTTP body, e.g. with curl "url" -T local_file
-            try:
-                uploadFile.create_dirs()
-                with uploadFile.open('wb') as f:
-                    while not request.stream.is_exhausted:
-                        f.write(request.stream.read(1024))
+        abort(400, message='Unsupported command %s.' % command)
 
-            except Exception as e:
-                logger.error('Error on streaming upload', exc_info=e)
-                abort(400, message='Some IO went wrong, download probably aborted/disrupted.')
 
-        if not uploadFile.is_valid:
-            uploadFile.delete()
-            abort(400, message='Bad file format, excpected %s.' % ", ".join(UploadFile.formats))
+upload_command_response_model = api.model('UploadCommandResponse', {
+    'upload_url': fields.Url,
+    'upload_command': fields.String
+})
 
-        logger.info('received uploaded file')
-        upload.upload_time = datetime.now()
-        upload.process()
-        logger.info('initiated processing')
 
-        return upload.json_dict, 200
+@ns.route('/command')
+class UploadCommandResource(Resource):
+    @api.doc('get_upload_command')
+    @api.marshal_with(upload_command_response_model, code=200, description='Upload command sent')
+    @login_really_required
+    def get(self):
+        """ Get url and example command for shell based uploads. """
+        upload_url = 'http://%s:%s%s/uploads/' % (
+            config.services.api_host,
+            config.services.api_port,
+            config.services.api_base_path)
 
+        upload_command = 'curl -X PUT -H "X-Token: %s" "%s" -F file=@<local_file>' % (
+            g.user.get_auth_token().decode('utf-8'), upload_url)
 
-api.add_resource(UploadsRes, '%s/uploads' % base_path)
-api.add_resource(UploadRes, '%s/uploads/<string:upload_id>' % base_path)
-api.add_resource(UploadFileRes, '%s/uploads/<string:upload_id>/file' % base_path)
+        return dict(upload_url=upload_url, upload_command=upload_command), 200
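
To tie the upload endpoints together, a sketch of a streamed upload followed by polling the processing state and committing the result. The token, file name, and base URL are placeholders; field names follow the Processing/UploadProcessing models above:

```python
import time
import requests

API_BASE = 'http://localhost/nomad/api'  # placeholder base URL
headers = {'X-Token': '<auth token>'}    # placeholder auth token

# Stream the file in the HTTP body (the `curl ... --upload-file` style).
with open('vasp_data.zip', 'rb') as f:
    upload = requests.put(
        '%s/uploads/' % API_BASE, params=dict(name='vasp_data.zip'),
        headers=headers, data=f).json()

# Poll until all processing tasks have finished.
while upload.get('tasks_running', False):
    time.sleep(3)
    upload = requests.get(
        '%s/uploads/%s' % (API_BASE, upload['upload_id']),
        headers=headers).json()

print('final status: %s' % upload.get('tasks_status'))

# Afterwards, the upload can be committed (moved out of staging) with metadata.
requests.post(
    '%s/uploads/%s' % (API_BASE, upload['upload_id']), headers=headers,
    json=dict(command='commit', metadata=dict(comment='example upload')))
```
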
diff --git a/nomad/client.py b/nomad/client.py
deleted file mode 100644
index 5129837c29bdc74911a7919dbf2a98a06ff4b13b..0000000000000000000000000000000000000000
--- a/nomad/client.py
+++ /dev/null
@@ -1,354 +0,0 @@
-# Copyright 2018 Markus Scheidgen
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-#   http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an"AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-
-"""
-Simple client library for the nomad api that allows to bulk upload files via shell command.
-"""
-
-import os.path
-import os
-import sys
-import subprocess
-import shlex
-import time
-import requests
-from requests.auth import HTTPBasicAuth
-import click
-from typing import Union, Callable, cast
-import logging
-
-from nomad import config, utils
-from nomad.files import UploadFile
-from nomad.parsing import parsers, parser_dict, LocalBackend
-from nomad.normalizing import normalizers
-
-
-api_base = 'http://localhost/nomad/api'
-user = 'leonard.hofstadter@nomad-fairdi.tests.de'
-pw = 'password'
-
-
-def handle_common_errors(func):
-    def wrapper(*args, **kwargs):
-        try:
-            func(*args, **kwargs)
-        except requests.exceptions.ConnectionError:
-            click.echo(
-                '\nCould not connect to nomad at %s. '
-                'Check connection and host/port options.' % api_base)
-            sys.exit(0)
-    return wrapper
-
-
-@handle_common_errors
-def upload_file(file_path: str, name: str = None, offline: bool = False, unstage: bool = False):
-    """
-    Upload a file to nomad.
-
-    Arguments:
-        file_path: path to the file, absolute or relative to call directory
-        name: optional name, default is the file_path's basename
-        offline: allows to process data without upload, requires client to be run on the server
-        unstage: automatically unstage after successful processing
-    """
-    auth = HTTPBasicAuth(user, pw)
-
-    if name is None:
-        name = os.path.basename(file_path)
-
-    post_data = dict(name=name)
-    if offline:
-        post_data.update(dict(local_path=os.path.abspath(file_path)))
-        click.echo('process offline: %s' % file_path)
-
-    upload = requests.post('%s/uploads' % api_base, json=post_data, auth=auth).json()
-
-    if not offline:
-        upload_cmd = upload['upload_command']
-        upload_cmd = upload_cmd.replace('local_file', file_path)
-
-        subprocess.call(shlex.split(upload_cmd))
-
-        click.echo('uploaded: %s' % file_path)
-
-    while True:
-        upload = requests.get('%s/uploads/%s' % (api_base, upload['upload_id']), auth=auth).json()
-        status = upload['status']
-        calcs_pagination = upload['calcs'].get('pagination')
-        if calcs_pagination is None:
-            total, successes, failures = 0, 0, 0
-        else:
-            total, successes, failures = (
-                calcs_pagination[key] for key in ('total', 'successes', 'failures'))
-
-        ret = '\n' if status in ('SUCCESS', 'FAILURE') else '\r'
-
-        print(
-            'status: %s; task: %s; parsing: %d/%d/%d                %s' %
-            (status, upload['current_task'], successes, failures, total, ret), end='')
-
-        if status in ('SUCCESS', 'FAILURE'):
-            break
-
-        time.sleep(3)
-
-    if status == 'FAILURE':
-        click.echo('There have been errors:')
-        for error in upload['errors']:
-            click.echo('    %s' % error)
-    elif unstage:
-        post_data = dict(operation='unstage')
-        requests.post('%s/uploads/%s' % (api_base, upload['upload_id']), json=post_data, auth=auth).json()
-
-
-def walk_through_files(path, extension='.zip'):
-    """
-    Returns all abs path of all files in a sub tree of the given path that match
-    the given extension.
-
-    Arguments:
-        path (str): the directory
-        extension (str): the extension, incl. '.', e.g. '.zip' (default)
-    """
-
-    for (dirpath, _, filenames) in os.walk(path):
-        for filename in filenames:
-            if filename.endswith(extension):
-                yield os.path.abspath(os.path.join(dirpath, filename))
-
-
-class CalcProcReproduction:
-    """
-    Instances represent a local reproduction of the processing for a single calculation.
-    It allows to download raw data from a nomad server and reproduce its processing
-    (parsing, normalizing) with the locally installed parsers and normalizers.
-
-    The use-case is error/warning reproduction. Use ELK to identify errors, use
-    the upload, archive ids/hashes to given by ELK, and reproduce and fix the error
-    in your development environment.
-
-    This is a class of :class:`UploadFile` the downloaded raw data will be treated as
-    an fake 'upload' that only contains the respective calculation data. This allows us
-    to locally run processing code that is very similar to the one used on the server.
-
-    Arguments:
-        archive_id: The archive_id of the calculation to locally process.
-        override: Set to true to override any existing local calculation data.
-    """
-    def __init__(self, archive_id: str, override: bool = False) -> None:
-        self.calc_hash = utils.archive.calc_hash(archive_id)
-        self.upload_hash = utils.archive.upload_hash(archive_id)
-        self.mainfile = None
-        self.parser = None
-        self.logger = utils.get_logger(__name__, archive_id=archive_id)
-
-        local_path = os.path.join(config.fs.tmp, 'repro_%s.zip' % archive_id)
-        if not os.path.exists(os.path.dirname(local_path)):
-            os.makedirs(os.path.dirname(local_path))
-        if not os.path.exists(local_path) or override:
-            # download raw if not already downloaded or if override is set
-            # TODO currently only downloads mainfile
-            self.logger.info('Downloading calc.')
-            req = requests.get('%s/raw/%s?files=%s' % (api_base, self.upload_hash, self.mainfile), stream=True)
-            with open(local_path, 'wb') as f:
-                for chunk in req.iter_content(chunk_size=1024):
-                    f.write(chunk)
-        else:
-            self.logger.info('Calc already downloaded.')
-
-        self.upload_file = UploadFile(upload_id='tmp_%s' % archive_id, local_path=local_path)
-
-    def __enter__(self):
-        # open/extract upload file
-        self.logger.info('Extracting calc data.')
-        self.upload_file.__enter__()
-
-        # find mainfile matching calc_hash
-        self.mainfile = next(
-            filename for filename in self.upload_file.filelist
-            if utils.hash(filename) == self.calc_hash)
-
-        assert self.mainfile is not None, 'The mainfile could not be found.'
-        self.logger = self.logger.bind(mainfile=self.mainfile)
-        self.logger.info('Identified mainfile.')
-
-        return self
-
-    def __exit__(self, *args):
-        self.upload_file.__exit__(*args)
-
-    def parse(self, parser_name: str = None) -> LocalBackend:
-        """
-        Run the given parser on the downloaded calculation. If no parser is given,
-        do parser matching and use the respective parser.
-        """
-        mainfile = self.upload_file.get_file(self.mainfile)
-        if parser_name is not None:
-            parser = parser_dict.get(parser_name)
-        else:
-            for potential_parser in parsers:
-                with mainfile.open() as mainfile_f:
-                    if potential_parser.is_mainfile(self.mainfile, lambda fn: mainfile_f):
-                        parser = potential_parser
-                        break
-
-        assert parser is not None, 'there is not parser matching %s' % self.mainfile
-        self.logger = self.logger.bind(parser=parser.name)  # type: ignore
-        self.logger.info('identified parser')
-
-        parser_backend = parser.run(mainfile.os_path, logger=self.logger)
-        self.logger.info('ran parser')
-        return parser_backend
-
-    def normalize(self, normalizer: Union[str, Callable], parser_backend: LocalBackend = None):
-        """
-        Parse the downloaded calculation and run the given normalizer.
-        """
-        if parser_backend is None:
-            parser_backend = self.parse()
-
-        if isinstance(normalizer, str):
-            normalizer = next(
-                normalizer_instance for normalizer_instance in normalizers
-                if normalizer_instance.__class__.__name__ == normalizer)
-
-        assert normalizer is not None, 'there is no normalizer %s' % str(normalizer)
-        normalizer_instance = cast(Callable, normalizer)(parser_backend)
-        logger = self.logger.bind(normalizer=normalizer_instance.__class__.__name__)
-        self.logger.info('identified normalizer')
-
-        normalizer_instance.normalize(logger=logger)
-        self.logger.info('ran normalizer')
-        return parser_backend
-
-    def normalize_all(self, parser_backend: LocalBackend = None):
-        """
-        Parse the downloaded calculation and run the whole normalizer chain.
-        """
-        for normalizer in normalizers:
-            parser_backend = self.normalize(normalizer, parser_backend=parser_backend)
-
-        return parser_backend
-
-
-@click.group()
-@click.option('-h', '--host', default='localhost', help='The host nomad runs on, default is "localhost".')
-@click.option('-p', '--port', default=80, help='the port nomad runs with, default is 80.')
-@click.option('-v', '--verbose', help='sets log level to debug', is_flag=True)
-def cli(host: str, port: int, verbose: bool):
-    if verbose:
-        config.console_log_level = logging.DEBUG
-    else:
-        config.console_log_level = logging.WARNING
-
-    global api_base
-    api_base = 'http://%s:%d/nomad/api' % (host, port)
-
-
-@cli.command(
-    help='Upload files to nomad. The given path can be a single file or a directory. '
-    'All .zip files in a directory will be uploaded.')
-@click.argument('PATH', nargs=-1, required=True, type=click.Path(exists=True))
-@click.option(
-    '--name',
-    help='Optional name for the upload of a single file. Will be ignored on directories.')
-@click.option(
-    '--offline', is_flag=True, default=False,
-    help='Upload files "offline": files will not be uploaded, but processed were they are. '
-    'Only works when run on the nomad host.')
-@click.option(
-    '--unstage', is_flag=True, default=False,
-    help='Automatically move upload out of the staging area after successful processing')
-def upload(path, name: str, offline: bool, unstage: bool):
-    utils.configure_logging()
-    paths = path
-    click.echo('uploading files from %s paths' % len(paths))
-    for path in paths:
-        click.echo('uploading %s' % path)
-        if os.path.isfile(path):
-            name = name if name is not None else os.path.basename(path)
-            upload_file(path, name, offline, unstage)
-
-        elif os.path.isdir(path):
-            for file_path in walk_through_files(path):
-                name = os.path.basename(file_path)
-                upload_file(file_path, name, offline, unstage)
-
-        else:
-            click.echo('Unknown path type %s.' % path)
-
-
-@cli.command(help='Attempts to reset the nomad.')
-def reset():
-    response = requests.post('%s/admin/reset' % api_base, auth=HTTPBasicAuth(user, pw))
-    if response.status_code != 200:
-        click.echo('API return %s' % str(response.status_code))
-        click.echo(response.text)
-        sys.exit(1)
-
-
-@cli.command(help='Run processing locally.')
-@click.argument('ARCHIVE_ID', nargs=1, required=True, type=str)
-@click.option(
-    '--override', is_flag=True, default=False,
-    help='Override existing local calculation data.')
-def local(archive_id, **kwargs):
-    utils.configure_logging()
-    utils.get_logger(__name__).info('Using %s' % api_base)
-    with CalcProcReproduction(archive_id, **kwargs) as local:
-        backend = local.parse()
-        local.normalize_all(parser_backend=backend)
-        # backend.write_json(sys.stdout, pretty=True)
-
-
-@cli.group(help='Run a nomad service locally (outside docker).')
-def run():
-    pass
-
-
-@run.command(help='Run the nomad development worker.')
-def worker():
-    config.service = 'nomad_worker'
-    from nomad import processing
-    processing.app.worker_main(['worker', '--loglevel=INFO'])
-
-
-@run.command(help='Run the nomad development api.')
-def api():
-    config.service = 'nomad_api'
-    from nomad import infrastructure, api
-    infrastructure.setup()
-    api.app.run(debug=True, port=8000)
-
-
-@cli.command(help='Runs tests and linting. Useful before commit code.')
-@click.option('--skip-tests', help='Do not test, just do code checks.', is_flag=True)
-def qa(skip_tests: bool):
-    os.chdir(os.path.abspath(os.path.join(os.path.dirname(__file__), '..')))
-    ret_code = 0
-    if not skip_tests:
-        click.echo('Run tests ...')
-        ret_code += os.system('python -m pytest tests')
-    click.echo('Run code style checks ...')
-    ret_code += os.system('python -m pycodestyle --ignore=E501,E701 nomad tests')
-    click.echo('Run linter ...')
-    ret_code += os.system('python -m pylint --load-plugins=pylint_mongoengine nomad tests')
-    click.echo('Run static type checks ...')
-    ret_code += os.system('python -m mypy --ignore-missing-imports --follow-imports=silent --no-strict-optional nomad tests')
-
-    sys.exit(ret_code)
-
-
-if __name__ == '__main__':
-    cli()  # pylint: disable=E1120
diff --git a/nomad/client/__init__.py b/nomad/client/__init__.py
new file mode 100644
index 0000000000000000000000000000000000000000..e5b3d901f82b0f575843f4dbf5da834d2a9ad9af
--- /dev/null
+++ b/nomad/client/__init__.py
@@ -0,0 +1,24 @@
+# Copyright 2018 Markus Scheidgen
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#   http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+"""
+Swagger/bravado based python client library for the API and various useful shell commands.
+"""
+
+from . import local, migration, misc, upload
+from .main import cli, create_client
+
+
+if __name__ == '__main__':
+    cli()  # pylint: disable=E1120
diff --git a/nomad/client/local.py b/nomad/client/local.py
new file mode 100644
index 0000000000000000000000000000000000000000..031e133bb2dd523f720a91703ff4d46eefea150b
--- /dev/null
+++ b/nomad/client/local.py
@@ -0,0 +1,150 @@
+# Copyright 2018 Markus Scheidgen
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#   http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+import os.path
+import os
+import requests
+import click
+from typing import Union, Callable, cast
+
+from nomad import config, utils
+from nomad.files import ArchiveBasedStagingUploadFiles
+from nomad.parsing import parsers, parser_dict, LocalBackend
+from nomad.normalizing import normalizers
+
+from .main import cli, api_base
+
+
+class CalcProcReproduction:
+    """
+    Instances represent a local reproduction of the processing for a single calculation.
+    It allows you to download raw data from a nomad server and reproduce its processing
+    (parsing, normalizing) with the locally installed parsers and normalizers.
+
+    The use-case is error/warning reproduction. Use ELK to identify errors, take
+    the upload and archive ids given by ELK, and reproduce and fix the error
+    in your development environment.
+
+    Arguments:
+        archive_id: The archive_id of the calculation to locally process.
+        override: Set to true to override any existing local calculation data.
+    """
+    def __init__(self, archive_id: str, override: bool = False) -> None:
+        self.calc_id = utils.archive.calc_id(archive_id)
+        self.upload_id = utils.archive.upload_id(archive_id)
+        self.mainfile = None
+        self.parser = None
+        self.logger = utils.get_logger(__name__, archive_id=archive_id)
+
+        local_path = os.path.join(config.fs.tmp, 'repro_%s.zip' % archive_id)
+        if not os.path.exists(os.path.dirname(local_path)):
+            os.makedirs(os.path.dirname(local_path))
+        if not os.path.exists(local_path) or override:
+            # download raw if not already downloaded or if override is set
+            # download with request, since bravado does not support streaming
+            # TODO currently only downloads mainfile; note that self.mainfile is
+            # still None here (it is only set in __enter__), so the raw path
+            # derived from it below needs fixing
+            self.logger.info('Downloading calc.')
+            req = requests.get('%s/raw/%s/%s' % (api_base, self.upload_id, os.path.dirname(self.mainfile)), stream=True)
+            with open(local_path, 'wb') as f:
+                for chunk in req.iter_content(chunk_size=1024):
+                    f.write(chunk)
+        else:
+            self.logger.info('Calc already downloaded.')
+
+        self.upload_files = ArchiveBasedStagingUploadFiles(upload_id='tmp_%s' % archive_id, local_path=local_path)
+
+    def __enter__(self):
+        # open/extract upload file
+        self.logger.info('Extracting calc data.')
+        self.upload_files.extract()
+
+        # find mainfile matching calc_id
+        self.mainfile = next(
+            (filename for filename in self.upload_files.raw_file_manifest()
+             if self.upload_files.calc_id(filename) == self.calc_id), None)
+
+        assert self.mainfile is not None, 'The mainfile could not be found.'
+        self.logger = self.logger.bind(mainfile=self.mainfile)
+        self.logger.info('Identified mainfile.')
+
+        return self
+
+    def __exit__(self, *args):
+        self.upload_files.delete()
+
+    def parse(self, parser_name: str = None) -> LocalBackend:
+        """
+        Run the given parser on the downloaded calculation. If no parser is given,
+        do parser matching and use the respective parser.
+        """
+        if parser_name is not None:
+            parser = parser_dict.get(parser_name)
+        else:
+            parser = None
+            for potential_parser in parsers:
+                with self.upload_files.raw_file(self.mainfile) as mainfile_f:
+                    if potential_parser.is_mainfile(self.mainfile, lambda fn: mainfile_f):
+                        parser = potential_parser
+                        break
+
+        assert parser is not None, 'there is no parser matching %s' % self.mainfile
+        self.logger = self.logger.bind(parser=parser.name)  # type: ignore
+        self.logger.info('identified parser')
+
+        parser_backend = parser.run(self.upload_files.raw_file_object(self.mainfile).os_path, logger=self.logger)
+        self.logger.info('ran parser')
+        return parser_backend
+
+    def normalize(self, normalizer: Union[str, Callable], parser_backend: LocalBackend = None):
+        """
+        Parse the downloaded calculation and run the given normalizer.
+        """
+        if parser_backend is None:
+            parser_backend = self.parse()
+
+        if isinstance(normalizer, str):
+            normalizer = next(
+                (normalizer_instance for normalizer_instance in normalizers
+                 if normalizer_instance.__class__.__name__ == normalizer), None)
+
+        assert normalizer is not None, 'there is no normalizer %s' % str(normalizer)
+        normalizer_instance = cast(Callable, normalizer)(parser_backend)
+        logger = self.logger.bind(normalizer=normalizer_instance.__class__.__name__)
+        logger.info('identified normalizer')
+
+        normalizer_instance.normalize(logger=logger)
+        logger.info('ran normalizer')
+        return parser_backend
+
+    def normalize_all(self, parser_backend: LocalBackend = None):
+        """
+        Parse the downloaded calculation and run the whole normalizer chain.
+        """
+        for normalizer in normalizers:
+            parser_backend = self.normalize(normalizer, parser_backend=parser_backend)
+
+        return parser_backend
+
+
+@cli.command(help='Run processing locally.')
+@click.argument('ARCHIVE_ID', nargs=1, required=True, type=str)
+@click.option(
+    '--override', is_flag=True, default=False,
+    help='Override existing local calculation data.')
+def local(archive_id, **kwargs):
+    utils.configure_logging()
+    utils.get_logger(__name__).info('Using %s' % api_base)
+    with CalcProcReproduction(archive_id, **kwargs) as local:
+        backend = local.parse()
+        local.normalize_all(parser_backend=backend)
+        # backend.write_json(sys.stdout, pretty=True)
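The CalcProcReproduction class above can also be driven directly from Python, without the `local` command. A minimal sketch, assuming an archive id of the form `upload_id/calc_id` (as suggested by the `utils.archive.calc_id`/`upload_id` calls in `__init__`); the normalizer name 'SystemNormalizer' is a hypothetical example:

    from nomad.client.local import CalcProcReproduction

    with CalcProcReproduction('some_upload/some_calc', override=False) as repro:
        backend = repro.parse()  # match and run a parser on the downloaded mainfile
        # run a single normalizer by class name ('SystemNormalizer' is hypothetical) ...
        repro.normalize('SystemNormalizer', parser_backend=backend)
        # ... or the whole normalizer chain
        repro.normalize_all(parser_backend=backend)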
diff --git a/nomad/client/main.py b/nomad/client/main.py
new file mode 100644
index 0000000000000000000000000000000000000000..55f16f56b3d5e33df41f64773c42bf7f5f912c52
--- /dev/null
+++ b/nomad/client/main.py
@@ -0,0 +1,85 @@
+# Copyright 2018 Markus Scheidgen
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#   http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+import sys
+import requests
+import click
+import logging
+from bravado.requests_client import RequestsClient
+from bravado.client import SwaggerClient
+
+from nomad import config
+
+
+api_base = 'http://%s:%d%s' % (config.services.api_host, config.services.api_port, config.services.api_base_path)
+user = 'leonard.hofstadter@nomad-fairdi.tests.de'
+pw = 'password'
+
+
+def create_client():
+    return _create_client()
+
+
+def _create_client(
+        host: str = config.services.api_host,
+        port: int = config.services.api_port,
+        base_path: str = config.services.api_base_path,
+        user: str = user, password: str = pw):
+    """ A factory method to create the client. """
+
+    http_client = RequestsClient()
+    if user is not None:
+        http_client.set_basic_auth(host, user, password)
+
+    client = SwaggerClient.from_url(
+        'http://%s:%d%s/swagger.json' % (host, port, base_path),
+        http_client=http_client)
+
+    return client
+
+
+def handle_common_errors(func):
+    def wrapper(*args, **kwargs):
+        try:
+            func(*args, **kwargs)
+        except requests.exceptions.ConnectionError:
+            click.echo(
+                '\nCould not connect to nomad at %s. '
+                'Check connection and host/port options.' % api_base)
+            sys.exit(1)
+    return wrapper
+
+
+@click.group()
+@click.option('-h', '--host', default=config.services.api_host, help='The host nomad runs on, default is "%s".' % config.services.api_host)
+@click.option('-p', '--port', default=config.services.api_port, help='The port nomad listens on, default is %d.' % config.services.api_port)
+@click.option('-u', '--user', default=None, help='The user name to login with, default is no login.')
+@click.option('-w', '--password', default=None, help='The password used to login.')
+@click.option('-v', '--verbose', help='Sets the log level to debug.', is_flag=True)
+def cli(host: str, port: int, verbose: bool, user: str, password: str):
+    if verbose:
+        config.console_log_level = logging.DEBUG
+    else:
+        config.console_log_level = logging.WARNING
+
+    global api_base
+    api_base = 'http://%s:%d%s' % (host, port, config.services.api_base_path)
+
+    global create_client
+
+    def create_client():  # pylint: disable=W0612
+        if user is not None:
+            return _create_client(host=host, port=port, user=user, password=password)
+        else:
+            return _create_client(host=host, port=port)
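Since `cli` rebinds `create_client` with the given host, port, and credentials, scripts can reuse the same factory outside the CLI. A minimal sketch of talking to the API through the resulting bravado `SwaggerClient`; the `uploads.get_upload` operation is the one used by the upload command further below, any other operation would need to be checked against the served swagger.json:

    from nomad.client.main import create_client

    client = create_client()  # uses config defaults and the test user credentials
    upload = client.uploads.get_upload(upload_id='some_upload_id').response().result
    print(upload.tasks_status)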
diff --git a/nomad/client/migration.py b/nomad/client/migration.py
new file mode 100644
index 0000000000000000000000000000000000000000..559d35a658447f917d4eaa35a709d879742af969
--- /dev/null
+++ b/nomad/client/migration.py
@@ -0,0 +1,75 @@
+# Copyright 2018 Markus Scheidgen
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#   http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+import click
+import time
+import datetime
+
+from nomad import config, infrastructure
+from nomad.migration import NomadCOEMigration
+
+from .main import cli
+
+
+_migration: NomadCOEMigration = None
+
+
+@cli.group(help='Migrate data from NOMAD CoE to nomad@FAIRDI')
+@click.option('-h', '--host', default=config.migration_source_db.host, help='The migration repository source db host, default is "%s".' % config.migration_source_db.host)
+@click.option('-p', '--port', default=config.migration_source_db.port, help='The migration repository source db port, default is %d.' % config.migration_source_db.port)
+@click.option('-u', '--user', default=config.migration_source_db.user, help='The migration repository source db user, default is %s.' % config.migration_source_db.user)
+@click.option('-w', '--password', default=config.migration_source_db.password, help='The migration repository source db password.')
+@click.option('-db', '--dbname', default=config.migration_source_db.dbname, help='The migration repository source db name, default is %s.' % config.migration_source_db.dbname)
+def migration(host, port, user, password, dbname):
+    infrastructure.setup_logging()
+    infrastructure.setup_repository_db(
+        readonly=True, host=host, port=port, user=user, password=password, dbname=dbname)
+    infrastructure.setup_mongo()
+
+    global _migration
+    _migration = NomadCOEMigration()
+
+
+@migration.command(help='Create/update the coe repository db migration index')
+@click.option('--drop', help='Drop the existing index, otherwise it will only add new data.', is_flag=True)
+@click.option('--with-metadata', help='Extract metadata for each calc and add it to the index.', is_flag=True)
+@click.option('--per-query', default=100, help='Number of objects indexed with one query. Default is 100.')
+def index(drop, with_metadata, per_query):
+    start = time.time()
+    indexed_total = 0
+    indexed_calcs = 0
+    for calc, total in _migration.index(drop=drop, with_metadata=with_metadata, per_query=int(per_query)):
+        indexed_total += 1
+        indexed_calcs += 1 if calc is not None else 0
+        eta = total * ((time.time() - start) / indexed_total)
+        print(
+            'indexed: %8d, calcs: %8d, total: %8d, ETA: %s\r' %
+            (indexed_total, indexed_calcs, total, datetime.timedelta(seconds=eta)), end='')
+    print('done')
+
+
+@migration.command(help='Copy users from source into empty target db')
+@click.option('-h', '--host', default=config.repository_db.host, help='The migration repository target db host, default is "%s".' % config.repository_db.host)
+@click.option('-p', '--port', default=config.repository_db.port, help='The migration repository target db port, default is %d.' % config.repository_db.port)
+@click.option('-u', '--user', default=config.repository_db.user, help='The migration repository target db user, default is %s.' % config.repository_db.user)
+@click.option('-w', '--password', default=config.repository_db.password, help='The migration repository target db password.')
+@click.option('-db', '--dbname', default=config.repository_db.dbname, help='The migration repository target db name, default is %s.' % config.repository_db.dbname)
+def copy_users(**kwargs):
+    _, db = infrastructure.sqlalchemy_repository_db(readonly=False, **kwargs)
+    _migration.copy_users(db)
+
+
+@migration.command(help='Upload the given upload locations. Uses the existing index to provide user metadata.')
+def upload():
+    pass
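The ETA printed by the `index` command above is a linear extrapolation: the average time per indexed object so far, multiplied by the total number of objects, so it estimates the total runtime rather than the remaining time. The same computation as a standalone sketch (the function name is ours, not part of the module):

    import time
    import datetime

    def estimate_total_runtime(start: float, processed: int, total: int) -> datetime.timedelta:
        # average seconds per processed object, extrapolated to all objects
        per_object = (time.time() - start) / max(processed, 1)
        return datetime.timedelta(seconds=total * per_object)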
diff --git a/nomad/client/misc.py b/nomad/client/misc.py
new file mode 100644
index 0000000000000000000000000000000000000000..760c52f91b8ce347646679498db8f2324f6861cf
--- /dev/null
+++ b/nomad/client/misc.py
@@ -0,0 +1,68 @@
+# Copyright 2018 Markus Scheidgen
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#   http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+import os.path
+import os
+import sys
+import click
+
+from nomad import config
+
+from .main import cli
+
+
+@cli.command(help='Attempts to reset the nomad installation.')
+def reset():
+    from .main import create_client
+    create_client().admin.exec_reset_command().response()
+
+
+@cli.group(help='Run a nomad service locally (outside docker).')
+def run():
+    pass
+
+
+@run.command(help='Run the nomad development worker.')
+def worker():
+    config.service = 'nomad_worker'
+    from nomad import processing
+    processing.app.worker_main(['worker', '--loglevel=INFO'])
+
+
+@run.command(help='Run the nomad development api.')
+@click.option('--debug', help='Run flask in debug mode.', is_flag=True)
+def api(debug: bool):
+    config.service = 'nomad_api'
+    from nomad import infrastructure
+    from nomad.api.__main__ import run_dev_server
+    infrastructure.setup()
+    run_dev_server(debug=debug, port=8000)
+
+
+@cli.command(help='Runs tests and linting. Useful before committing code.')
+@click.option('--skip-tests', help='Do not test, just do code checks.', is_flag=True)
+def qa(skip_tests: bool):
+    os.chdir(os.path.abspath(os.path.join(os.path.dirname(__file__), '../..')))
+    ret_code = 0
+    if not skip_tests:
+        click.echo('Run tests ...')
+        ret_code += os.system('python -m pytest -svx tests')
+    click.echo('Run code style checks ...')
+    ret_code += os.system('python -m pycodestyle --ignore=E501,E701 nomad tests')
+    click.echo('Run linter ...')
+    ret_code += os.system('python -m pylint --load-plugins=pylint_mongoengine nomad tests')
+    click.echo('Run static type checks ...')
+    ret_code += os.system('python -m mypy --ignore-missing-imports --follow-imports=silent --no-strict-optional nomad tests')
+
+    sys.exit(ret_code)
diff --git a/nomad/client/upload.py b/nomad/client/upload.py
new file mode 100644
index 0000000000000000000000000000000000000000..815076e7f6d5b9e649ac03719ba723cb16621951
--- /dev/null
+++ b/nomad/client/upload.py
@@ -0,0 +1,108 @@
+# Copyright 2018 Markus Scheidgen
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#   http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+import os.path
+import os
+import time
+import click
+
+from nomad import utils
+from nomad.processing import FAILURE, SUCCESS
+
+from .main import cli
+
+
+def upload_file(file_path: str, name: str = None, offline: bool = False, commit: bool = False, client=None):
+    """
+    Upload a file to nomad.
+
+    Arguments:
+        file_path: path to the file, absolute or relative to call directory
+        name: optional name, default is the file_path's basename
+        offline: process data without uploading it; requires the client to run on the server
+        commit: automatically commit after successful processing
+
+    Returns: The upload_id
+    """
+    if client is None:
+        # deferred import picks up the create_client rebound by cli() (see misc.reset)
+        from .main import create_client
+        client = create_client()
+    if offline:
+        upload = client.uploads.upload(
+            local_path=os.path.abspath(file_path), name=name).response().result
+        click.echo('process offline: %s' % file_path)
+    else:
+        with open(file_path, 'rb') as f:
+            upload = client.uploads.upload(file=f, name=name).response().result
+        click.echo('process online: %s' % file_path)
+
+    while upload.tasks_status not in [SUCCESS, FAILURE]:
+        upload = client.uploads.get_upload(upload_id=upload.upload_id).response().result
+        calcs = upload.calcs.pagination
+        if calcs is None:
+            total, successes, failures = 0, 0, 0
+        else:
+            total, successes, failures = (calcs.total, calcs.successes, calcs.failures)
+
+        ret = '\n' if upload.tasks_status in (SUCCESS, FAILURE) else '\r'
+
+        print(
+            'status: %s; task: %s; parsing: %d/%d/%d                %s' %
+            (upload.tasks_status, upload.current_task, successes, failures, total, ret), end='')
+
+        time.sleep(1)
+
+    if upload.tasks_status == FAILURE:
+        click.echo('There have been errors:')
+        for error in upload.errors:
+            click.echo('    %s' % error)
+    elif commit:
+        client.uploads.exec_upload_command(upload_id=upload.upload_id, command='commit').response()
+
+    return upload.upload_id
+
+
+@cli.command(
+    help='Upload files to nomad. The given path can be a single file or a directory. '
+    'All .zip files in a directory will be uploaded.')
+@click.argument('PATH', nargs=-1, required=True, type=click.Path(exists=True))
+@click.option(
+    '--name',
+    help='Optional name for the upload of a single file. Will be ignored on directories.')
+@click.option(
+    '--offline', is_flag=True, default=False,
+    help='Upload files "offline": files will not be uploaded, but processed were they are. '
+    'Only works when run on the nomad host.')
+@click.option(
+    '--commit', is_flag=True, default=False,
+    help='Automatically move upload out of the staging area after successful processing')
+def upload(path, name: str, offline: bool, commit: bool):
+    utils.configure_logging()
+    paths = path
+    click.echo('uploading files from %s paths' % len(paths))
+    for path in paths:
+        click.echo('uploading %s' % path)
+        if os.path.isfile(path):
+            name = name if name is not None else os.path.basename(path)
+            upload_file(path, name, offline, commit)
+
+        elif os.path.isdir(path):
+            for (dirpath, _, filenames) in os.walk(path):
+                for filename in filenames:
+                    if filename.endswith('.zip'):
+                        file_path = os.path.abspath(os.path.join(dirpath, filename))
+                        name = os.path.basename(file_path)
+                        upload_file(file_path, name, offline, commit)
+
+        else:
+            click.echo('Unknown path type %s.' % path)
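`upload_file` can also be used programmatically; it blocks, polling `get_upload` once per second, until processing reaches SUCCESS or FAILURE. A minimal sketch with a hypothetical zip file:

    from nomad.client.upload import upload_file

    # path and name are hypothetical; commit=True moves the upload out of staging
    upload_id = upload_file('local/example.zip', name='example.zip', commit=True)
    print('processed upload %s' % upload_id)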
diff --git a/nomad/coe_repo.py b/nomad/coe_repo.py
deleted file mode 100644
index 751b3ca861941edb6998072dbe85d55925f6c557..0000000000000000000000000000000000000000
--- a/nomad/coe_repo.py
+++ /dev/null
@@ -1,348 +0,0 @@
-# Copyright 2018 Markus Scheidgen
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-#   http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an"AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-
-"""
-Interface to the NOMAD-coe repository postgres database. This implementation is based on
-SQLAlchemy. There are model classes that represent entries in the *users* and *session*
-tables.
-
-This module allows to authenticate users based on user password or session tokens.
-It allows to access the user data like names and user_id.
-
-.. autoclass:: User
-    :members:
-    :undoc-members:
-
-.. autoclass:: Session
-    :members:
-    :undoc-members:
-
-.. autofunction:: ensure_test_user
-
-This module also provides functionality to add parsed calculation data to the db:
-
-.. autofunction:: add_upload
-"""
-
-import itertools
-from passlib.hash import bcrypt
-from sqlalchemy import Column, Integer, String, Boolean, DateTime, ForeignKey
-from sqlalchemy.orm import relationship
-from sqlalchemy.ext.declarative import declarative_base
-from sqlalchemy.dialects.postgresql import BYTEA
-
-from nomad import utils, infrastructure
-from nomad.repo import RepoCalc
-
-
-Base = declarative_base()
-
-
-def add_upload(upload, restricted: bool) -> int:
-    """
-    Add the processed upload to the NOMAD-coe repository db. It creates an
-    uploads-entry, respective calculation and property entries. Everything in one
-    transaction. Triggers an updates the NOMAD-coe repository elastic search index after
-    success.
-
-    TODO deal with the restricted parameter
-    """
-    repo_db = infrastructure.repository_db
-    repo_db.begin()
-
-    logger = utils.get_logger(
-        __name__,
-        upload_id=upload.upload_id,
-        upload_hash=upload.upload_hash)
-
-    result = None
-
-    try:
-        # create upload
-        coe_upload = Upload(
-            upload_name=upload.upload_hash,
-            created=upload.upload_time,
-            user_id=int(upload.user_id),
-            is_processed=True)
-        repo_db.add(coe_upload)
-
-        # add calculations and metadata
-        has_calcs = False
-        for repo_calc in RepoCalc.upload_calcs(upload.upload_id):
-            has_calcs = True
-            add_calculation(upload, coe_upload, repo_calc, restricted)
-
-        # commit
-        if has_calcs:
-            # empty upload case
-            repo_db.commit()
-            result = coe_upload.upload_id
-        else:
-            repo_db.rollback()
-    except Exception as e:
-        logger.error('Unexpected exception.', exc_info=e)
-        repo_db.rollback()
-        raise e
-
-    # trigger index update
-    pass
-
-    return result
-
-
-def add_calculation(upload, coe_upload, calc: RepoCalc, restricted: bool) -> None:
-    repo_db = infrastructure.repository_db
-
-    # table based properties
-    coe_calc = Calc(checksum=calc.calc_hash, upload=coe_upload)
-    repo_db.add(coe_calc)
-
-    program_version = calc.program_version  # TODO shorten version names
-    code_version = repo_db.query(CodeVersion).filter_by(content=program_version).first()
-    if code_version is None:
-        code_version = CodeVersion(content=program_version)
-        repo_db.add(code_version)
-
-    filenames = itertools.chain([calc.mainfile], calc.aux_files)
-
-    metadata = CalcMetaData(
-        calc=coe_calc,
-        added=upload.upload_time,
-        chemical_formula=calc.chemical_composition,
-        filenames=('[%s]' % ','.join(['"%s"' % filename for filename in filenames])).encode('utf-8'),
-        location=calc.mainfile,
-        version=code_version)
-    repo_db.add(metadata)
-
-    struct_ratio = StructRatio(
-        calc=coe_calc,
-        chemical_formula=calc.chemical_composition,
-        formula_units=1, nelem=1)
-    repo_db.add(struct_ratio)
-
-    user_metadata = UserMetaData(
-        calc=coe_calc,
-        permission=0 if not restricted else 1)
-    repo_db.add(user_metadata)
-
-    spacegroup = Spacegroup(
-        calc=coe_calc,
-        n=int(calc.space_group_number)
-    )
-    repo_db.add(spacegroup)
-
-    # topic based properties
-    coe_calc.set_value(topic_code, calc.program_name)
-    for atom in set(calc.atom_species):
-        coe_calc.set_value(topic_atoms, str(atom))  # TODO atom label not number
-    coe_calc.set_value(topic_system_type, calc.system_type)
-    coe_calc.set_value(topic_xc_treatment, calc.XC_functional_name)  # TODO function->treatment
-    coe_calc.set_value(topic_crystal_system, calc.crystal_system)
-    coe_calc.set_value(topic_basis_set_type, calc.basis_set_type)
-
-
-class Calc(Base):  # type: ignore
-    __tablename__ = 'calculations'
-
-    calc_id = Column(Integer, primary_key=True, autoincrement=True)
-    origin_id = Column(Integer, ForeignKey('uploads.upload_id'))
-    upload = relationship('Upload')
-    checksum = Column(String)
-
-    def set_value(self, topic_cid: int, value: str) -> None:
-        if value is None:
-            return
-
-        repo_db = infrastructure.repository_db
-        topic = repo_db.query(Topics).filter_by(topic=value).first()
-        if not topic:
-            topic = Topics(cid=topic_cid, topic=value)
-            repo_db.add(topic)
-
-        tag = Tag(calc=self, topic=topic)
-        repo_db.add(tag)
-
-
-class CalcMetaData(Base):  # type: ignore
-    __tablename__ = 'metadata'
-
-    calc_id = Column(Integer, ForeignKey('calculations.calc_id'), primary_key=True)
-    calc = relationship('Calc')
-    added = Column(DateTime)
-    chemical_formula = Column(String)
-    filenames = Column(BYTEA)
-    location = Column(String)
-    version_id = Column(Integer, ForeignKey('codeversions.version_id'))
-    version = relationship('CodeVersion')
-
-
-class UserMetaData(Base):  # type: ignore
-    __tablename__ = 'user_metadata'
-
-    calc_id = Column(Integer, ForeignKey('calculations.calc_id'), primary_key=True)
-    calc = relationship('Calc')
-    permission = Column(Integer)
-
-
-class StructRatio(Base):  # type: ignore
-    __tablename__ = 'struct_ratios'
-
-    calc_id = Column(Integer, ForeignKey('calculations.calc_id'), primary_key=True)
-    calc = relationship('Calc')
-    formula_units = Column(Integer)
-    nelem = Column(Integer)
-    chemical_formula = Column(String)
-
-
-class CodeVersion(Base):  # type: ignore
-    __tablename__ = 'codeversions'
-
-    version_id = Column(Integer, primary_key=True, autoincrement=True)
-    content = Column(String)
-
-
-class Spacegroup(Base):  # type: ignore
-    __tablename__ = 'spacegroups'
-
-    calc_id = Column(Integer, ForeignKey('calculations.calc_id'), primary_key=True)
-    calc = relationship('Calc')
-    n = Column(Integer)
-
-
-class Tag(Base):  # type: ignore
-    __tablename__ = 'tags'
-    calc_id = Column(Integer, ForeignKey('calculations.calc_id'), primary_key=True)
-    calc = relationship('Calc')
-    tid = Column(Integer, ForeignKey('topics.tid'), primary_key=True)
-    topic = relationship('Topics')
-
-    def __repr__(self):
-        return '<Tag(calc_id="%d", tid="%d)>' % (int(self.calc_id), int(self.tid))
-
-
-topic_code = 220
-topic_atoms = 10
-topic_system_type = 50
-topic_xc_treatment = 75
-topic_crystal_system = 90
-topic_basis_set_type = 80
-
-
-class Topics(Base):  # type: ignore
-    __tablename__ = 'topics'
-    tid = Column(Integer, primary_key=True, autoincrement=True)
-    cid = Column(Integer)
-    topic = Column(String)
-
-
-class Upload(Base):  # type: ignore
-    __tablename__ = 'uploads'
-
-    upload_id = Column(Integer, primary_key=True, autoincrement=True)
-    upload_name = Column(String)
-    user_id = Column(Integer, ForeignKey('users.user_id'))
-    user = relationship('User')
-    is_processed = Column(Boolean)
-    created = Column(DateTime)
-
-
-class Session(Base):  # type: ignore
-    __tablename__ = 'sessions'
-
-    token = Column(String, primary_key=True)
-    user_id = Column(String)
-
-
-class LoginException(Exception):
-    pass
-
-
-class User(Base):  # type: ignore
-    """
-    SQLAlchemy model class that represents NOMAD-coe repository postgresdb *users*.
-    Provides functions for authenticating via password or session token.
-
-    It is not intended to create or update users. This should be done via the
-    NOMAD-coe repository GUI.
-    """
-    __tablename__ = 'users'
-
-    user_id = Column(Integer, primary_key=True)
-    email = Column(String)
-    firstname = Column(String)
-    lastname = Column(String)
-    password = Column(String)
-
-    def __repr__(self):
-        return '<User(email="%s")>' % self.email
-
-    def _hash_password(self, password):
-        assert False, 'Login functions are done by the NOMAD-coe repository GUI'
-        # password_hash = bcrypt.encrypt(password, ident='2y')
-        # self.password = password_hash
-
-    def _verify_password(self, password):
-        return bcrypt.verify(password, self.password)
-
-    def _generate_auth_token(self, expiration=600):
-        assert False, 'Login functions are done by the NOMAD-coe repository GUI'
-
-    def get_auth_token(self):
-        repo_db = infrastructure.repository_db
-        session = repo_db.query(Session).filter_by(user_id=self.user_id).first()
-        if not session:
-            raise LoginException('No session, user probably not logged in at NOMAD-coe repository GUI')
-
-        return session.token.encode('utf-8')
-
-    @staticmethod
-    def verify_user_password(email, password):
-        repo_db = infrastructure.repository_db
-        user = repo_db.query(User).filter_by(email=email).first()
-        if not user:
-            return None
-
-        if user._verify_password(password):
-            return user
-        else:
-            raise LoginException('Wrong password')
-
-    @staticmethod
-    def verify_auth_token(token):
-        repo_db = infrastructure.repository_db
-        session = repo_db.query(Session).filter_by(token=token).first()
-        if session is None:
-            return None
-
-        user = repo_db.query(User).filter_by(user_id=session.user_id).first()
-        assert user, 'User in sessions must exist.'
-        return user
-
-
-def ensure_test_user(email):
-    """
-    Allows tests to make sure that the default test users exist in the database.
-    Returns:
-        The user as :class:`User` instance.
-    """
-    repo_db = infrastructure.repository_db
-    existing = repo_db.query(User).filter_by(email=email).first()
-    assert existing, 'Test user %s does not exist.' % email
-
-    session = repo_db.query(Session).filter_by(
-        user_id=existing.user_id).first()
-    assert session, 'Test user %s has no session.' % email
-    assert session.token == email, 'Test user %s session has unexpected token.' % email
-
-    return existing
diff --git a/nomad/coe_repo/__init__.py b/nomad/coe_repo/__init__.py
new file mode 100644
index 0000000000000000000000000000000000000000..07ff4ab8c9b1284f7fe4a20d498bb4bdf3ed4694
--- /dev/null
+++ b/nomad/coe_repo/__init__.py
@@ -0,0 +1,47 @@
+# Copyright 2018 Markus Scheidgen
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#   http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+"""
+Interface to the NOMAD-coe repository postgres database. This implementation is based on
+SQLAlchemy. There are model classes that represent entries in the *users* and *session*
+tables.
+
+This module allows authenticating users based on user password or session tokens.
+It also provides access to user data like names and user_id.
+
+.. autoclass:: User
+    :members:
+    :undoc-members:
+
+.. autoclass:: Session
+    :members:
+    :undoc-members:
+
+.. autofunction:: ensure_test_user
+
+This module also provides functionality to add parsed calculation data to the db:
+
+.. autoclass:: UploadMetaData
+    :members:
+.. autoclass:: Upload
+    :members:
+    :undoc-members:
+.. autoclass:: Calc
+    :members:
+    :undoc-members:
+"""
+
+from .user import User, ensure_test_user, admin_user, LoginException
+from .calc import Calc, DataSet
+from .upload import UploadMetaData, Upload
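A minimal sketch of the authentication interface this docstring describes. The static methods keep the signatures of the replaced `nomad/coe_repo.py` above; the new `user` submodule is assumed to preserve them, and the credentials are the test user defaults from `nomad/client/main.py`:

    from nomad import infrastructure
    from nomad.coe_repo import User

    infrastructure.setup_repository_db()  # assuming default connection settings work
    user = User.verify_user_password('leonard.hofstadter@nomad-fairdi.tests.de', 'password')
    if user is not None:
        print(user.user_id, user.email)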
diff --git a/nomad/coe_repo/base.py b/nomad/coe_repo/base.py
new file mode 100644
index 0000000000000000000000000000000000000000..8768239c13c652e31620cfdb18c0d1bf1c3ec190
--- /dev/null
+++ b/nomad/coe_repo/base.py
@@ -0,0 +1,141 @@
+# Copyright 2018 Markus Scheidgen
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#   http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+"""
+Declarative SQLAlchemy base model definitions for the repository db schema. Does
+not include the *big* datamodel entities: `User`, `Upload`, `Calc`; they can
+be found in their own submodules.
+"""
+
+from sqlalchemy import Column, Integer, String, DateTime, ForeignKey, Enum, Table
+from sqlalchemy.orm import relationship
+from sqlalchemy.ext.declarative import declarative_base
+from sqlalchemy.dialects.postgresql import BYTEA
+
+
+Base = declarative_base()
+
+calc_citation_association = Table(
+    'metadata_citations', Base.metadata,
+    Column('calc_id', Integer, ForeignKey('calculations.calc_id')),
+    Column('citation_id', Integer, ForeignKey('citations.citation_id')))
+
+
+ownership = Table(
+    'ownerships', Base.metadata,
+    Column('calc_id', Integer, ForeignKey('calculations.calc_id')),
+    Column('user_id', Integer, ForeignKey('users.user_id')))
+
+co_authorship = Table(
+    'coauthorships', Base.metadata,
+    Column('calc_id', Integer, ForeignKey('calculations.calc_id')),
+    Column('user_id', Integer, ForeignKey('users.user_id')))
+
+shareship = Table(
+    'shareships', Base.metadata,
+    Column('calc_id', Integer, ForeignKey('calculations.calc_id')),
+    Column('user_id', Integer, ForeignKey('users.user_id')))
+
+
+class CalcMetaData(Base):  # type: ignore
+    __tablename__ = 'metadata'
+
+    calc_id = Column(Integer, ForeignKey('calculations.calc_id'), primary_key=True)
+    calc = relationship('Calc')
+    added = Column(DateTime)
+    chemical_formula = Column(String)
+    filenames = Column(BYTEA)
+    location = Column(String)
+    version_id = Column(Integer, ForeignKey('codeversions.version_id'))
+    version = relationship('CodeVersion', lazy='joined', uselist=False)
+
+
+class UserMetaData(Base):  # type: ignore
+    __tablename__ = 'user_metadata'
+
+    calc_id = Column(Integer, ForeignKey('calculations.calc_id'), primary_key=True)
+    label = Column(String)
+    calc = relationship('Calc')
+    permission = Column(Integer)
+
+
+class StructRatio(Base):  # type: ignore
+    __tablename__ = 'struct_ratios'
+
+    calc_id = Column(Integer, ForeignKey('calculations.calc_id'), primary_key=True)
+    calc = relationship('Calc')
+    formula_units = Column(Integer)
+    nelem = Column(Integer)
+    chemical_formula = Column(String)
+
+
+class CodeVersion(Base):  # type: ignore
+    __tablename__ = 'codeversions'
+
+    version_id = Column(Integer, primary_key=True, autoincrement=True)
+    content = Column(String)
+
+
+class Spacegroup(Base):  # type: ignore
+    __tablename__ = 'spacegroups'
+
+    calc_id = Column(Integer, ForeignKey('calculations.calc_id'), primary_key=True)
+    calc = relationship('Calc')
+    n = Column(Integer)
+
+
+topic_code = 220
+topic_atoms = 10
+topic_system_type = 50
+topic_xc_treatment = 75
+topic_crystal_system = 90
+topic_basis_set_type = 80
+
+
+class Tag(Base):  # type: ignore
+    __tablename__ = 'tags'
+    calc_id = Column(Integer, ForeignKey('calculations.calc_id'), primary_key=True)
+    calc = relationship('Calc')
+    tid = Column(Integer, ForeignKey('topics.tid'), primary_key=True)
+    topic = relationship('Topics', lazy='joined', uselist=False)
+
+    def __repr__(self):
+        return '<Tag(calc_id="%d", tid="%d)>' % (int(self.calc_id), int(self.tid))
+
+
+class Topics(Base):  # type: ignore
+    __tablename__ = 'topics'
+    tid = Column(Integer, primary_key=True, autoincrement=True)
+    cid = Column(Integer)
+    topic = Column(String)
+
+
+class CalcSet(Base):  # type: ignore
+    __tablename__ = 'calcsets'
+
+    parent_calc_id = Column(Integer, ForeignKey('calculations.calc_id'), primary_key=True)
+    children_calc_id = Column(Integer, ForeignKey('calculations.calc_id'), primary_key=True)
+
+
+calc_dataset_containment = Table(
+    'calcsets', Base.metadata, extend_existing=True)
+
+
+class Citation(Base):  # type: ignore
+    __tablename__ = 'citations'
+
+    citation_id = Column(Integer, primary_key=True)
+    value = Column(String)
+    kind = Column(Enum('INTERNAL', 'EXTERNAL', name='citation_kind_enum'))
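The topic-based properties flow through the two tables above: `Topics` stores one row per (cid, value) pair, and `Tag` links a calculation to it. A sketch of reading a calculation's code name back, under the assumption of an initialized repository db; the calc id is a placeholder:

    from nomad import infrastructure
    from nomad.coe_repo.base import Tag, topic_code

    repo_db = infrastructure.repository_db
    tags = repo_db.query(Tag).filter_by(calc_id=1).all()  # 1 is a placeholder calc id
    code_names = [tag.topic.topic for tag in tags if tag.topic.cid == topic_code]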
diff --git a/nomad/coe_repo/calc.py b/nomad/coe_repo/calc.py
new file mode 100644
index 0000000000000000000000000000000000000000..4ae065ca2c808a1c84e8961fb42d602490c28f4f
--- /dev/null
+++ b/nomad/coe_repo/calc.py
@@ -0,0 +1,204 @@
+# Copyright 2018 Markus Scheidgen
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#   http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+from typing import List
+import json
+from sqlalchemy import Column, Integer, String, ForeignKey
+from sqlalchemy.orm import relationship, aliased
+from sqlalchemy.sql.expression import literal
+
+from nomad import infrastructure, datamodel
+from nomad.datamodel import CalcWithMetadata
+
+from . import base
+from .user import User
+from .base import Base, calc_citation_association, ownership, co_authorship, shareship, \
+    Tag, Topics, CalcSet, calc_dataset_containment, Citation
+
+
+class Calc(Base, datamodel.Calc):  # type: ignore
+    __tablename__ = 'calculations'
+
+    coe_calc_id = Column('calc_id', Integer, primary_key=True, autoincrement=True)
+    origin_id = Column(Integer, ForeignKey('uploads.upload_id'))
+    upload = relationship('Upload')
+    checksum = Column(String)
+
+    calc_metadata = relationship('CalcMetaData', uselist=False, lazy='joined')
+    user_metadata = relationship('UserMetaData', uselist=False, lazy='joined')
+    citations = relationship('Citation', secondary=calc_citation_association, lazy='joined')
+    owners = relationship('User', secondary=ownership, lazy='joined')
+    coauthors = relationship('User', secondary=co_authorship, lazy='joined')
+    shared_with = relationship('User', secondary=shareship, lazy='joined')
+    tags = relationship('Tag', lazy='joined')
+    spacegroup = relationship('Spacegroup', lazy='joined', uselist=False)
+
+    parents = relationship(
+        'Calc',
+        secondary=calc_dataset_containment,
+        primaryjoin=calc_dataset_containment.c.children_calc_id == coe_calc_id,
+        secondaryjoin=calc_dataset_containment.c.parent_calc_id == coe_calc_id,
+        backref='children', lazy='subquery', join_depth=1)
+
+    @classmethod
+    def load_from(cls, obj):
+        repo_db = infrastructure.repository_db
+        return repo_db.query(Calc).filter_by(coe_calc_id=int(obj.pid)).first()
+
+    @property
+    def mainfile(self) -> str:
+        return self.calc_metadata.location
+
+    @property
+    def pid(self):
+        return self.coe_calc_id
+
+    @property
+    def comment(self) -> str:
+        return self.user_metadata.label
+
+    @property
+    def calc_id(self) -> str:
+        return self.checksum
+
+    @property
+    def references(self) -> List[str]:
+        return list(citation.value for citation in self.citations if citation.kind == 'EXTERNAL')
+
+    @property
+    def uploader(self) -> User:
+        assert len(self.owners) == 1, 'A calculation must have exactly one owner.'
+        return self.owners[0]
+
+    @property
+    def with_embargo(self) -> bool:
+        return self.user_metadata.permission == 1
+
+    @property
+    def chemical_formula(self) -> str:
+        return self.calc_metadata.chemical_formula
+
+    @property
+    def filenames(self) -> List[str]:
+        filenames = self.calc_metadata.filenames.decode('utf-8')
+        return json.loads(filenames)
+
+    @property
+    def all_datasets(self) -> List['DataSet']:
+        assert self.coe_calc_id is not None
+        repo_db = infrastructure.repository_db
+        query = repo_db.query(literal(self.coe_calc_id).label('coe_calc_id')).cte(recursive=True)
+        right = aliased(query)
+        left = aliased(CalcSet)
+        query = query.union_all(repo_db.query(left.parent_calc_id).join(
+            right, right.c.coe_calc_id == left.children_calc_id))
+        query = repo_db.query(query)
+        dataset_calc_ids = list(r[0] for r in query if not r[0] == self.coe_calc_id)
+        if len(dataset_calc_ids) > 0:
+            return [
+                DataSet(dataset_calc)
+                for dataset_calc in repo_db.query(Calc).filter(Calc.coe_calc_id.in_(dataset_calc_ids))]
+        else:
+            return []
+
+    @property
+    def direct_datasets(self) -> List['DataSet']:
+        return [DataSet(dataset_calc) for dataset_calc in self.parents]
+
+    def set_value(self, topic_cid: int, value: str) -> None:
+        if value is None:
+            return
+
+        repo_db = infrastructure.repository_db
+        topic = repo_db.query(Topics).filter_by(topic=value).first()
+        if not topic:
+            topic = Topics(cid=topic_cid, topic=value)
+            repo_db.add(topic)
+
+        tag = Tag(calc=self, topic=topic)
+        repo_db.add(tag)
+
+    _dataset_cache: dict = {}
+
+    def to_calc_with_metadata(self):
+        result = CalcWithMetadata(
+            upload_id=self.upload.upload_id if self.upload else None,
+            calc_id=self.calc_id)
+
+        for topic in [tag.topic for tag in self.tags]:
+            if topic.cid == base.topic_code:
+                result.program_name = topic.topic
+            elif topic.cid == base.topic_basis_set_type:
+                result.basis_set_type = topic.topic
+            elif topic.cid == base.topic_xc_treatment:
+                result.XC_functional_name = topic.topic
+            elif topic.cid == base.topic_system_type:
+                result.system_type = topic.topic
+            elif topic.cid == base.topic_atoms:
+                result.setdefault('atom_labels', []).append(topic.topic)
+            elif topic.cid == base.topic_crystal_system:
+                result.crystal_system = topic.topic
+            else:
+                raise KeyError('topic cid %s.' % str(topic.cid))
+
+        result.program_version = self.calc_metadata.version.content
+        result.chemical_composition = self.calc_metadata.chemical_formula
+        result.space_group_number = self.spacegroup.n
+        result.setdefault('atom_labels', []).sort()
+
+        datasets: List[DataSet] = []
+        for parent in self.parents:
+            parents = Calc._dataset_cache.get(parent, None)
+            if parents is None:
+                parents = parent.all_datasets
+                Calc._dataset_cache[parent] = parents
+            datasets.append(DataSet(parent))
+            datasets.extend(parents)
+
+        result.pid = self.pid
+        result.uploader = self.uploader.user_id
+        result.upload_time = self.calc_metadata.added
+        result.datasets = list(
+            dict(id=ds.id, dois=ds.dois, name=ds.name)
+            for ds in datasets)
+        result.with_embargo = self.with_embargo
+        result.comment = self.comment
+        result.references = self.references
+        result.coauthors = list(user.user_id for user in self.coauthors)
+        result.shared_with = list(user.user_id for user in self.shared_with)
+
+        return {
+            key: value for key, value in result.items()
+            if value is not None and value != []
+        }
+
+
+CalcWithMetadata.register_mapping(Calc, Calc.to_calc_with_metadata)
+
+
+class DataSet:
+    def __init__(self, dataset_calc: Calc) -> None:
+        self._dataset_calc = dataset_calc
+
+    @property
+    def id(self):
+        return self._dataset_calc.coe_calc_id
+
+    @property
+    def dois(self) -> List[Citation]:
+        return list(citation.value for citation in self._dataset_calc.citations if citation.kind == 'INTERNAL')
+
+    @property
+    def name(self):
+        return self._dataset_calc.calc_metadata.chemical_formula
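`Calc.all_datasets` resolves transitive dataset membership with a recursive CTE over the `calcsets` table: starting from the calc's own id, it repeatedly joins `children_calc_id` to `parent_calc_id` until no new parents appear. The same traversal in plain Python over an in-memory edge mapping, as an illustrative sketch rather than the production query:

    def all_dataset_ids(calc_id: int, parents_of: dict) -> set:
        # parents_of maps children_calc_id -> list of parent_calc_id (the calcsets rows)
        result: set = set()
        frontier = [calc_id]
        while frontier:
            current = frontier.pop()
            for parent in parents_of.get(current, []):
                if parent not in result:
                    result.add(parent)
                    frontier.append(parent)
        return result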
diff --git a/nomad/coe_repo/upload.py b/nomad/coe_repo/upload.py
new file mode 100644
index 0000000000000000000000000000000000000000..1d4a03787ac32bee9ff20ea4b832b001843f1f84
--- /dev/null
+++ b/nomad/coe_repo/upload.py
@@ -0,0 +1,249 @@
+# Copyright 2018 Markus Scheidgen
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#   http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+"""
+Interface to the NOMAD-coe repository postgres database. This implementation is based on
+SQLAlchemy. There are model classes that represent entries in the *users* and *session*
+tables.
+
+This module allows to authenticate users based on user password or session tokens.
+It allows to access the user data like names and user_id.
+
+.. autoclass:: User
+    :members:
+    :undoc-members:
+
+.. autoclass:: Session
+    :members:
+    :undoc-members:
+
+.. autofunction:: ensure_test_user
+
+This module also provides functionality to add parsed calculation data to the db:
+
+.. autoclass:: UploadMetaData
+    :members:
+.. autoclass:: Upload
+    :members:
+    :undoc-members:
+.. autoclass:: Calc
+    :members:
+    :undoc-members:
+"""
+
+import datetime
+from sqlalchemy import Column, Integer, String, Boolean, DateTime, ForeignKey
+from sqlalchemy.orm import relationship
+
+from nomad import utils, infrastructure, datamodel
+from nomad.datamodel import CalcWithMetadata
+
+from . import base
+from .user import User
+from .calc import Calc
+from .base import Base, CalcMetaData, UserMetaData, StructRatio, CodeVersion, Spacegroup, \
+    CalcSet, Citation
+
+
+class UploadMetaData:
+    """
+    Utility class that provides per-upload meta data and per-calculation overrides.
+    For a given *mainfile*, data is first read from the `calculations` key
+    (a list of calculation dicts with a matching `mainfile` key), before it is read
+    from `metadata_dict` itself.
+
+    The class is used to deal with user provided meta-data.
+
+    Arguments:
+        metadata_dict: The python dict with the meta-data.
+    """
+    def __init__(self, metadata_dict: dict) -> None:
+        self._upload_data = metadata_dict
+        self._calc_data: dict = {
+            calc['mainfile']: calc
+            for calc in self._upload_data.get('calculations', [])}
+
+    def get(self, mainfile: str) -> dict:
+        return self._calc_data.get(mainfile, self._upload_data)
+
+
+class Upload(Base, datamodel.Upload):  # type: ignore
+    __tablename__ = 'uploads'
+
+    coe_upload_id = Column('upload_id', Integer, primary_key=True, autoincrement=True)
+    upload_name = Column(String)
+    user_id = Column(Integer, ForeignKey('users.user_id'))
+    is_processed = Column(Boolean)
+    created = Column(DateTime)
+
+    user = relationship('User')
+    calcs = relationship('Calc')
+
+    @classmethod
+    def load_from(cls, obj):
+        return Upload.from_upload_id(str(obj.upload_id))
+
+    @staticmethod
+    def from_upload_id(upload_id: str) -> 'Upload':
+        repo_db = infrastructure.repository_db
+        uploads = repo_db.query(Upload).filter_by(upload_name=upload_id)
+        assert uploads.count() <= 1, 'Upload id/name must be unique'
+        return uploads.first()
+
+    @property
+    def upload_id(self) -> str:
+        return self.upload_name
+
+    @property
+    def uploader(self) -> 'User':
+        return self.user
+
+    @property
+    def upload_time(self) -> datetime.datetime:
+        return self.created
+
+    @staticmethod
+    def add(upload: datamodel.Upload, metadata: dict = {}) -> int:
+        """
+        Add the upload to the NOMAD-coe repository db. It creates an
+        uploads-entry, respective calculation and property entries. Everything in one
+        transaction.
+
+        Triggers an update of the NOMAD-coe repository elastic search index after
+        success (TODO).
+
+        Arguments:
+            upload: The upload to add.
+            metadata: A dictionary with additional meta data (e.g. user provided
+                meta data) that should be added to the upload and its calculations.
+        """
+        upload_metadata = UploadMetaData(metadata)
+        repo_db = infrastructure.repository_db
+        repo_db.begin()
+
+        logger = utils.get_logger(__name__, upload_id=upload.upload_id)
+
+        result = None
+
+        try:
+            # create upload
+            coe_upload = Upload(
+                upload_name=upload.upload_id,
+                created=metadata.get('_upload_time', upload.upload_time),
+                user=upload.uploader,
+                is_processed=True)
+            repo_db.add(coe_upload)
+
+            # add calculations and metadata
+            has_calcs = False
+            for calc in upload.calcs:
+                has_calcs = True
+                coe_upload._add_calculation(calc.to(CalcWithMetadata), upload_metadata.get(calc.mainfile))
+
+            # commit
+            if has_calcs:
+                # empty upload case
+                repo_db.commit()
+                result = coe_upload.coe_upload_id
+            else:
+                repo_db.rollback()
+        except Exception as e:
+            logger.error('Unexpected exception.', exc_info=e)
+            repo_db.rollback()
+            raise e
+
+        # TODO trigger index update
+        pass
+
+        return result
+
+    def _add_calculation(self, calc: CalcWithMetadata, calc_metadata: dict) -> None:
+        repo_db = infrastructure.repository_db
+
+        # table based properties
+        coe_calc = Calc(
+            coe_calc_id=calc_metadata.get('_pid', None),
+            checksum=calc_metadata.get('_checksum', calc.calc_id),
+            upload=self)
+        repo_db.add(coe_calc)
+
+        program_version = calc.program_version  # TODO shorten version names
+        code_version = repo_db.query(CodeVersion).filter_by(content=program_version).first()
+        if code_version is None:
+            code_version = CodeVersion(content=program_version)
+            repo_db.add(code_version)
+
+        metadata = CalcMetaData(
+            calc=coe_calc,
+            added=calc_metadata.get('_upload_time', self.upload_time),
+            chemical_formula=calc.chemical_composition,
+            filenames=('[%s]' % ','.join(['"%s"' % filename for filename in calc.files])).encode('utf-8'),
+            location=calc.mainfile,
+            version=code_version)
+        repo_db.add(metadata)
+
+        struct_ratio = StructRatio(
+            calc=coe_calc,
+            chemical_formula=calc.chemical_composition,
+            formula_units=1, nelem=1)
+        repo_db.add(struct_ratio)
+
+        user_metadata = UserMetaData(
+            calc=coe_calc,
+            label=calc_metadata.get('comment', None),
+            permission=(1 if calc_metadata.get('with_embargo', False) else 0))
+        repo_db.add(user_metadata)
+
+        spacegroup = Spacegroup(
+            calc=coe_calc,
+            n=int(calc.space_group_number)
+        )
+        repo_db.add(spacegroup)
+
+        # topic based properties
+        coe_calc.set_value(base.topic_code, calc.program_name)
+        for atom in set(calc.atom_labels):
+            coe_calc.set_value(base.topic_atoms, str(atom))
+        coe_calc.set_value(base.topic_system_type, calc.system_type)
+        coe_calc.set_value(base.topic_xc_treatment, calc.XC_functional_name)
+        coe_calc.set_value(base.topic_crystal_system, calc.crystal_system)
+        coe_calc.set_value(base.topic_basis_set_type, calc.basis_set_type)
+
+        # user relations
+        owner_user_id = calc_metadata.get('_uploader', int(self.user_id))
+        coe_calc.owners.append(repo_db.query(User).get(owner_user_id))
+
+        for coauthor_id in calc_metadata.get('coauthors', []):
+            coe_calc.coauthors.append(repo_db.query(User).get(coauthor_id))
+
+        for shared_with_id in calc_metadata.get('shared_with', []):
+            coe_calc.shared_with.append(repo_db.query(User).get(shared_with_id))
+
+        # datasets
+        for dataset_id in calc_metadata.get('datasets', []):
+            dataset = CalcSet(parent_calc_id=dataset_id, children_calc_id=coe_calc.coe_calc_id)
+            repo_db.add(dataset)
+
+        # references
+        for reference in calc_metadata.get('references', []):
+            citation = repo_db.query(Citation).filter_by(
+                value=reference,
+                kind='EXTERNAL').first()
+
+            if citation is None:
+                citation = Citation(value=reference, kind='EXTERNAL')
+                repo_db.add(citation)
+
+            coe_calc.citations.append(citation)
diff --git a/nomad/coe_repo/user.py b/nomad/coe_repo/user.py
new file mode 100644
index 0000000000000000000000000000000000000000..29ae419e4faca2ee3c664f21fc793a9396a65fbd
--- /dev/null
+++ b/nomad/coe_repo/user.py
@@ -0,0 +1,172 @@
+# Copyright 2018 Markus Scheidgen
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#   http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+from passlib.hash import bcrypt
+from sqlalchemy import Column, Integer, String
+import datetime
+import jwt
+
+from nomad import infrastructure, config
+
+from .base import Base
+
+
+class Session(Base):  # type: ignore
+    __tablename__ = 'sessions'
+
+    token = Column(String, primary_key=True)
+    user_id = Column(String)
+
+
+class LoginException(Exception):
+    pass
+
+
+class User(Base):  # type: ignore
+    """
+    SQLAlchemy model class that represents NOMAD-coe repository postgresdb *users*.
+    Provides functions for authenticating via password or session token.
+
+    It is not intended to create or update users. This should be done via the
+    NOMAD-coe repository GUI.
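+
+    A sketch of the verification entry points (the credentials are illustrative):
+
+    .. code-block:: python
+
+        user = User.verify_user_password('sheldon.cooper@nomad-fairdi.tests.de', 'password')
+        same_user = User.verify_auth_token(user.token)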
+    """
+    __tablename__ = 'users'
+
+    user_id = Column(Integer, primary_key=True)
+    email = Column(String)
+    first_name = Column(String, name='firstname')
+    last_name = Column(String, name='lastname')
+    affiliation = Column(String)
+    password = Column(String)
+
+    def __repr__(self):
+        return '<User(email="%s")>' % self.email
+
+    def _hash_password(self, password):
+        assert False, 'Login functions are done by the NOMAD-coe repository GUI'
+        # password_hash = bcrypt.encrypt(password, ident='2y')
+        # self.password = password_hash
+
+    def _verify_password(self, password):
+        return bcrypt.verify(password, self.password)
+
+    def _generate_auth_token(self, expiration=600):
+        assert False, 'Login functions are done by the NOMAD-coe repository GUI'
+
+    @staticmethod
+    def from_user_id(user_id) -> 'User':
+        return infrastructure.repository_db.query(User).get(user_id)
+
+    def get_auth_token(self):
+        repo_db = infrastructure.repository_db
+        session = repo_db.query(Session).filter_by(user_id=self.user_id).first()
+        if not session:
+            raise LoginException('No session, user probably not logged in at NOMAD-coe repository GUI')
+
+        return session.token.encode('utf-8')
+
+    def get_signature_token(self, expiration=10):
+        """
+        Generates a very short-term JWT token that can be used to sign download URLs.
+
+        Returns: Tuple with token and expiration datetime
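+
+        A usage sketch, pairing with :func:`verify_signature_token`:
+
+        .. code-block:: python
+
+            token, expires_at = user.get_signature_token()
+            # later, e.g. when verifying a signed download URL
+            user = User.verify_signature_token(token)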
+        """
+        expires_at = datetime.datetime.utcnow() + datetime.timedelta(seconds=expiration)
+        token = jwt.encode(
+            dict(user=self.email, exp=expires_at),
+            config.services.api_secret, 'HS256').decode('utf-8')
+        return token, expires_at
+
+    @property
+    def token(self):
+        return self.get_auth_token().decode('utf-8')
+
+    @property
+    def is_admin(self) -> bool:
+        return self.email == 'admin'
+
+    @staticmethod
+    def verify_user_password(email, password):
+        if email is None or password is None or email == '' or password == '':
+            return None
+
+        repo_db = infrastructure.repository_db
+        user = repo_db.query(User).filter_by(email=email).first()
+        if not user:
+            return None
+
+        if user._verify_password(password):
+            return user
+        else:
+            raise LoginException('Wrong password')
+
+    @staticmethod
+    def verify_auth_token(token):
+        if token is None or token == '':
+            return None
+
+        repo_db = infrastructure.repository_db
+        session = repo_db.query(Session).filter_by(token=token).first()
+        if session is None:
+            return None
+
+        user = repo_db.query(User).filter_by(user_id=session.user_id).first()
+        assert user, 'User in sessions must exist.'
+        return user
+
+    @staticmethod
+    def verify_signature_token(token):
+        """
+        Verifies the given JWT token. This should be used to verify URLs signed
+        with a short term signature token (see :func:`get_signature_token`)
+        """
+        try:
+            decoded = jwt.decode(token, config.services.api_secret, algorithms=['HS256'])
+            repo_db = infrastructure.repository_db
+            user = repo_db.query(User).filter_by(email=decoded['user']).first()
+            if user is None:
+                raise LoginException('Token signed for invalid user')
+            else:
+                return user
+        except KeyError:
+            raise LoginException('Token with invalid/unexpected payload')
+        except jwt.ExpiredSignatureError:
+            raise LoginException('Expired token')
+        except jwt.InvalidTokenError:
+            raise LoginException('Invalid token')
+
+
+def ensure_test_user(email):
+    """
+    Allows tests to make sure that the default test users exist in the database.
+    Returns:
+        The user as :class:`User` instance.
+    """
+    repo_db = infrastructure.repository_db
+    existing = repo_db.query(User).filter_by(email=email).first()
+    assert existing, 'Test user %s does not exist.' % email
+
+    session = repo_db.query(Session).filter_by(
+        user_id=existing.user_id).first()
+    assert session, 'Test user %s has no session.' % email
+    assert session.token == existing.first_name.lower(), 'Test user %s session has unexpected token.' % email
+
+    return existing
+
+
+def admin_user():
+    repo_db = infrastructure.repository_db
+    admin = repo_db.query(User).filter_by(user_id=1).first()
+    assert admin, 'Admin user does not exist.'
+    return admin
diff --git a/nomad/config.py b/nomad/config.py
index 90301f0a2cc1deac1fcc8d046cf2738b9b458213..c99a881aa7b3d88e602ceab5e4dfbc9a3c2515f3 100644
--- a/nomad/config.py
+++ b/nomad/config.py
@@ -22,7 +22,7 @@ import logging
 from collections import namedtuple
 
 FilesConfig = namedtuple(
-    'FilesConfig', ['uploads_bucket', 'raw_bucket', 'archive_bucket', 'compress_archive'])
+    'FilesConfig', ['uploads_bucket', 'raw_bucket', 'archive_bucket', 'staging_bucket', 'public_bucket'])
 """ API independent configuration for the object storage. """
 
 CeleryConfig = namedtuple('Celery', ['broker_url'])
@@ -34,7 +34,7 @@ FSConfig = namedtuple('FSConfig', ['tmp', 'objects'])
 RepositoryDBConfig = namedtuple('RepositoryDBConfig', ['host', 'port', 'dbname', 'user', 'password'])
 """ Used to configure access to NOMAD-coe repository db. """
 
-ElasticConfig = namedtuple('ElasticConfig', ['host', 'port', 'index_name'])
+ElasticConfig = namedtuple('ElasticConfig', ['host', 'port', 'index_name', 'coe_repo_calcs_index_name'])
 """ Used to configure elastic search. """
 
 MongoConfig = namedtuple('MongoConfig', ['host', 'port', 'db_name'])
@@ -43,14 +43,15 @@ MongoConfig = namedtuple('MongoConfig', ['host', 'port', 'db_name'])
 LogstashConfig = namedtuple('LogstashConfig', ['enabled', 'host', 'tcp_port', 'level'])
 """ Used to configure and enable/disable the ELK based centralized logging. """
 
-NomadServicesConfig = namedtuple('NomadServicesConfig', ['api_host', 'api_port', 'api_base_path', 'api_secret'])
+NomadServicesConfig = namedtuple('NomadServicesConfig', ['api_host', 'api_port', 'api_base_path', 'api_secret', 'admin_password', 'disable_reset'])
 """ Used to configure nomad services: worker, handler, api """
 
 files = FilesConfig(
     uploads_bucket='uploads',
     raw_bucket=os.environ.get('NOMAD_FILES_RAW_BUCKET', 'raw'),
     archive_bucket='archive',
-    compress_archive=True
+    staging_bucket='staging',
+    public_bucket='public'
 )
 
 rabbit_host = os.environ.get('NOMAD_RABBITMQ_HOST', 'localhost')
@@ -82,7 +83,8 @@ fs = FSConfig(
 elastic = ElasticConfig(
     host=os.environ.get('NOMAD_ELASTIC_HOST', 'localhost'),
     port=int(os.environ.get('NOMAD_ELASTIC_PORT', 9200)),
-    index_name=os.environ.get('NOMAD_ELASTIC_INDEX_NAME', 'calcs')
+    index_name=os.environ.get('NOMAD_ELASTIC_INDEX_NAME', 'calcs'),
+    coe_repo_calcs_index_name='coe_repo_calcs'
 )
 repository_db = RepositoryDBConfig(
     host=os.environ.get('NOMAD_COE_REPO_DB_HOST', 'localhost'),
@@ -106,8 +108,18 @@ services = NomadServicesConfig(
     api_host=os.environ.get('NOMAD_API_HOST', 'localhost'),
     api_port=int(os.environ.get('NOMAD_API_PORT', 8000)),
     api_base_path=os.environ.get('NOMAD_API_BASE_PATH', '/nomad/api'),
-    api_secret=os.environ.get('NOMAD_API_SECRET', 'defaultApiSecret')
+    api_secret=os.environ.get('NOMAD_API_SECRET', 'defaultApiSecret'),
+    admin_password=os.environ.get('NOMAD_API_ADMIN_PASSWORD', 'password'),
+    disable_reset=os.environ.get('NOMAD_API_DISABLE_RESET', 'false') == 'true'
+)
+migration_source_db = RepositoryDBConfig(
+    host=os.environ.get('NOMAD_MIGRATION_SOURCE_DB_HOST', 'db-repository.nomad.esc'),
+    port=int(os.environ.get('NOMAD_MIGRATION_SOURCE_DB_PORT', 5432)),
+    dbname=os.environ.get('NOMAD_MIGRATION_SOURCE_DB_NAME', 'nomad_prod'),
+    user=os.environ.get('NOMAD_MIGRATION_SOURCE_USER', 'nomadlab'),
+    password=os.environ.get('NOMAD_MIGRATION_SOURCE_PASSWORD', '*')
 )
 
 console_log_level = get_loglevel_from_env('NOMAD_CONSOLE_LOGLEVEL', default_level=logging.INFO)
 service = os.environ.get('NOMAD_SERVICE', 'unknown nomad service')
+release = os.environ.get('NOMAD_RELEASE', 'devel')
diff --git a/nomad/datamodel.py b/nomad/datamodel.py
new file mode 100644
index 0000000000000000000000000000000000000000..5c87377ab3e9e1591a116d6080a5eefe721f2745
--- /dev/null
+++ b/nomad/datamodel.py
@@ -0,0 +1,139 @@
+# Copyright 2018 Markus Scheidgen
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#   http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+"""
+This module contains classes that represent the core
+nomad data entities :class:`Upload` and :class:`Calc` on a high level of abstraction
+independent from their representation in the different modules :py:mod:`nomad.repo`,
+:py:mod:`nomad.processing`, :py:mod:`nomad.coe_repo`, :py:mod:`nomad.files`.
+The goal is not to represent every detail, but those parts that are directly involved in
+API, processing, migration, mirroring, or other 'infrastructure' operations.
+"""
+
+from typing import Type, TypeVar, Union, Iterable, cast, Callable, Dict
+import datetime
+
+from nomad import utils
+
+T = TypeVar('T')
+
+
+class Entity():
+    @classmethod
+    def load_from(cls: Type[T], obj) -> T:
+        raise NotImplementedError
+
+    def to(self, entity_cls: Type[T]) -> T:
+        """
+        Either provides a type cast if this object already has the right type, or adapts
+        the type using the :func:`load_from` of the target class :param:`entity_cls`.
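+
+        For example (a sketch, assuming a calc representation with a registered mapping):
+
+        .. code-block:: python
+
+            calc_with_metadata = some_calc.to(CalcWithMetadata)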
+        """
+        if isinstance(self, entity_cls):
+            return cast(T, self)
+        else:
+            return cast(T, cast(Type[Entity], entity_cls).load_from(self))
+
+
+class Calc(Entity):
+    """
+    A nomad calculation.
+
+    Attributes:
+        pid: The persistent id (pid) for the calculation
+        mainfile: The mainfile path relative to upload root
+        calc_id: A unique id/checksum that describes unique calculations
+        upload: The upload object that this calculation belongs to.
+    """
+    @property
+    def pid(self) -> Union[int, str]:
+        raise NotImplementedError
+
+    @property
+    def mainfile(self) -> str:
+        raise NotImplementedError
+
+    @property
+    def calc_id(self) -> str:
+        raise NotImplementedError
+
+    @property
+    def upload(self) -> 'Upload':
+        raise NotImplementedError
+
+
+class Upload(Entity):
+    """
+    A nomad upload.
+
+    Attributes:
+        upload_id(str): The unique random id that each upload has
+        upload_time(datetime): The upload time
+        uploader(repo.User): The user that uploaded this upload
+        calcs(Iterable[Calc]): An iterable over the calculations of this upload
+    """
+    @property
+    def upload_id(self) -> str:
+        return '<not assigned>'
+
+    @property
+    def upload_time(self) -> datetime.datetime:
+        raise NotImplementedError
+
+    @property
+    def uploader(self):
+        raise NotImplementedError
+
+    @property
+    def calcs(self) -> Iterable[Calc]:
+        raise NotImplementedError
+
+
+class UploadWithMetadata(dict, Entity):
+
+    def __init__(self, upload_id):
+        self.upload_id = upload_id
+
+
+class CalcWithMetadata(utils.POPO, Entity):
+    """
+    A dict/POPO class that can be used for mapping calc representations with calc metadata.
+    We have many representations of calcs and their calc metadata. To avoid implementing
+    mappings between all combinations, each representation only maps to and from this
+    class and uses mapping transitivity, i.e. A -> this -> B instead of A -> B.
+
+    Other calc representations can register mappings from their types, in order to allow
+    the use of this class's :func:`load_from` method.
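+
+    A sketch of registering and using a mapping (``SourceCalc`` is illustrative):
+
+    .. code-block:: python
+
+        CalcWithMetadata.register_mapping(
+            SourceCalc,
+            lambda calc: CalcWithMetadata(upload_id=calc.upload_id, calc_id=calc.calc_id))
+        calc_with_metadata = source_calc.to(CalcWithMetadata)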
+    """
+    mappings: Dict[Type[Entity], Callable[[Entity], 'CalcWithMetadata']] = dict()
+
+    @classmethod
+    def register_mapping(
+            cls, from_type: Type[Entity], mapping: Callable[[Entity], 'CalcWithMetadata']):
+        """
+        Register a mapping from instances of another calc representation to instances of
+        :class:`CalcWithMetadata`.
+        Arguments:
+            from_type: The source calc type of the mapping.
+            mapping: The mapping itself as a callable that takes a source object of the
+                source calc type and returns an instance of :class:`CalcWithMetadata`.
+        """
+        cls.mappings[from_type] = mapping
+
+    @classmethod
+    def load_from(cls, obj):
+        return CalcWithMetadata.mappings[obj.__class__](obj)
+
+    def __init__(self, **kwargs):
+        super().__init__(**kwargs)
+        self.upload = UploadWithMetadata(kwargs['upload_id'])
diff --git a/nomad/empty_repository_db.sql b/nomad/empty_repository_db.sql
index 4eb25451994404645ca60fca4576ab653bac26bf..1a78a82ab08e08feba2ecbc87e64a46721766e34 100644
--- a/nomad/empty_repository_db.sql
+++ b/nomad/empty_repository_db.sql
@@ -11,14 +11,14 @@ SET check_function_bodies = false;
 SET client_min_messages = warning;
 
 --
--- Name: plpgsql; Type: EXTENSION; Schema: -; Owner: 
+-- Name: plpgsql; Type: EXTENSION; Schema: -; Owner:
 --
 
 CREATE EXTENSION IF NOT EXISTS plpgsql WITH SCHEMA pg_catalog;
 
 
 --
--- Name: EXTENSION plpgsql; Type: COMMENT; Schema: -; Owner: 
+-- Name: EXTENSION plpgsql; Type: COMMENT; Schema: -; Owner:
 --
 
 COMMENT ON EXTENSION plpgsql IS 'PL/pgSQL procedural language';
@@ -41,7 +41,7 @@ SET default_tablespace = '';
 SET default_with_oids = false;
 
 --
--- Name: affiliations; Type: TABLE; Schema: public; Owner: postgres; Tablespace: 
+-- Name: affiliations; Type: TABLE; Schema: public; Owner: postgres; Tablespace:
 --
 
 CREATE TABLE public.affiliations (
@@ -76,7 +76,7 @@ ALTER SEQUENCE public.affiliations_a_id_seq OWNED BY public.affiliations.a_id;
 
 
 --
--- Name: alembic_version; Type: TABLE; Schema: public; Owner: postgres; Tablespace: 
+-- Name: alembic_version; Type: TABLE; Schema: public; Owner: postgres; Tablespace:
 --
 
 CREATE TABLE public.alembic_version (
@@ -87,7 +87,7 @@ CREATE TABLE public.alembic_version (
 ALTER TABLE public.alembic_version OWNER TO postgres;
 
 --
--- Name: atoms; Type: TABLE; Schema: public; Owner: postgres; Tablespace: 
+-- Name: atoms; Type: TABLE; Schema: public; Owner: postgres; Tablespace:
 --
 
 CREATE TABLE public.atoms (
@@ -127,7 +127,7 @@ ALTER SEQUENCE public.atoms_atom_id_seq OWNED BY public.atoms.atom_id;
 
 
 --
--- Name: basis_sets; Type: TABLE; Schema: public; Owner: postgres; Tablespace: 
+-- Name: basis_sets; Type: TABLE; Schema: public; Owner: postgres; Tablespace:
 --
 
 CREATE TABLE public.basis_sets (
@@ -145,7 +145,7 @@ CREATE TABLE public.basis_sets (
 ALTER TABLE public.basis_sets OWNER TO postgres;
 
 --
--- Name: calcsets; Type: TABLE; Schema: public; Owner: postgres; Tablespace: 
+-- Name: calcsets; Type: TABLE; Schema: public; Owner: postgres; Tablespace:
 --
 
 CREATE TABLE public.calcsets (
@@ -157,7 +157,7 @@ CREATE TABLE public.calcsets (
 ALTER TABLE public.calcsets OWNER TO postgres;
 
 --
--- Name: calculations; Type: TABLE; Schema: public; Owner: postgres; Tablespace: 
+-- Name: calculations; Type: TABLE; Schema: public; Owner: postgres; Tablespace:
 --
 
 CREATE TABLE public.calculations (
@@ -167,7 +167,8 @@ CREATE TABLE public.calculations (
     origin_id integer,
     nested_depth integer,
     frozen boolean,
-    calc_id integer NOT NULL
+    calc_id integer NOT NULL,
+    handlepid varchar(100) NULL
 );
 
 
@@ -195,7 +196,7 @@ ALTER SEQUENCE public.calculations_calc_id_seq OWNED BY public.calculations.calc
 
 
 --
--- Name: charges; Type: TABLE; Schema: public; Owner: postgres; Tablespace: 
+-- Name: charges; Type: TABLE; Schema: public; Owner: postgres; Tablespace:
 --
 
 CREATE TABLE public.charges (
@@ -212,7 +213,7 @@ CREATE TABLE public.charges (
 ALTER TABLE public.charges OWNER TO postgres;
 
 --
--- Name: citations; Type: TABLE; Schema: public; Owner: postgres; Tablespace: 
+-- Name: citations; Type: TABLE; Schema: public; Owner: postgres; Tablespace:
 --
 
 CREATE TABLE public.citations (
@@ -246,7 +247,7 @@ ALTER SEQUENCE public.citations_citation_id_seq OWNED BY public.citations.citati
 
 
 --
--- Name: coauthorships; Type: TABLE; Schema: public; Owner: postgres; Tablespace: 
+-- Name: coauthorships; Type: TABLE; Schema: public; Owner: postgres; Tablespace:
 --
 
 CREATE TABLE public.coauthorships (
@@ -258,7 +259,7 @@ CREATE TABLE public.coauthorships (
 ALTER TABLE public.coauthorships OWNER TO postgres;
 
 --
--- Name: codefamilies; Type: TABLE; Schema: public; Owner: postgres; Tablespace: 
+-- Name: codefamilies; Type: TABLE; Schema: public; Owner: postgres; Tablespace:
 --
 
 CREATE TABLE public.codefamilies (
@@ -291,7 +292,7 @@ ALTER SEQUENCE public.codefamilies_family_id_seq OWNED BY public.codefamilies.fa
 
 
 --
--- Name: codeversions; Type: TABLE; Schema: public; Owner: postgres; Tablespace: 
+-- Name: codeversions; Type: TABLE; Schema: public; Owner: postgres; Tablespace:
 --
 
 CREATE TABLE public.codeversions (
@@ -325,7 +326,7 @@ ALTER SEQUENCE public.codeversions_version_id_seq OWNED BY public.codeversions.v
 
 
 --
--- Name: doi_mapping; Type: TABLE; Schema: public; Owner: postgres; Tablespace: 
+-- Name: doi_mapping; Type: TABLE; Schema: public; Owner: postgres; Tablespace:
 --
 
 CREATE TABLE public.doi_mapping (
@@ -358,7 +359,7 @@ ALTER SEQUENCE public.doi_mapping_calc_id_seq OWNED BY public.doi_mapping.calc_i
 
 
 --
--- Name: eigenvalues; Type: TABLE; Schema: public; Owner: postgres; Tablespace: 
+-- Name: eigenvalues; Type: TABLE; Schema: public; Owner: postgres; Tablespace:
 --
 
 CREATE TABLE public.eigenvalues (
@@ -396,7 +397,7 @@ ALTER SEQUENCE public.eigenvalues_eid_seq OWNED BY public.eigenvalues.eid;
 
 
 --
--- Name: electrons; Type: TABLE; Schema: public; Owner: postgres; Tablespace: 
+-- Name: electrons; Type: TABLE; Schema: public; Owner: postgres; Tablespace:
 --
 
 CREATE TABLE public.electrons (
@@ -409,7 +410,7 @@ CREATE TABLE public.electrons (
 ALTER TABLE public.electrons OWNER TO postgres;
 
 --
--- Name: energies; Type: TABLE; Schema: public; Owner: postgres; Tablespace: 
+-- Name: energies; Type: TABLE; Schema: public; Owner: postgres; Tablespace:
 --
 
 CREATE TABLE public.energies (
@@ -422,7 +423,7 @@ CREATE TABLE public.energies (
 ALTER TABLE public.energies OWNER TO postgres;
 
 --
--- Name: forces; Type: TABLE; Schema: public; Owner: postgres; Tablespace: 
+-- Name: forces; Type: TABLE; Schema: public; Owner: postgres; Tablespace:
 --
 
 CREATE TABLE public.forces (
@@ -434,7 +435,7 @@ CREATE TABLE public.forces (
 ALTER TABLE public.forces OWNER TO postgres;
 
 --
--- Name: grid; Type: TABLE; Schema: public; Owner: postgres; Tablespace: 
+-- Name: grid; Type: TABLE; Schema: public; Owner: postgres; Tablespace:
 --
 
 CREATE TABLE public.grid (
@@ -446,7 +447,7 @@ CREATE TABLE public.grid (
 ALTER TABLE public.grid OWNER TO postgres;
 
 --
--- Name: lattices; Type: TABLE; Schema: public; Owner: postgres; Tablespace: 
+-- Name: lattices; Type: TABLE; Schema: public; Owner: postgres; Tablespace:
 --
 
 CREATE TABLE public.lattices (
@@ -472,7 +473,7 @@ CREATE TABLE public.lattices (
 ALTER TABLE public.lattices OWNER TO postgres;
 
 --
--- Name: login_tokens; Type: TABLE; Schema: public; Owner: postgres; Tablespace: 
+-- Name: login_tokens; Type: TABLE; Schema: public; Owner: postgres; Tablespace:
 --
 
 CREATE TABLE public.login_tokens (
@@ -485,28 +486,27 @@ CREATE TABLE public.login_tokens (
 ALTER TABLE public.login_tokens OWNER TO postgres;
 
 --
--- Name: metadata; Type: TABLE; Schema: public; Owner: postgres; Tablespace: 
+-- Name: metadata; Type: TABLE; Schema: public; Owner: postgres; Tablespace:
 --
 
 CREATE TABLE public.metadata (
-    calc_id integer NOT NULL,
-    version_id integer,
-    location character varying,
-    finished integer,
-    raw_input text,
-    modeling_time double precision,
-    chemical_formula character varying,
-    added timestamp with time zone,
-    oadate timestamp with time zone,
-    download_size bigint,
-    filenames bytea
+	version_id int4 NULL,
+	"location" varchar NULL,
+	finished int4 NULL,
+	raw_input text NULL,
+	modeling_time float8 NULL,
+	chemical_formula varchar NULL,
+	added timestamptz NULL,
+	download_size int8 NULL,
+	filenames bytea NULL,
+	calc_id int4 NOT NULL,
+	oadate timestamptz NULL
 );
 
-
 ALTER TABLE public.metadata OWNER TO postgres;
 
 --
--- Name: metadata_citations; Type: TABLE; Schema: public; Owner: postgres; Tablespace: 
+-- Name: metadata_citations; Type: TABLE; Schema: public; Owner: postgres; Tablespace:
 --
 
 CREATE TABLE public.metadata_citations (
@@ -518,7 +518,7 @@ CREATE TABLE public.metadata_citations (
 ALTER TABLE public.metadata_citations OWNER TO postgres;
 
 --
--- Name: ownerships; Type: TABLE; Schema: public; Owner: postgres; Tablespace: 
+-- Name: ownerships; Type: TABLE; Schema: public; Owner: postgres; Tablespace:
 --
 
 CREATE TABLE public.ownerships (
@@ -530,7 +530,7 @@ CREATE TABLE public.ownerships (
 ALTER TABLE public.ownerships OWNER TO postgres;
 
 --
--- Name: phonons; Type: TABLE; Schema: public; Owner: postgres; Tablespace: 
+-- Name: phonons; Type: TABLE; Schema: public; Owner: postgres; Tablespace:
 --
 
 CREATE TABLE public.phonons (
@@ -541,7 +541,7 @@ CREATE TABLE public.phonons (
 ALTER TABLE public.phonons OWNER TO postgres;
 
 --
--- Name: pottypes; Type: TABLE; Schema: public; Owner: postgres; Tablespace: 
+-- Name: pottypes; Type: TABLE; Schema: public; Owner: postgres; Tablespace:
 --
 
 CREATE TABLE public.pottypes (
@@ -574,7 +574,7 @@ ALTER SEQUENCE public.pottypes_pottype_id_seq OWNED BY public.pottypes.pottype_i
 
 
 --
--- Name: pragma; Type: TABLE; Schema: public; Owner: postgres; Tablespace: 
+-- Name: pragma; Type: TABLE; Schema: public; Owner: postgres; Tablespace:
 --
 
 CREATE TABLE public.pragma (
@@ -585,7 +585,7 @@ CREATE TABLE public.pragma (
 ALTER TABLE public.pragma OWNER TO postgres;
 
 --
--- Name: recipintegs; Type: TABLE; Schema: public; Owner: postgres; Tablespace: 
+-- Name: recipintegs; Type: TABLE; Schema: public; Owner: postgres; Tablespace:
 --
 
 CREATE TABLE public.recipintegs (
@@ -600,7 +600,7 @@ CREATE TABLE public.recipintegs (
 ALTER TABLE public.recipintegs OWNER TO postgres;
 
 --
--- Name: sessions; Type: TABLE; Schema: public; Owner: postgres; Tablespace: 
+-- Name: sessions; Type: TABLE; Schema: public; Owner: postgres; Tablespace:
 --
 
 CREATE TABLE public.sessions (
@@ -615,7 +615,7 @@ CREATE TABLE public.sessions (
 ALTER TABLE public.sessions OWNER TO postgres;
 
 --
--- Name: shareships; Type: TABLE; Schema: public; Owner: postgres; Tablespace: 
+-- Name: shareships; Type: TABLE; Schema: public; Owner: postgres; Tablespace:
 --
 
 CREATE TABLE public.shareships (
@@ -627,7 +627,7 @@ CREATE TABLE public.shareships (
 ALTER TABLE public.shareships OWNER TO postgres;
 
 --
--- Name: spacegroups; Type: TABLE; Schema: public; Owner: postgres; Tablespace: 
+-- Name: spacegroups; Type: TABLE; Schema: public; Owner: postgres; Tablespace:
 --
 
 CREATE TABLE public.spacegroups (
@@ -639,7 +639,7 @@ CREATE TABLE public.spacegroups (
 ALTER TABLE public.spacegroups OWNER TO postgres;
 
 --
--- Name: struct_optimisation; Type: TABLE; Schema: public; Owner: postgres; Tablespace: 
+-- Name: struct_optimisation; Type: TABLE; Schema: public; Owner: postgres; Tablespace:
 --
 
 CREATE TABLE public.struct_optimisation (
@@ -652,7 +652,7 @@ CREATE TABLE public.struct_optimisation (
 ALTER TABLE public.struct_optimisation OWNER TO postgres;
 
 --
--- Name: struct_ratios; Type: TABLE; Schema: public; Owner: postgres; Tablespace: 
+-- Name: struct_ratios; Type: TABLE; Schema: public; Owner: postgres; Tablespace:
 --
 
 CREATE TABLE public.struct_ratios (
@@ -668,7 +668,7 @@ CREATE TABLE public.struct_ratios (
 ALTER TABLE public.struct_ratios OWNER TO postgres;
 
 --
--- Name: structures; Type: TABLE; Schema: public; Owner: postgres; Tablespace: 
+-- Name: structures; Type: TABLE; Schema: public; Owner: postgres; Tablespace:
 --
 
 CREATE TABLE public.structures (
@@ -703,7 +703,7 @@ ALTER SEQUENCE public.structures_struct_id_seq OWNED BY public.structures.struct
 
 
 --
--- Name: tags; Type: TABLE; Schema: public; Owner: postgres; Tablespace: 
+-- Name: tags; Type: TABLE; Schema: public; Owner: postgres; Tablespace:
 --
 
 CREATE TABLE public.tags (
@@ -715,7 +715,7 @@ CREATE TABLE public.tags (
 ALTER TABLE public.tags OWNER TO postgres;
 
 --
--- Name: topics; Type: TABLE; Schema: public; Owner: postgres; Tablespace: 
+-- Name: topics; Type: TABLE; Schema: public; Owner: postgres; Tablespace:
 --
 
 CREATE TABLE public.topics (
@@ -749,7 +749,7 @@ ALTER SEQUENCE public.topics_tid_seq OWNED BY public.topics.tid;
 
 
 --
--- Name: uploads; Type: TABLE; Schema: public; Owner: postgres; Tablespace: 
+-- Name: uploads; Type: TABLE; Schema: public; Owner: postgres; Tablespace:
 --
 
 CREATE TABLE public.uploads (
@@ -788,7 +788,7 @@ ALTER SEQUENCE public.uploads_upload_id_seq OWNED BY public.uploads.upload_id;
 
 
 --
--- Name: user_metadata; Type: TABLE; Schema: public; Owner: postgres; Tablespace: 
+-- Name: user_metadata; Type: TABLE; Schema: public; Owner: postgres; Tablespace:
 --
 
 CREATE TABLE public.user_metadata (
@@ -801,7 +801,7 @@ CREATE TABLE public.user_metadata (
 ALTER TABLE public.user_metadata OWNER TO postgres;
 
 --
--- Name: users; Type: TABLE; Schema: public; Owner: postgres; Tablespace: 
+-- Name: users; Type: TABLE; Schema: public; Owner: postgres; Tablespace:
 --
 
 CREATE TABLE public.users (
@@ -1155,8 +1155,8 @@ INSERT INTO public.pragma VALUES ('4.59');
 -- Data for Name: sessions; Type: TABLE DATA; Schema: public; Owner: postgres
 --
 
-INSERT INTO public.sessions VALUES ('leonard.hofstadter@nomad-fairdi.tests.de', 2, '2100-12-17 09:00:00+00', NULL, NULL);
-INSERT INTO public.sessions VALUES ('sheldon.cooper@nomad-fairdi.tests.de', 1, '2100-12-17 09:00:00+00', NULL, NULL);
+INSERT INTO public.sessions VALUES ('leonard', 3, '2100-12-17 09:00:00+00', NULL, NULL);
+INSERT INTO public.sessions VALUES ('sheldon', 2, '2100-12-17 09:00:00+00', NULL, NULL);
 
 
 --
@@ -1238,8 +1238,9 @@ SELECT pg_catalog.setval('public.uploads_upload_id_seq', 1, false);
 -- Data for Name: users; Type: TABLE DATA; Schema: public; Owner: postgres
 --
 
-INSERT INTO public.users VALUES (1, 'Sheldon', 'Cooper', 'sheldon.cooper', 'sheldon.cooper@nomad-fairdi.tests.de', NULL, '$2y$12$jths1LQPsLofuBQ3evVIluhQeQ/BZfbdTSZHFcPGdcNmHz2WvDj.y', NULL);
-INSERT INTO public.users VALUES (2, 'Leonard', 'Hofstadter', 'leonard.hofstadter', 'leonard.hofstadter@nomad-fairdi.tests.de', NULL, '$2y$12$jths1LQPsLofuBQ3evVIluhQeQ/BZfbdTSZHFcPGdcNmHz2WvDj.y', NULL);
+INSERT INTO public.users VALUES (1, 'admin', 'admin', 'admin', 'admin', NULL, NULL, NULL);
+INSERT INTO public.users VALUES (2, 'Sheldon', 'Cooper', 'sheldon.cooper', 'sheldon.cooper@nomad-fairdi.tests.de', NULL, '$2y$12$jths1LQPsLofuBQ3evVIluhQeQ/BZfbdTSZHFcPGdcNmHz2WvDj.y', NULL);
+INSERT INTO public.users VALUES (3, 'Leonard', 'Hofstadter', 'leonard.hofstadter', 'leonard.hofstadter@nomad-fairdi.tests.de', NULL, '$2y$12$jths1LQPsLofuBQ3evVIluhQeQ/BZfbdTSZHFcPGdcNmHz2WvDj.y', NULL);
 
 
 --
@@ -1250,7 +1251,7 @@ SELECT pg_catalog.setval('public.users_user_id_seq', 1, true);
 
 
 --
--- Name: affiliations_pkey; Type: CONSTRAINT; Schema: public; Owner: postgres; Tablespace: 
+-- Name: affiliations_pkey; Type: CONSTRAINT; Schema: public; Owner: postgres; Tablespace:
 --
 
 ALTER TABLE ONLY public.affiliations
@@ -1258,7 +1259,7 @@ ALTER TABLE ONLY public.affiliations
 
 
 --
--- Name: atoms_pkey; Type: CONSTRAINT; Schema: public; Owner: postgres; Tablespace: 
+-- Name: atoms_pkey; Type: CONSTRAINT; Schema: public; Owner: postgres; Tablespace:
 --
 
 ALTER TABLE ONLY public.atoms
@@ -1266,7 +1267,7 @@ ALTER TABLE ONLY public.atoms
 
 
 --
--- Name: basis_sets_pkey; Type: CONSTRAINT; Schema: public; Owner: postgres; Tablespace: 
+-- Name: basis_sets_pkey; Type: CONSTRAINT; Schema: public; Owner: postgres; Tablespace:
 --
 
 ALTER TABLE ONLY public.basis_sets
@@ -1274,7 +1275,7 @@ ALTER TABLE ONLY public.basis_sets
 
 
 --
--- Name: calculations_pkey; Type: CONSTRAINT; Schema: public; Owner: postgres; Tablespace: 
+-- Name: calculations_pkey; Type: CONSTRAINT; Schema: public; Owner: postgres; Tablespace:
 --
 
 ALTER TABLE ONLY public.calculations
@@ -1282,7 +1283,7 @@ ALTER TABLE ONLY public.calculations
 
 
 --
--- Name: charges_pkey; Type: CONSTRAINT; Schema: public; Owner: postgres; Tablespace: 
+-- Name: charges_pkey; Type: CONSTRAINT; Schema: public; Owner: postgres; Tablespace:
 --
 
 ALTER TABLE ONLY public.charges
@@ -1290,7 +1291,7 @@ ALTER TABLE ONLY public.charges
 
 
 --
--- Name: citations_pkey; Type: CONSTRAINT; Schema: public; Owner: postgres; Tablespace: 
+-- Name: citations_pkey; Type: CONSTRAINT; Schema: public; Owner: postgres; Tablespace:
 --
 
 ALTER TABLE ONLY public.citations
@@ -1298,7 +1299,7 @@ ALTER TABLE ONLY public.citations
 
 
 --
--- Name: citations_value_key; Type: CONSTRAINT; Schema: public; Owner: postgres; Tablespace: 
+-- Name: citations_value_key; Type: CONSTRAINT; Schema: public; Owner: postgres; Tablespace:
 --
 
 ALTER TABLE ONLY public.citations
@@ -1306,7 +1307,7 @@ ALTER TABLE ONLY public.citations
 
 
 --
--- Name: codefamilies_content_key; Type: CONSTRAINT; Schema: public; Owner: postgres; Tablespace: 
+-- Name: codefamilies_content_key; Type: CONSTRAINT; Schema: public; Owner: postgres; Tablespace:
 --
 
 ALTER TABLE ONLY public.codefamilies
@@ -1314,7 +1315,7 @@ ALTER TABLE ONLY public.codefamilies
 
 
 --
--- Name: codefamilies_pkey; Type: CONSTRAINT; Schema: public; Owner: postgres; Tablespace: 
+-- Name: codefamilies_pkey; Type: CONSTRAINT; Schema: public; Owner: postgres; Tablespace:
 --
 
 ALTER TABLE ONLY public.codefamilies
@@ -1322,7 +1323,7 @@ ALTER TABLE ONLY public.codefamilies
 
 
 --
--- Name: codeversions_content_key; Type: CONSTRAINT; Schema: public; Owner: postgres; Tablespace: 
+-- Name: codeversions_content_key; Type: CONSTRAINT; Schema: public; Owner: postgres; Tablespace:
 --
 
 ALTER TABLE ONLY public.codeversions
@@ -1330,7 +1331,7 @@ ALTER TABLE ONLY public.codeversions
 
 
 --
--- Name: codeversions_pkey; Type: CONSTRAINT; Schema: public; Owner: postgres; Tablespace: 
+-- Name: codeversions_pkey; Type: CONSTRAINT; Schema: public; Owner: postgres; Tablespace:
 --
 
 ALTER TABLE ONLY public.codeversions
@@ -1338,7 +1339,7 @@ ALTER TABLE ONLY public.codeversions
 
 
 --
--- Name: doi_mapping_id_str_key; Type: CONSTRAINT; Schema: public; Owner: postgres; Tablespace: 
+-- Name: doi_mapping_id_str_key; Type: CONSTRAINT; Schema: public; Owner: postgres; Tablespace:
 --
 
 ALTER TABLE ONLY public.doi_mapping
@@ -1346,7 +1347,7 @@ ALTER TABLE ONLY public.doi_mapping
 
 
 --
--- Name: doi_mapping_pkey; Type: CONSTRAINT; Schema: public; Owner: postgres; Tablespace: 
+-- Name: doi_mapping_pkey; Type: CONSTRAINT; Schema: public; Owner: postgres; Tablespace:
 --
 
 ALTER TABLE ONLY public.doi_mapping
@@ -1354,7 +1355,7 @@ ALTER TABLE ONLY public.doi_mapping
 
 
 --
--- Name: eigenvalues_pkey; Type: CONSTRAINT; Schema: public; Owner: postgres; Tablespace: 
+-- Name: eigenvalues_pkey; Type: CONSTRAINT; Schema: public; Owner: postgres; Tablespace:
 --
 
 ALTER TABLE ONLY public.eigenvalues
@@ -1362,7 +1363,7 @@ ALTER TABLE ONLY public.eigenvalues
 
 
 --
--- Name: electrons_pkey; Type: CONSTRAINT; Schema: public; Owner: postgres; Tablespace: 
+-- Name: electrons_pkey; Type: CONSTRAINT; Schema: public; Owner: postgres; Tablespace:
 --
 
 ALTER TABLE ONLY public.electrons
@@ -1370,7 +1371,7 @@ ALTER TABLE ONLY public.electrons
 
 
 --
--- Name: energies_pkey; Type: CONSTRAINT; Schema: public; Owner: postgres; Tablespace: 
+-- Name: energies_pkey; Type: CONSTRAINT; Schema: public; Owner: postgres; Tablespace:
 --
 
 ALTER TABLE ONLY public.energies
@@ -1378,7 +1379,7 @@ ALTER TABLE ONLY public.energies
 
 
 --
--- Name: forces_pkey; Type: CONSTRAINT; Schema: public; Owner: postgres; Tablespace: 
+-- Name: forces_pkey; Type: CONSTRAINT; Schema: public; Owner: postgres; Tablespace:
 --
 
 ALTER TABLE ONLY public.forces
@@ -1386,7 +1387,7 @@ ALTER TABLE ONLY public.forces
 
 
 --
--- Name: grid_pkey; Type: CONSTRAINT; Schema: public; Owner: postgres; Tablespace: 
+-- Name: grid_pkey; Type: CONSTRAINT; Schema: public; Owner: postgres; Tablespace:
 --
 
 ALTER TABLE ONLY public.grid
@@ -1394,7 +1395,7 @@ ALTER TABLE ONLY public.grid
 
 
 --
--- Name: lattices_pkey; Type: CONSTRAINT; Schema: public; Owner: postgres; Tablespace: 
+-- Name: lattices_pkey; Type: CONSTRAINT; Schema: public; Owner: postgres; Tablespace:
 --
 
 ALTER TABLE ONLY public.lattices
@@ -1402,7 +1403,7 @@ ALTER TABLE ONLY public.lattices
 
 
 --
--- Name: login_tokens_pkey; Type: CONSTRAINT; Schema: public; Owner: postgres; Tablespace: 
+-- Name: login_tokens_pkey; Type: CONSTRAINT; Schema: public; Owner: postgres; Tablespace:
 --
 
 ALTER TABLE ONLY public.login_tokens
@@ -1410,7 +1411,7 @@ ALTER TABLE ONLY public.login_tokens
 
 
 --
--- Name: metadata_pkey; Type: CONSTRAINT; Schema: public; Owner: postgres; Tablespace: 
+-- Name: metadata_pkey; Type: CONSTRAINT; Schema: public; Owner: postgres; Tablespace:
 --
 
 ALTER TABLE ONLY public.metadata
@@ -1418,7 +1419,7 @@ ALTER TABLE ONLY public.metadata
 
 
 --
--- Name: phonons_pkey; Type: CONSTRAINT; Schema: public; Owner: postgres; Tablespace: 
+-- Name: phonons_pkey; Type: CONSTRAINT; Schema: public; Owner: postgres; Tablespace:
 --
 
 ALTER TABLE ONLY public.phonons
@@ -1426,7 +1427,7 @@ ALTER TABLE ONLY public.phonons
 
 
 --
--- Name: pottypes_name_key; Type: CONSTRAINT; Schema: public; Owner: postgres; Tablespace: 
+-- Name: pottypes_name_key; Type: CONSTRAINT; Schema: public; Owner: postgres; Tablespace:
 --
 
 ALTER TABLE ONLY public.pottypes
@@ -1434,7 +1435,7 @@ ALTER TABLE ONLY public.pottypes
 
 
 --
--- Name: pottypes_pkey; Type: CONSTRAINT; Schema: public; Owner: postgres; Tablespace: 
+-- Name: pottypes_pkey; Type: CONSTRAINT; Schema: public; Owner: postgres; Tablespace:
 --
 
 ALTER TABLE ONLY public.pottypes
@@ -1442,7 +1443,7 @@ ALTER TABLE ONLY public.pottypes
 
 
 --
--- Name: pragma_pkey; Type: CONSTRAINT; Schema: public; Owner: postgres; Tablespace: 
+-- Name: pragma_pkey; Type: CONSTRAINT; Schema: public; Owner: postgres; Tablespace:
 --
 
 ALTER TABLE ONLY public.pragma
@@ -1450,7 +1451,7 @@ ALTER TABLE ONLY public.pragma
 
 
 --
--- Name: recipintegs_pkey; Type: CONSTRAINT; Schema: public; Owner: postgres; Tablespace: 
+-- Name: recipintegs_pkey; Type: CONSTRAINT; Schema: public; Owner: postgres; Tablespace:
 --
 
 ALTER TABLE ONLY public.recipintegs
@@ -1458,7 +1459,7 @@ ALTER TABLE ONLY public.recipintegs
 
 
 --
--- Name: sessions_pkey; Type: CONSTRAINT; Schema: public; Owner: postgres; Tablespace: 
+-- Name: sessions_pkey; Type: CONSTRAINT; Schema: public; Owner: postgres; Tablespace:
 --
 
 ALTER TABLE ONLY public.sessions
@@ -1466,7 +1467,7 @@ ALTER TABLE ONLY public.sessions
 
 
 --
--- Name: spacegroups_pkey; Type: CONSTRAINT; Schema: public; Owner: postgres; Tablespace: 
+-- Name: spacegroups_pkey; Type: CONSTRAINT; Schema: public; Owner: postgres; Tablespace:
 --
 
 ALTER TABLE ONLY public.spacegroups
@@ -1474,7 +1475,7 @@ ALTER TABLE ONLY public.spacegroups
 
 
 --
--- Name: struct_optimisation_pkey; Type: CONSTRAINT; Schema: public; Owner: postgres; Tablespace: 
+-- Name: struct_optimisation_pkey; Type: CONSTRAINT; Schema: public; Owner: postgres; Tablespace:
 --
 
 ALTER TABLE ONLY public.struct_optimisation
@@ -1482,7 +1483,7 @@ ALTER TABLE ONLY public.struct_optimisation
 
 
 --
--- Name: struct_ratios_pkey; Type: CONSTRAINT; Schema: public; Owner: postgres; Tablespace: 
+-- Name: struct_ratios_pkey; Type: CONSTRAINT; Schema: public; Owner: postgres; Tablespace:
 --
 
 ALTER TABLE ONLY public.struct_ratios
@@ -1490,7 +1491,7 @@ ALTER TABLE ONLY public.struct_ratios
 
 
 --
--- Name: structures_pkey; Type: CONSTRAINT; Schema: public; Owner: postgres; Tablespace: 
+-- Name: structures_pkey; Type: CONSTRAINT; Schema: public; Owner: postgres; Tablespace:
 --
 
 ALTER TABLE ONLY public.structures
@@ -1498,7 +1499,7 @@ ALTER TABLE ONLY public.structures
 
 
 --
--- Name: topics_pkey; Type: CONSTRAINT; Schema: public; Owner: postgres; Tablespace: 
+-- Name: topics_pkey; Type: CONSTRAINT; Schema: public; Owner: postgres; Tablespace:
 --
 
 ALTER TABLE ONLY public.topics
@@ -1506,7 +1507,7 @@ ALTER TABLE ONLY public.topics
 
 
 --
--- Name: u_children_parent_calc_id; Type: CONSTRAINT; Schema: public; Owner: postgres; Tablespace: 
+-- Name: u_children_parent_calc_id; Type: CONSTRAINT; Schema: public; Owner: postgres; Tablespace:
 --
 
 ALTER TABLE ONLY public.calcsets
@@ -1514,7 +1515,7 @@ ALTER TABLE ONLY public.calcsets
 
 
 --
--- Name: u_coauthorships_calc_id_user; Type: CONSTRAINT; Schema: public; Owner: postgres; Tablespace: 
+-- Name: u_coauthorships_calc_id_user; Type: CONSTRAINT; Schema: public; Owner: postgres; Tablespace:
 --
 
 ALTER TABLE ONLY public.coauthorships
@@ -1522,7 +1523,7 @@ ALTER TABLE ONLY public.coauthorships
 
 
 --
--- Name: u_coauthorships_user_calc_id; Type: CONSTRAINT; Schema: public; Owner: postgres; Tablespace: 
+-- Name: u_coauthorships_user_calc_id; Type: CONSTRAINT; Schema: public; Owner: postgres; Tablespace:
 --
 
 ALTER TABLE ONLY public.coauthorships
@@ -1530,7 +1531,7 @@ ALTER TABLE ONLY public.coauthorships
 
 
 --
--- Name: u_metadata_citations_calc_citation; Type: CONSTRAINT; Schema: public; Owner: postgres; Tablespace: 
+-- Name: u_metadata_citations_calc_citation; Type: CONSTRAINT; Schema: public; Owner: postgres; Tablespace:
 --
 
 ALTER TABLE ONLY public.metadata_citations
@@ -1538,7 +1539,7 @@ ALTER TABLE ONLY public.metadata_citations
 
 
 --
--- Name: u_metadata_citations_citation_calc; Type: CONSTRAINT; Schema: public; Owner: postgres; Tablespace: 
+-- Name: u_metadata_citations_citation_calc; Type: CONSTRAINT; Schema: public; Owner: postgres; Tablespace:
 --
 
 ALTER TABLE ONLY public.metadata_citations
@@ -1546,7 +1547,7 @@ ALTER TABLE ONLY public.metadata_citations
 
 
 --
--- Name: u_ownerships_calc_id_user; Type: CONSTRAINT; Schema: public; Owner: postgres; Tablespace: 
+-- Name: u_ownerships_calc_id_user; Type: CONSTRAINT; Schema: public; Owner: postgres; Tablespace:
 --
 
 ALTER TABLE ONLY public.ownerships
@@ -1554,7 +1555,7 @@ ALTER TABLE ONLY public.ownerships
 
 
 --
--- Name: u_ownerships_user_calc_id; Type: CONSTRAINT; Schema: public; Owner: postgres; Tablespace: 
+-- Name: u_ownerships_user_calc_id; Type: CONSTRAINT; Schema: public; Owner: postgres; Tablespace:
 --
 
 ALTER TABLE ONLY public.ownerships
@@ -1562,7 +1563,7 @@ ALTER TABLE ONLY public.ownerships
 
 
 --
--- Name: u_parent_children_calc_id; Type: CONSTRAINT; Schema: public; Owner: postgres; Tablespace: 
+-- Name: u_parent_children_calc_id; Type: CONSTRAINT; Schema: public; Owner: postgres; Tablespace:
 --
 
 ALTER TABLE ONLY public.calcsets
@@ -1570,7 +1571,7 @@ ALTER TABLE ONLY public.calcsets
 
 
 --
--- Name: u_shareships_calc_id_user; Type: CONSTRAINT; Schema: public; Owner: postgres; Tablespace: 
+-- Name: u_shareships_calc_id_user; Type: CONSTRAINT; Schema: public; Owner: postgres; Tablespace:
 --
 
 ALTER TABLE ONLY public.shareships
@@ -1578,7 +1579,7 @@ ALTER TABLE ONLY public.shareships
 
 
 --
--- Name: u_shareships_user_calc_id; Type: CONSTRAINT; Schema: public; Owner: postgres; Tablespace: 
+-- Name: u_shareships_user_calc_id; Type: CONSTRAINT; Schema: public; Owner: postgres; Tablespace:
 --
 
 ALTER TABLE ONLY public.shareships
@@ -1586,7 +1587,7 @@ ALTER TABLE ONLY public.shareships
 
 
 --
--- Name: u_tags_calc_id_tid; Type: CONSTRAINT; Schema: public; Owner: postgres; Tablespace: 
+-- Name: u_tags_calc_id_tid; Type: CONSTRAINT; Schema: public; Owner: postgres; Tablespace:
 --
 
 ALTER TABLE ONLY public.tags
@@ -1594,7 +1595,7 @@ ALTER TABLE ONLY public.tags
 
 
 --
--- Name: u_tags_tid_calc_id; Type: CONSTRAINT; Schema: public; Owner: postgres; Tablespace: 
+-- Name: u_tags_tid_calc_id; Type: CONSTRAINT; Schema: public; Owner: postgres; Tablespace:
 --
 
 ALTER TABLE ONLY public.tags
@@ -1602,7 +1603,7 @@ ALTER TABLE ONLY public.tags
 
 
 --
--- Name: uploads_pkey; Type: CONSTRAINT; Schema: public; Owner: postgres; Tablespace: 
+-- Name: uploads_pkey; Type: CONSTRAINT; Schema: public; Owner: postgres; Tablespace:
 --
 
 ALTER TABLE ONLY public.uploads
@@ -1610,7 +1611,7 @@ ALTER TABLE ONLY public.uploads
 
 
 --
--- Name: user_metadata_pkey; Type: CONSTRAINT; Schema: public; Owner: postgres; Tablespace: 
+-- Name: user_metadata_pkey; Type: CONSTRAINT; Schema: public; Owner: postgres; Tablespace:
 --
 
 ALTER TABLE ONLY public.user_metadata
@@ -1618,7 +1619,7 @@ ALTER TABLE ONLY public.user_metadata
 
 
 --
--- Name: users_email_key; Type: CONSTRAINT; Schema: public; Owner: postgres; Tablespace: 
+-- Name: users_email_key; Type: CONSTRAINT; Schema: public; Owner: postgres; Tablespace:
 --
 
 ALTER TABLE ONLY public.users
@@ -1626,7 +1627,7 @@ ALTER TABLE ONLY public.users
 
 
 --
--- Name: users_pkey; Type: CONSTRAINT; Schema: public; Owner: postgres; Tablespace: 
+-- Name: users_pkey; Type: CONSTRAINT; Schema: public; Owner: postgres; Tablespace:
 --
 
 ALTER TABLE ONLY public.users
@@ -1634,7 +1635,7 @@ ALTER TABLE ONLY public.users
 
 
 --
--- Name: users_username_key; Type: CONSTRAINT; Schema: public; Owner: postgres; Tablespace: 
+-- Name: users_username_key; Type: CONSTRAINT; Schema: public; Owner: postgres; Tablespace:
 --
 
 ALTER TABLE ONLY public.users
diff --git a/nomad/files.py b/nomad/files.py
index dee11e19b0fa5112dee7f0e55f719b93084643b5..ba6aceaceb4a3a4b889040f43d8314460a3a8d0f 100644
--- a/nomad/files.py
+++ b/nomad/files.py
@@ -13,91 +13,71 @@
 # limitations under the License.
 
 """
-This file storage abstraction uses an *object storage*-like metaphor to store
-objects on the file system. Objects are organized in *buckets* and object ids
-are basically paths. All major file system operations for dealing with
-uploaded files, archive, files, raw files, etc. should be part of this module to
-allow later introduction of real object storage systems.
-
-.. note:: This module still uses ``os.path``. As long as the whole nomad runs on a
-    POSIX (or Windows) os everything should be fine. This means respective paths in the
-    dbs, and indices. In the future, this should be replaced with abstract path representations
-    ala ``PathLib``.
-
-.. autoclass:: File
-    :members:
-.. autoclass:: ZippedFile
-    :members:
-.. autoclass:: ObjectFile
-    :members:
-.. autoclass:: UploadFile
-    :members:
-.. autoclass:: ArchiveFile
-    :members:
-.. autoclass:: DataContainer
-    :members:
-.. autoclass:: BaggedDataContainer
-    :members:
-.. autoclass:: ZippedDataContainer
-    :members:
+This module contains classes and functions to create and maintain the file structures
+for uploads.
+
+There are two different structures for uploads in two different states: *staging* and *public*.
+Possible operations on uploads differ based on this state. Staging is used for
+processing, heavy editing, creating hashes, etc. Public is supposed to be an
+almost read-only (besides metadata) storage.
+
+::
+    fs/staging/<upload>/metadata/<calc>.json
+                       /raw/**
+                       /archive/<calc>.hdf5
+                       /.frozen
+                       /.public
+                       /.restricted
+    fs/public/<upload>/metadata.json.gz
+                      /raw-public.bagit.zip
+                      /raw-restricted.bagit.zip
+                      /archive-public.hdf5.zip
+                      /archive-restricted.hdf5.zip
 """
-from abc import ABC
-from typing import List, Generator, IO, TextIO, cast, Dict, Any
-import os
+
+from abc import ABCMeta
+from typing import IO, Generator, Dict, Iterator, Iterable, Callable
+import ujson
 import os.path
-from zipfile import ZipFile, BadZipFile, is_zipfile, ZIP_DEFLATED
+import os
 import shutil
-from contextlib import contextmanager
-import gzip
+from zipfile import ZipFile, BadZipFile, is_zipfile
+import tarfile
+from bagit import make_bag
+import hashlib
+import base64
 import io
-import bagit
-import json
+import gzip
 
-from nomad import config, utils
+from nomad import config, utils, datamodel
 
 
-class File:
+class PathObject:
     """
-    Base class for handling a file. Allows to open (read, write) and delete files.
-
+    Object storage-like abstraction for paths in general.
     Arguments:
-        os_path: The path to the file in the os filesystem.
-
-    Attributes:
-        logger: A structured logger with bucket and object information.
-        path: The abstract path of the file.
+        bucket: The bucket to store this object in
+        object_id: The object id (i.e. directory path)
+        os_path: Override the "object storage" path with the given path.
+        prefix: Add a three-character prefix directory, e.g. foo/test/ -> foo/tes/test
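+
+    A sketch of the prefix behavior (bucket and object id are illustrative):
+
+    .. code-block:: python
+
+        obj = PathObject('staging', 'upload-id', prefix=True)
+        # obj.os_path now ends with .../staging/upl/upload-id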
     """
-    def __init__(self, os_path: str = None) -> None:
-        self.os_path = os_path
-
-        self.logger = self.bind_logger(utils.get_logger(__name__))
+    def __init__(self, bucket: str, object_id: str, os_path: str = None, prefix: bool = False) -> None:
+        if os_path:
+            self.os_path = os_path
+        else:
+            self.os_path = os.path.join(config.fs.objects, bucket, object_id)
 
-    def bind_logger(self, logger):
-        """ Adds context information to the given logger and returns it. """
-        return logger.bind(path=self.os_path)
-
-    @contextmanager
-    def open(self, mode: str = 'r', *args, **kwargs) -> Generator[IO, None, None]:
-        """ Opens the object with he given mode, etc. """
-        self.logger.debug('open file')
-        try:
-            if mode.startswith('w'):
-                self.create_dirs()
-            with open(self.os_path, mode, *args, **kwargs) as f:
-                yield f
-        except FileNotFoundError:
-            raise KeyError()
+        if prefix:
+            segments = list(os.path.split(self.os_path))
+            last = segments[-1]
+            segments[-1] = last[:3]
+            segments.append(last)
+            self.os_path = os.path.join(*segments)
 
     def delete(self) -> None:
-        """ Deletes the file. """
-        try:
-            os.remove(self.os_path)
-            self.logger.debug('file deleted')
-        except FileNotFoundError:
-            raise KeyError()
+        shutil.rmtree(self.os_path)
 
     def exists(self) -> bool:
-        """ Returns true if object exists. """
         return os.path.exists(self.os_path)
 
     @property
@@ -105,596 +85,649 @@ class File:
         """ Returns the os determined file size. """
         return os.stat(self.os_path).st_size
 
-    @property
-    def path(self) -> str:
+    def __repr__(self) -> str:
         return self.os_path
 
-    def create_dirs(self) -> None:
-        directory = os.path.dirname(self.os_path)
-        if not os.path.isdir(directory):
-            os.makedirs(directory)
-
-
-class ZippedFile(File):
-    """ A file contained in a .zip archive. """
-    def __init__(self, zip_os_path: str, filename: str) -> None:
-        self.filename = filename
-        super().__init__(zip_os_path)
 
-    def bind_logger(self, logger):
-        return super().bind_logger(logger).bind(filename=self.filename)
+class DirectoryObject(PathObject):
+    """
+    Object storage-like abstraction for directories.
+    Arguments:
+        bucket: The bucket to store this object in
+        object_id: The object id (i.e. directory path)
+        create: True if the directory structure should be created. Default is False.
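+
+    A usage sketch (bucket and paths are illustrative):
+
+    .. code-block:: python
+
+        upload_dir = DirectoryObject('staging', 'upload-id', create=True)
+        raw_file = upload_dir.join_dir('raw').join_file('some/path/file.txt')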
+    """
+    def __init__(self, bucket: str, object_id: str, create: bool = False, **kwargs) -> None:
+        super().__init__(bucket, object_id, **kwargs)
+        self._create = create
+        if create and not os.path.isdir(self.os_path):
+            os.makedirs(self.os_path)
+
+    def join_dir(self, path, create: bool = None) -> 'DirectoryObject':
+        if create is None:
+            create = self._create
+        return DirectoryObject(None, None, create=create, os_path=os.path.join(self.os_path, path))
+
+    def join_file(self, path) -> PathObject:
+        dirname = os.path.dirname(path)
+        if dirname != '':
+            return self.join_dir(dirname).join_file(os.path.basename(path))
+        else:
+            return PathObject(None, None, os_path=os.path.join(self.os_path, path))
 
-    @contextmanager
-    def open(self, *args, **kwargs) -> Generator[IO, None, None]:
-        self.logger.debug('open file')
-        try:
-            with ZipFile(self.os_path) as zip_file:
-                yield zip_file.open(self.filename, *args, **kwargs)
-        except FileNotFoundError:
-            raise KeyError()
-        except KeyError as e:
-            raise e
-        except Exception as e:
-            msg = 'Could not read upload.'
-            self.logger.error(msg, exc_info=e)
-            raise FileError(msg, e)
+    def exists(self) -> bool:
+        return os.path.isdir(self.os_path)
 
-    def delete(self) -> None:
-        assert False, "A file in a zip archive cannot be deleted."
 
-    @property
-    def size(self) -> int:
-        with ZipFile(self.os_path) as zip_file:
-            return zip_file.getinfo(self.filename).file_size
+class ExtractError(Exception):
+    pass
 
-    @property
-    def path(self) -> str:
-        return os.path.join(
-            os.path.dirname(self.os_path),
-            os.path.basename(self.os_path),
-            self.filename)
 
+class Metadata(metaclass=ABCMeta):
+    """
+    An ABC for upload metadata classes that encapsulates access to a set of calc metadata.
+    """
+    def get(self, calc_id: str) -> dict:
+        """ Retrive the calc metadata for a given calc. """
+        raise NotImplementedError()
 
-class Objects:
-    @classmethod
-    def _os_path(cls, bucket: str, name: str, ext: str = None) -> str:
-        if ext is not None and ext != '':
-            file_name = '%s.%s' % (name, ext)
-        elif name is None or name == '':
-            file_name = ''
-        else:
-            file_name = name
+    def __iter__(self) -> Iterator[dict]:
+        raise NotImplementedError()
 
-        # add an extra directory to limit the files per directory (gpfs)
-        file_name = '%s/%s' % (file_name[0:3], file_name)
+    def __len__(self) -> int:
+        raise NotImplementedError()
 
-        path_segments = file_name.split('/')
-        path = os.path.join(*([config.fs.objects, bucket] + path_segments))
 
-        return os.path.abspath(path)
+class StagingMetadata(Metadata):
+    """
+    A Metadata implementation based on individual .json files per calc stored in a given
+    directory.
+    Arguments:
+        directory: The DirectoryObject for the directory to store the metadata in.
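+
+    A usage sketch (the calc data is illustrative):
+
+    .. code-block:: python
+
+        metadata = StagingMetadata(upload_dir.join_dir('metadata'))
+        metadata.insert(dict(calc_id='abc', mainfile='vasp.xml'))
+        metadata.update('abc', dict(with_embargo=True))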
+    """
+    def __init__(self, directory: DirectoryObject) -> None:
+        self._dir = directory
+
+    def remove(self, calc: dict) -> None:
+        calc_id = calc['calc_id']
+        path = self._dir.join_file('%s.json' % calc_id)
+        assert path.exists()
+        os.remove(path.os_path)
+
+    def insert(self, calc: dict) -> None:
+        """ Insert a calc, using calc_id as key. """
+        calc_id = calc['calc_id']
+        path = self._dir.join_file('%s.json' % calc_id)
+        assert not path.exists()
+        with open(path.os_path, 'wt') as f:
+            ujson.dump(calc, f)
+
+    def update(self, calc_id: str, updates: dict) -> dict:
+        """ Updating a calc, using calc_id as key and running dict update with the given data. """
+        metadata = self.get(calc_id)
+        metadata.update(updates)
+        path = self._dir.join_file('%s.json' % calc_id)
+        with open(path.os_path, 'wt') as f:
+            ujson.dump(metadata, f)
+        return metadata
 
-    @classmethod
-    def delete_all(cls, bucket: str, prefix: str = ''):
-        """ Delete all files with given prefix, prefix must denote a directory. """
+    def get(self, calc_id: str) -> dict:
         try:
-            shutil.rmtree(cls._os_path(bucket, prefix, ext=None))
+            with open(self._dir.join_file('%s.json' % calc_id).os_path, 'rt') as f:
+                return ujson.load(f)
         except FileNotFoundError:
-            pass
+            raise KeyError()
+
+    def __iter__(self) -> Iterator[dict]:
+        for root, _, files in os.walk(self._dir.os_path):
+            for file in files:
+                with open(os.path.join(root, file), 'rt') as f:
+                    yield ujson.load(f)
 
+    def __len__(self) -> int:
+        return len(os.listdir(self._dir.os_path))
 
-class ObjectFile(File):
+
+class PublicMetadata(Metadata):
     """
-    Base class for file objects. Allows to open (read, write) and delete objects.
-    File objects filesystem location is govern by its bucket, object_id, and ext.
-    This object store location can be overridden with a local_path.
+    A Metadata implementation based on a single compressed .json file.
 
     Arguments:
-        bucket (str): The 'bucket' for this object.
-        object_id (str): The object_id for this object. Might contain `/` to structure
-            the bucket further. Will be mapped to directories in the filesystem.
-        ext (str): Optional extension for the object file in the filesystem.
-
-    Attributes:
-        logger: A structured logger with bucket and object information.
-        has_local_path: True, if this object is stored somewhere else in the fs.
+        path: The parent directory for the metadata file.
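+
+    On disk (a sketch, following :func:`_create`), `metadata.json.gz` holds a single
+    gzipped JSON object mapping calc ids to calc metadata::
+
+        {"<calc_id>": {"calc_id": "<calc_id>", ...}}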
     """
-    def __init__(self, bucket: str, object_id: str, ext: str = None, local_path: str = None) -> None:
-        self.bucket = bucket
-        self.object_id = object_id
-        self.ext = ext
+    def __init__(self, path: str, lock_timeout=1) -> None:
+        self._db_file = os.path.join(path, 'metadata.json.gz')
+        self._modified = False
+        self._data: Dict[str, dict] = None
 
-        self.has_local_path = local_path is not None
-        path = Objects._os_path(self.bucket, self.object_id, self.ext)
-        path = local_path if self.has_local_path else path
+    @property
+    def data(self):
+        if self._data is None:
+            with gzip.open(self._db_file, 'rt') as f:
+                self._data = ujson.load(f)
+        return self._data
 
-        super().__init__(path)
+    def _create(self, calcs: Iterable[dict]) -> None:
+        assert not os.path.exists(self._db_file) and self._data is None
+        self._data = {data['calc_id']: data for data in calcs}
+        with gzip.open(self._db_file, 'wt') as f:
+            ujson.dump(self._data, f)
 
-    def bind_logger(self, logger):
-        """ Adds context information to the given logger and returns it. """
-        return super().bind_logger(logger).bind(bucket=self.bucket, object=self.object_id)
+    def insert(self, calc: dict) -> None:
+        assert self.data is not None, "Metadata is not open."
 
-    def delete(self) -> None:
-        """ Deletes the file, if it has not a localpath. Localpath files are never deleted.  """
-        # Do not delete local files, no matter what
-        if not self.has_local_path:
-            super().delete()
+        id = calc['calc_id']
+        assert id not in self.data
+        self.data[id] = calc
+        self._modified = True
 
+    def update(self, calc_id: str, updates: dict) -> dict:
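+        """ Not implemented; packed public metadata is treated as read-only here. """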
+        raise NotImplementedError
 
-class FileError(Exception):
-    def __init__(self, msg, cause):
-        super().__init__(msg, cause)
+    def get(self, calc_id: str) -> dict:
+        return self.data[calc_id]
 
+    def __iter__(self) -> Iterator[dict]:
+        return iter(self.data.values())
 
-class UploadFile(ObjectFile):
-    """
-    Instances of ``UploadFile`` represent an uploaded file in the *'object storage'*.
+    def __len__(self) -> int:
+        return len(self.data)
 
-    Currently only user ``.zip`` files are supported.
 
-    Uploads can be extracted to tmp storage (open/close), the list of files in
-    the upload is provided, and files can be opened for read. Extracting uploads
-    is optional, all functions in this module are also available without extracting.
-    Extracts are automatically bagged with *bagit*.
+class Restricted(Exception):
+    pass
 
-    This class is a context manager, that extracts the file when using a ``with``
-    statement with instances of this class.
 
-    UploadFiles are stored in their own *bucket*. But, storage can be overridden
-    by providing a ``local_path``. This is useful when the file is already stored
-    in nomad's distributed file system, e.g. for bulk processing of already uploaded
-    files.
+class UploadFiles(DirectoryObject, datamodel.Entity, metaclass=ABCMeta):
 
-    Uploads can be persistet as :class:`ZippedDataContainers` for permanent repository
-    raw data storage.
+    _archive_ext = 'json'
 
-    Arguments:
-        upload_id: The upload of this uploaded file.
-        local_path: Optional override for the path used to store/access the uploaded file.
+    def __init__(
+            self, bucket: str, upload_id: str,
+            is_authorized: Callable[[], bool] = lambda: False,
+            create: bool = False) -> None:
+        self.logger = utils.get_logger(__name__, upload_id=upload_id)
 
-    Attributes:
-        is_extracted: True if the upload is extracted.
-        upload_extract_dir: The path of the tmp directory with the extracted contents.
-        filelist: A list of filenames relative to the .zipped upload root.
-    """
+        super().__init__(bucket, upload_id, create=create, prefix=True)
 
-    formats = ['zip']
-    """ A human readable list of supported file formats. """
+        if not create and not self.exists():
+            raise KeyError()
 
-    def __init__(self, upload_id: str, local_path: str = None) -> None:
-        super().__init__(
-            bucket=config.files.uploads_bucket,
-            object_id=upload_id,
-            ext='zip',
-            local_path=local_path)
-
-        self._extract_dir: str = os.path.join(config.fs.tmp, 'uploads_extracted', upload_id)
-        self._bagged_container: DataContainer = None
-        if os.path.isdir(self._extract_dir):
-            self._bagged_container = BaggedDataContainer(self._extract_dir)
-
-    def bind_logger(self, logger):
-        return super().bind_logger(logger).bind(upload_id=self.object_id)
-
-    # There is not good way to capsule decorators in a class:
-    # https://medium.com/@vadimpushtaev/decorator-inside-python-class-1e74d23107f6
-    class Decorators:
-        @classmethod
-        def handle_errors(cls, decorated):
-            def wrapper(self, *args, **kwargs):
-                try:
-                    return decorated(self, *args, **kwargs)
-                except Exception as e:
-                    msg = 'Could not %s upload.' % decorated.__name__
-                    self.logger.error(msg, upload_id=self.object_id, exc_info=e)
-                    raise FileError(msg, e)
-            return wrapper
-
-    @contextmanager
-    def _zip(self):
-        assert self.exists(), "Can only access uploaded file if it exists."
-        zip_file = None
-        try:
-            zip_file = ZipFile(self.os_path)
-            yield zip_file
-        except BadZipFile as e:
-            raise FileError('Upload is not a zip file', e)
-        finally:
-            if zip_file is not None:
-                zip_file.close()
+        self.upload_id = upload_id
+        self._is_authorized = is_authorized
 
-    @property
-    def filelist(self) -> List[str]:
-        if self.is_extracted:
-            return self._bagged_container.manifest
+    @staticmethod
+    def get(upload_id: str, *args, **kwargs) -> 'UploadFiles':
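+        """
+        Returns a :class:`StagingUploadFiles` or :class:`PublicUploadFiles` instance,
+        depending on which bucket contains the upload, or None if neither does.
+
+        A minimal usage sketch (the id is hypothetical)::
+
+            upload_files = UploadFiles.get('some-upload-id', is_authorized=lambda: True)
+        """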
+        if DirectoryObject(config.files.staging_bucket, upload_id, prefix=True).exists():
+            return StagingUploadFiles(upload_id, *args, **kwargs)
+        elif DirectoryObject(config.files.public_bucket, upload_id, prefix=True).exists():
+            return PublicUploadFiles(upload_id, *args, **kwargs)
         else:
-            with self._zip() as zip_file:
-                return [
-                    zip_info.filename for zip_info in zip_file.filelist
-                    if not zip_info.filename.endswith('/')]
+            return None
 
     @property
-    def is_extracted(self) -> bool:
-        return self._bagged_container is not None
+    def metadata(self) -> Metadata:
+        """ The calc metadata for this upload. """
+        raise NotImplementedError
 
-    @Decorators.handle_errors
-    def upload_hash(self) -> str:
-        assert self.is_extracted
-        return self._bagged_container.hash
+    def raw_file(self, file_path: str, *args, **kwargs) -> IO:
+        """
+        Opens a raw file and returns a file-like object. Additional args and kwargs are
+        delegated to the respective `open` call.
+        Arguments:
+            file_path: The path to the file relative to the upload.
+        Raises:
+            KeyError: If the file does not exist.
+            Restricted: If the file is restricted and upload access evaluated to False.
+        """
+        raise NotImplementedError()
 
-    @Decorators.handle_errors
-    def extract(self) -> None:
+    def raw_file_manifest(self, path_prefix: str = None) -> Generator[str, None, None]:
         """
-        'Opens' the upload. This means the upload files get extracted and bagged to tmp.
+        Returns the paths of all raw files in the upload (optionally filtered by prefix).
+        Arguments:
+            path_prefix: An optional prefix; only returns those files that have the prefix.
+        Returns:
+            An iterable over all (matching) raw files.
+        """
+        raise NotImplementedError()
 
+    def archive_file(self, calc_id: str, *args, **kwargs) -> IO:
+        """
+        Opens an archive file and returns a file-like object. Additional args and kwargs are
+        delegated to the respective `open` call.
+        Arguments:
+            calc_id: The id identifying the calculation.
         Raises:
-            UploadFileError: If some IO went wrong.
-            KeyError: If the upload does not exist.
+            KeyError: If the calc does not exist.
+            Restricted: If the file is restricted and upload access evaluated to False.
         """
-        os.makedirs(os.path.join(config.fs.tmp, 'uploads_extracted'), exist_ok=True)
+        raise NotImplementedError()
 
-        with self._zip() as zip_file:
-            zip_file.extractall(self._extract_dir)
+    def archive_log_file(self, calc_id: str, *args, **kwargs) -> IO:
+        """
+        Opens an archive log file and returns a file-like object. Additional args and kwargs are
+        delegated to the respective `open` call.
+        Arguments:
+            calc_id: The id identifying the calculation.
+        Raises:
+            KeyError: If the calc does not exist.
+            Restricted: If the file is restricted and upload access evaluated to False.
+        """
+        raise NotImplementedError()
 
-        self.logger.debug('extracted uploaded file')
 
-        self._bagged_container = BaggedDataContainer.create(self._extract_dir)
-        self.logger.debug('bagged uploaded file')
+class StagingUploadFiles(UploadFiles):
+    def __init__(self, *args, **kwargs) -> None:
+        super().__init__(config.files.staging_bucket, *args, **kwargs)
 
-    def persist(self, object_id: str = None):
-        """
-        Persists the extracted and bagged upload to the repository raw data bucket.
-        """
-        assert self.is_extracted
-        if object_id is None:
-            object_id = self.upload_hash()
+        self._raw_dir = self.join_dir('raw')
+        self._archive_dir = self.join_dir('archive')
+        self._frozen_file = self.join_file('.frozen')
 
-        target = Objects._os_path(config.files.raw_bucket, object_id, 'zip')
-        directory = os.path.dirname(target)
-        if not os.path.isdir(directory):
-            os.makedirs(directory)
+        metadata_dir = self.join_dir('metadata')
+        self._metadata = StagingMetadata(metadata_dir)
 
-        return ZippedDataContainer.create(self._extract_dir, target=target)
+        self._size = 0
 
-    @Decorators.handle_errors
-    def remove_extract(self) -> None:
-        """
-        Closes the upload. This means the tmp. files are deleted.
+    @property
+    def size(self) -> int:
+        return self._size
 
-        Raises:
-            UploadFileError: If some IO went wrong.
-            KeyError: If the upload does not exist.
-        """
+    @property
+    def metadata(self) -> StagingMetadata:
+        if not self._is_authorized():
+            raise Restricted
+        return self._metadata
+
+    def _file(self, path_object: PathObject, *args, **kwargs) -> IO:
         try:
-            shutil.rmtree(self._extract_dir)
+            return open(path_object.os_path, *args, **kwargs)
         except FileNotFoundError:
             raise KeyError()
 
-        self.logger.debug('removed uploaded file extract')
+    def raw_file(self, file_path: str, *args, **kwargs) -> IO:
+        if not self._is_authorized():
+            raise Restricted
+        return self._file(self.raw_file_object(file_path), *args, **kwargs)
 
-    def __enter__(self):
-        self.extract()
-        return self
+    def raw_file_object(self, file_path: str) -> PathObject:
+        return self._raw_dir.join_file(file_path)
 
-    def __exit__(self, exc_type, exc, exc_tb):
-        self.remove_extract()
+    def archive_file(self, calc_id: str, *args, **kwargs) -> IO:
+        if not self._is_authorized():
+            raise Restricted
+        return self._file(self.archive_file_object(calc_id), *args, **kwargs)
 
-    def get_file(self, filename: str) -> File:
+    def archive_log_file(self, calc_id: str, *args, **kwargs) -> IO:
+        if not self._is_authorized():
+            raise Restricted
+        return self._file(self.archive_log_file_object(calc_id), *args, **kwargs)
+
+    def archive_file_object(self, calc_id: str) -> PathObject:
+        return self._archive_dir.join_file('%s.%s' % (calc_id, self._archive_ext))
+
+    def archive_log_file_object(self, calc_id: str) -> PathObject:
+        return self._archive_dir.join_file('%s.log' % calc_id)
+
+    def add_rawfiles(self, path: str, move: bool = False, prefix: str = None, force_archive: bool = False) -> None:
         """
-        Returns a :class:`File` instance as a handle to the file with the given name.
-        Only works on extracted uploads. The given filename must be one of the
-        name in ``self.filelist``.
+        Add rawfiles to the upload. The given file will be copied, moved, or extracted.
+        Arguments:
+            path: Path to a directory, file, or archive (zip/tar). Archives will be extracted.
+            move: Whether the file should be moved instead of copied. Archives will be
+                extracted and then deleted.
+            prefix: Optional path prefix for the added files.
+            force_archive: Expect the file to be a zip or other supported archive file;
+                raises :class:`ExtractError` if it cannot be extracted. Without this flag,
+                files that cannot be extracted are copied instead.
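+
+        A usage sketch (paths and ids are hypothetical)::
+
+            upload_files = StagingUploadFiles('some-upload-id', create=True)
+            upload_files.add_rawfiles('/tmp/upload.zip')   # extracted into raw/
+            upload_files.add_rawfiles('/tmp/notes.txt', prefix='docs')   # copied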
         """
-        assert self.is_extracted
-        return self._bagged_container.get_file(filename)
+        assert not self.is_frozen
+        assert os.path.exists(path)
+        self._size += os.stat(path).st_size
+        target_dir = self._raw_dir
+        if prefix is not None:
+            target_dir = target_dir.join_dir(prefix, create=True)
+        ext = os.path.splitext(path)[1]
+        if force_archive or ext == '.zip':
+            try:
+                with ZipFile(path) as zf:
+                    zf.extractall(target_dir.os_path)
+                if move:
+                    os.remove(path)
+                return
+            except BadZipFile:
+                pass
+
+        if force_archive or path.endswith(('.tgz', '.tar.gz', '.tar.bz2')):
+            try:
+                with tarfile.open(path) as tf:
+                    tf.extractall(target_dir.os_path)
+                if move:
+                    os.remove(path)
+                return
+            except tarfile.TarError:
+                pass
+
+        if force_archive:
+            raise ExtractError
+
+        if move:
+            shutil.move(path, target_dir.os_path)
+        else:
+            shutil.copy(path, target_dir.os_path)
 
     @property
-    def is_valid(self):
-        return is_zipfile(self.os_path)
+    def is_frozen(self) -> bool:
+        """ Returns True if this upload is already *bagged*. """
+        return self._frozen_file.exists()
 
-    def get_siblings(self, filename: str) -> Generator[str, None, None]:
+    def pack(self, bagit_metadata: dict = None) -> None:
+        """
+        Replaces the staging upload data with a public upload record by packing all
+        data into files. It is only available if the upload is not yet frozen.
+        This is potentially a long running operation.
+        Arguments:
+            bagit_metadata: Additional data added to the bagit metadata.
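+
+        Per this implementation, the packed directory ends up containing
+        `raw-{public,restricted}.bagit.zip`, `archive-{public,restricted}.json.zip`,
+        and `metadata.json.gz`, and is then moved to the public bucket.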
+        """
+        # freeze the upload
+        assert not self.is_frozen, "Cannot pack an upload that is already packed or currently packing."
+        with open(self._frozen_file.os_path, 'wt') as f:
+            f.write('frozen')
+
+        # create tmp dirs for restricted and public raw data
+        restricted_dir = self.join_dir('.restricted', create=False)
+        public_dir = self.join_dir('.public', create=True)
+
+        # copy raw -> .restricted
+        shutil.copytree(self._raw_dir.os_path, restricted_dir.os_path)
+
+        # move public data .restricted -> .public
+        for calc in self.metadata:
+            if not calc.get('restricted', True):
+                mainfile: str = calc['mainfile']
+                assert mainfile is not None
+                for filepath in self.calc_files(mainfile):
+                    os.rename(
+                        restricted_dir.join_file(filepath).os_path,
+                        public_dir.join_file(filepath).os_path)
+
+        # create bags
+        make_bag(restricted_dir.os_path, bag_info=bagit_metadata, checksums=['sha512'])
+        make_bag(public_dir.os_path, bag_info=bagit_metadata, checksums=['sha512'])
+
+        # zip bags
+        def zip_dir(zip_filepath, path):
+            # store entries relative to the given directory root
+            root_len = len(path) + 1
+            with ZipFile(zip_filepath, 'w') as zf:
+                for root, _, files in os.walk(path):
+                    for file in files:
+                        filepath = os.path.join(root, file)
+                        zf.write(filepath, filepath[root_len:])
+
+        packed_dir = self.join_dir('.packed', create=True)
+
+        zip_dir(packed_dir.join_file('raw-restricted.bagit.zip').os_path, restricted_dir.os_path)
+        zip_dir(packed_dir.join_file('raw-public.bagit.zip').os_path, public_dir.os_path)
+
+        # zip archives
+        def create_zipfile(prefix: str) -> ZipFile:
+            file = packed_dir.join_file('archive-%s.%s.zip' % (prefix, self._archive_ext))
+            return ZipFile(file.os_path, mode='w')
+
+        archive_public_zip = create_zipfile('public')
+        archive_restricted_zip = create_zipfile('restricted')
+
+        for calc in self.metadata:
+            archive_zip = archive_restricted_zip if calc.get('restricted', True) else archive_public_zip
+
+            archive_filename = '%s.%s' % (calc['calc_id'], self._archive_ext)
+            archive_zip.write(self._archive_dir.join_file(archive_filename).os_path, archive_filename)
+
+            archive_log_filename = '%s.%s' % (calc['calc_id'], 'log')
+            log_file = self._archive_dir.join_file(archive_log_filename)
+            if log_file.exists():
+                archive_zip.write(log_file.os_path, archive_log_filename)
+
+        archive_restricted_zip.close()
+        archive_public_zip.close()
+
+        # pack metadata
+        packed_metadata = PublicMetadata(packed_dir.os_path)
+        packed_metadata._create(self._metadata)
+
+        # move to public bucket
+        target_dir = DirectoryObject(config.files.public_bucket, self.upload_id, create=False, prefix=True)
+        assert not target_dir.exists()
+        shutil.move(packed_dir.os_path, target_dir.os_path)
+
+    def raw_file_manifest(self, path_prefix: str = None) -> Generator[str, None, None]:
+        upload_prefix_len = len(self._raw_dir.os_path) + 1
+        for root, _, files in os.walk(self._raw_dir.os_path):
+            for file in files:
+                path = os.path.join(root, file)[upload_prefix_len:]
+                if path_prefix is None or path.startswith(path_prefix):
+                    yield path
+
+    def calc_files(self, mainfile: str, with_mainfile: bool = True) -> Iterable[str]:
         """
-        Returns the names of all files that share the same prefix (object id),
-        respectively are part of the same directory (incl. files in sub directories).
-        In nomad terms, the aux files the this file. Returned siblings are relative
-        to the upload root directory.
+        Returns all the auxfiles and the mainfile for a given mainfile. This implements
+        nomad's logic about what is part of a calculation and what is not. The mainfile
+        is the first entry; the rest is sorted.
+        Arguments:
+            mainfile: The mainfile relative to the upload
+            with_mainfile: Whether to include the mainfile; default is True
         """
-        dirname = os.path.dirname(filename)
-        for other in self.filelist:
-            if other.startswith(dirname) and other != filename:
-                yield other
+        mainfile_object = self._raw_dir.join_file(mainfile)
+        if not mainfile_object.exists():
+            raise KeyError()
 
+        mainfile_basename = os.path.basename(mainfile)
+        calc_dir = os.path.dirname(mainfile_object.os_path)
+        calc_relative_dir = calc_dir[len(self._raw_dir.os_path) + 1:]
+        aux_files = sorted(
+            os.path.join(calc_relative_dir, path) for path in os.listdir(calc_dir)
+            if os.path.isfile(os.path.join(calc_dir, path)) and path != mainfile_basename)
+        if with_mainfile:
+            return [mainfile] + aux_files
+        else:
+            return aux_files
 
-class RepositoryFile(ObjectFile):
-    """
-    Represents a repository file. A repository file is a persistet bagged upload, incl.
-    the upload metadata. It is used to serve raw data.
-    """
-    def __init__(self, upload_hash: str) -> None:
-        super().__init__(
-            bucket=config.files.raw_bucket,
-            object_id=upload_hash,
-            ext='zip')
+    def _websafe_hash(self, hash: bytes, length: int = 0) -> str:
+        # websafe base64 ('-', '_' instead of '+', '/'); truncate to the requested
+        # length, or strip the two '=' padding chars of a full sha512 digest
+        if length > 0:
+            return base64.b64encode(hash, altchars=b'-_')[0:length].decode('utf-8')
+        else:
+            return base64.b64encode(hash, altchars=b'-_')[0:-2].decode('utf-8')
 
-        self.zipped_container = ZippedDataContainer(self.os_path)
+    def calc_id(self, mainfile: str) -> str:
+        """
+        Calculates an id for the given calc.
+        Arguments:
+            mainfile: The mainfile path relative to the upload that identifies the calc in the folder structure.
+        Returns:
+            The calc id
+        Raises:
+            KeyError: If the mainfile does not exist.
+        """
+        hash = hashlib.sha512()
+        hash.update(self.upload_id.encode('utf-8'))
+        hash.update(mainfile.encode('utf-8'))
+        return self._websafe_hash(hash.digest(), utils.default_hash_len)
 
-    def get_file(self, path: str) -> ZippedFile:
-        return self.zipped_container.get_file(path)
+    def calc_hash(self, mainfile: str) -> str:
+        """
+        Calculates a hash for the given calc based on file contents and aux file contents.
+        Arguments:
+            mainfile: The mainfile path relative to the upload that identifies the calc in the folder structure.
+        Returns:
+            The calculated hash
+        Raises:
+            KeyError: If the mainfile does not exist.
+        """
+        hash = hashlib.sha512()
+        for filepath in self.calc_files(mainfile):
+            with open(self._raw_dir.join_file(filepath).os_path, 'rb') as f:
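+                # stream the file in 64k chunks to bound memory usage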
+                for data in iter(lambda: f.read(65536), b''):
+                    hash.update(data)
 
-    @property
-    def manifest(self) -> List[str]:
-        return self.zipped_container.manifest
+        return self._websafe_hash(hash.digest(), utils.default_hash_len)
 
 
-class ArchiveFile(ObjectFile):
+class ArchiveBasedStagingUploadFiles(StagingUploadFiles):
     """
-    Represents the archive file for an individual calculation. Allows to write the
-    archive, read the archive, delete the archive.
+    :class:`StagingUploadFiles` based on a single uploaded archive file (.zip)
 
-    Archive files are stored in their own *bucket*.
+    Arguments:
+        local_path: Optional override for the path used to store/access the uploaded file.
     """
-    def __init__(self, archive_id: str) -> None:
-        super().__init__(
-            bucket=config.files.archive_bucket,
-            object_id=archive_id,
-            ext='json.gz' if config.files.compress_archive else 'json')
-
-    def bind_logger(self, logger):
-        upload_hash, calc_hash = self.object_id.split('/')
-        return super().bind_logger(logger).bind(
-            archive_id=self.object_id, upload_hash=upload_hash, calc_hash=calc_hash)
-
-    @contextmanager
-    def write_archive_json(self) -> Generator[TextIO, None, None]:
-        """ Context manager that yields a file-like to write the archive json. """
-        with self.open('wb') as binary_out:
-            if config.files.compress_archive:
-                gzip_wrapper = cast(TextIO, gzip.open(binary_out, 'wt'))
-                out = gzip_wrapper
-            else:
-                text_wrapper = io.TextIOWrapper(binary_out, encoding='utf-8')
-                out = text_wrapper
-
-            try:
-                yield out
-            finally:
-                out.flush()
-                out.close()
 
-        self.logger.debug('archive file written')
-
-    @contextmanager
-    def read_archive_json(self) -> Generator[TextIO, None, None]:
-        """ Context manager that yields a file-like to read the archive json. """
-        with self.open(mode='rb') as binary_in:
-            try:
-                if config.files.compress_archive:
-                    gzip_wrapper = cast(TextIO, gzip.open(binary_in, 'rt'))
-                    in_file = gzip_wrapper
-                else:
-                    text_wrapper = io.TextIOWrapper(binary_in, encoding='utf-8')
-                    in_file = text_wrapper
-            except FileNotFoundError:
-                raise KeyError()
+    formats = ['zip']
+    """ A human readable list of supported file formats. """
 
-            try:
-                yield in_file
-            finally:
-                in_file.close()
+    def __init__(
+            self, upload_id: str, local_path: str = None, file_name: str = '.upload',
+            *args, **kwargs) -> None:
+        super().__init__(upload_id, *args, **kwargs)
+        self._local_path = local_path
+        self._upload_file = self.join_file(file_name)
 
-        self.logger.debug('archive file read')
+    @property
+    def upload_file_os_path(self):
+        if self._local_path is not None:
+            return self._local_path
+        else:
+            return self._upload_file.os_path
 
-    @staticmethod
-    def delete_archives(upload_hash: str):
-        """ Delete all archives of one upload with the given hash. """
-        bucket = config.files.archive_bucket
-        Objects.delete_all(bucket, upload_hash)
+    @property
+    def is_valid(self) -> bool:
+        if not os.path.exists(self.upload_file_os_path):
+            return False
+        elif not os.path.isfile(self.upload_file_os_path):
+            return False
+        else:
+            return is_zipfile(self.upload_file_os_path)
 
-        utils.get_logger(__name__, bucket=bucket, upload_hash=upload_hash) \
-            .debug('archive files deleted')
+    def extract(self) -> None:
+        assert next(self.raw_file_manifest(), None) is None, 'can only extract once'
+        super().add_rawfiles(self.upload_file_os_path, force_archive=True)
 
+    def add_rawfiles(self, path: str, move: bool = False, prefix: str = None, force_archive: bool = False) -> None:
+        assert False, 'do not add_rawfiles to a %s' % self.__class__.__name__
 
-class ArchiveLogFile(ObjectFile):
-    """
-    Represents a log file that was created for processing a single calculation to create
-    an archive.
-    Logfiles are stored within the *archive_bucket* alongside the archive files.
-    """
-    def __init__(self, archive_id: str) -> None:
-        super().__init__(
-            bucket=config.files.archive_bucket,
-            object_id=archive_id,
-            ext='log')
 
+class PublicUploadFiles(UploadFiles):
+    def __init__(self, *args, **kwargs) -> None:
+        super().__init__(config.files.public_bucket, *args, **kwargs)
 
-class DataContainer(ABC):
-    """
-    An abstract baseclass for a *data container*. A data container is a persistent
-    bundle of related files, like the calculation raw data of a user upload.
+        self._metadata = PublicMetadata(self.os_path)
 
-    A container has a *manifest* and arbitrary *metadata*.
-    """
     @property
-    def manifest(self) -> List[str]:
-        """
-        A readonly list of paths to files within the container relative to the containers
-        payload directory.
-        """
-        pass
+    def metadata(self) -> Metadata:
+        return self._metadata
 
-    @property
-    def metadata(self) -> Dict[str, Any]:
-        """
-        The modifiable metadata of this manifest. On the top-level its a string keyed
-        dictionary. The values can be arbitrary, but have to be JSON-serializable.
-        Modifications have to be saved (:func:`save_metadata`).
-        """
-        pass
+    def _file(self, prefix: str, ext: str, path: str, *args, **kwargs) -> IO:
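+        """
+        Opens `path` from either the public or the restricted zip file. The public
+        zip is tried first; a hit in the restricted zip additionally requires
+        `_is_authorized()` to evaluate to True.
+        """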
+        mode = kwargs.get('mode') if len(args) == 0 else args[0]
+        kwargs.pop('mode', None)
+        mode = mode if mode else 'rb'
 
-    def save_metadata(self) -> None:
-        """ Persists metadata changes. """
-        pass
+        for access in ['public', 'restricted']:
+            try:
+                zip_file = self.join_file('%s-%s.%s.zip' % (prefix, access, ext))
+                with ZipFile(zip_file.os_path) as zf:
+                    # check existence first (raises KeyError), then authorization, so
+                    # no open file handle is leaked when Restricted is raised
+                    info = zf.getinfo(path)
+                    if access == 'restricted' and not self._is_authorized():
+                        raise Restricted
+                    f = zf.open(info, 'r', **kwargs)
+                    if 't' in mode:
+                        return io.TextIOWrapper(f)
+                    else:
+                        return f
+            except FileNotFoundError:
+                pass
+            except KeyError:
+                pass
 
-    def get_file(self, manifest_path: str) -> File:
-        """
-        Returns a file-like for the given manifest path.
-        """
-        pass
+        raise KeyError()
 
-    @property
-    def hash(self) -> str:
-        return self.metadata['Nomad-Hash']
+    def raw_file(self, file_path: str, *args, **kwargs) -> IO:
+        return self._file('raw', 'bagit', 'data/' + file_path, *args, **kwargs)
 
+    def raw_file_manifest(self, path_prefix: str = None) -> Generator[str, None, None]:
+        for access in ['public', 'restricted']:
+            try:
+                zip_file = self.join_file('raw-%s.bagit.zip' % access)
+                with ZipFile(zip_file.os_path) as zf:
+                    for full_path in zf.namelist():
+                        if not full_path.startswith('data/'):
+                            continue  # skip bagit tag files at the bag root
+                        path = full_path[5:]  # remove data/
+                        if path_prefix is None or path.startswith(path_prefix):
+                            yield path
+            except FileNotFoundError:
+                pass
 
-class BaggedDataContainer(DataContainer):
-    """
-    A *data container* based on *bagit*. Once created no more files can be added.
-    """
-    def __init__(self, path: str) -> None:
-        self.path = path
-        self.bag = bagit.Bag(path)
-        self._metadata = None
-        self.payload_directory = os.path.join(path, 'data')
+    def archive_file(self, calc_id: str, *args, **kwargs) -> IO:
+        return self._file('archive', self._archive_ext, '%s.%s' % (calc_id, self._archive_ext), *args, **kwargs)
 
-    @staticmethod
-    def create(path: str) -> 'BaggedDataContainer':
+    def archive_log_file(self, calc_id: str, *args, **kwargs) -> IO:
+        return self._file('archive', self._archive_ext, '%s.log' % calc_id, *args, **kwargs)
+
+    def repack(self) -> None:
         """
-        Makes a bag from the given directory and returns the respective BaggedDataContainer
-        instance.
+        Replaces the existing public/restricted data file pairs with new ones, based
+        on current restricted information in the metadata. Should be used after updating
+        the restrictions on calculations. This is potentially a long running operation.
         """
-        bag = bagit.make_bag(path, checksums=['sha512'])
-
-        # TODO implement NOMAD-coe's way of doing the hashing
-        hashes = [
-            value['sha512'] for key, value in bag.entries.items()
-            if key.startswith('data/')
-        ]
-        bag.info['Nomad-Hash'] = utils.hash(''.join(hashes))
-
-        bag.save()
-        return BaggedDataContainer(path)
+        pass
 
-    @property
-    def metadata(self):
-        if self._metadata is None:
-            self._metadata = BaggedDataContainer._load_bagit_metadata(self.bag.info)
-        return self._metadata
 
-    @staticmethod
-    def _load_bagit_metadata(info):
-        metadata = info
-        for key, value in metadata.items():
-            if key not in bagit.STANDARD_BAG_INFO_HEADERS:
-                try:
-                    metadata[key] = json.loads(value)
-                except Exception:
-                    pass
-        return metadata
+class Calc(datamodel.Calc):
+    @classmethod
+    def load_from(cls, obj):
+        return Calc(obj.upload.upload_id, obj.calc_id)
 
-    def save_metadata(self):
-        metadata = self.bag.info
-        for key, value in metadata.items():
-            if key not in bagit.STANDARD_BAG_INFO_HEADERS and not isinstance(value, str):
-                metadata[key] = json.dumps(value)
-        self.bag.save()
+    def __init__(self, upload_id: str, calc_id: str) -> None:
+        self._calc_id = calc_id
+        self.upload_files = UploadFiles.get(upload_id, is_authorized=lambda: True)
+        if self.upload_files is None:
+            raise KeyError
+        self._data = self.upload_files.metadata.get(calc_id)
 
     @property
-    def manifest(self):
-        return [path[5:] for path in self.bag.entries.keys() if path.startswith('data/')]
-
-    def get_file(self, path):
-        return File(os.path.join(self.payload_directory, path))
-
-
-class ZippedDataContainer(File, DataContainer):
-    """
-    A *bagit*-based data container that has been zipped. Its metadata cannot be changed
-    anymore.
-    """
-    def __init__(self, os_path: str) -> None:
-        super(ZippedDataContainer, self).__init__(os_path)
-        self._metadata = None
-        self._base_directory = os.path.splitext(os.path.basename(os_path))[0]
-        self._payload_directory = '%s/data/' % self._base_directory
-        self._payload_deirectory_len = len(self._payload_directory)
+    def upload(self):
+        return self.upload_files
 
-    @staticmethod
-    def create(path: str, target: str = None) -> 'ZippedDataContainer':
-        """
-        Creates a zipped bag from a bag.
-
-        Arguments:
-            path: The path to the bag
-            target:
-                The path to the zip (excl. .zip extension). Base dir in zip will be
-                based on the target path.
-        """
-        if not target:
-            target = path + '.zip'
-
-        target = os.path.abspath(target)
-
-        assert os.path.isdir(path)
-        assert os.path.exists(os.path.dirname(target))
-
-        # manually created zipfile instead of shutils.make_zip to use base_dir from
-        # target while zipping path
-        base_dir = os.path.splitext(os.path.basename(target))[0]
-        path_prefix_len = len(path) + 1
-        with ZipFile(target, "w", compression=ZIP_DEFLATED, allowZip64=True) as zip_file:
-            for root, _, filenames in os.walk(path):
-                for name in filenames:
-                    file_path = os.path.join(root, name)
-                    zipped_path = os.path.join(base_dir, file_path[path_prefix_len:])
-                    zip_file.write(file_path, zipped_path)
-
-        return ZippedDataContainer(target)
-
-    @contextmanager
-    def zip_file(self):
-        assert self.exists(), "Can only access uploaded file if it exists."
-        zip_file = None
-        try:
-            zip_file = ZipFile(self.os_path)
-            yield zip_file
-        except BadZipFile as e:
-            raise FileError('Upload is not a zip file', e)
-        finally:
-            if zip_file is not None:
-                zip_file.close()
+    @property
+    def calc_data(self) -> dict:
+        return self._data['section_repository_info']['section_repository_parserdata']
 
     @property
-    def manifest(self):
-        with self.zip_file() as zip_file:
-            return [
-                zip_info.filename[self._payload_deirectory_len:] for zip_info in zip_file.filelist
-                if not zip_info.filename.endswith('/') and zip_info.filename.startswith(self._payload_directory)]
+    def calc_id(self) -> str:
+        return self._calc_id
 
     @property
-    def metadata(self):
-        if self._metadata is None:
-            self._metadata = self._load_metadata()
-        return self._metadata
+    def mainfile(self) -> str:
+        return self._data['section_repository_info']['repository_filepaths'][0]
+
+    def to_calc_with_metadata(self):
+        return repo_data_to_calc_with_metadata(
+            self.upload.upload_id, self.calc_id, self._data)
 
-    def _load_metadata(self):
-        with ZippedFile(self.os_path, '%s/bag-info.txt' % self._base_directory).open('r') as metadata_file:
-            metadata_contents = metadata_file.read()
 
-        metadata_file = io.StringIO(metadata_contents.decode("utf-8"))
-        tags = {}
-        for name, value in bagit._parse_tags(metadata_file):
-            if name not in tags:
-                tags[name] = value
-                continue
+def repo_data_to_calc_with_metadata(upload_id, calc_id, repo_data):
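+    """
+    Flattens the CoE repo `section_repository_*` structure into a
+    :class:`datamodel.CalcWithMetadata` instance.
+    """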
+    calc_data = repo_data['section_repository_info']['section_repository_parserdata']
 
-            if not isinstance(tags[name], list):
-                tags[name] = [tags[name], value]
-            else:
-                tags[name].append(value)
+    target = datamodel.CalcWithMetadata(upload_id=upload_id)
+    target.calc_id = calc_id
+    target.basis_set_type = calc_data['repository_basis_set_type']
+    target.crystal_system = calc_data['repository_crystal_system']
+    target.XC_functional_name = calc_data['repository_xc_treatment']
+    target.system_type = calc_data['repository_system_type']
+    target.atom_labels = calc_data['repository_atomic_elements']
+    target.space_group_number = calc_data['repository_spacegroup_nr']
+    target.chemical_composition = calc_data['repository_chemical_formula']
+    target.program_version = calc_data['repository_code_version']
+    target.program_name = calc_data['repository_program_name']
+    target.files = repo_data['section_repository_info']['repository_filepaths']
+    target.mainfile = target.files[0]
 
-        return BaggedDataContainer._load_bagit_metadata(tags)
+    return target
 
-    def get_file(self, path):
-        return ZippedFile(self.path, self._payload_directory + path)
 
-    def get_zip_path(self, path):
-        return self._payload_directory + path
+datamodel.CalcWithMetadata.register_mapping(Calc, Calc.to_calc_with_metadata)
diff --git a/nomad/infrastructure.py b/nomad/infrastructure.py
index c4ad7b849dfc4396432a8858d571706925699d71..a8498bf2349a247827afc6a47f2e4e3c8e1d0d15 100644
--- a/nomad/infrastructure.py
+++ b/nomad/infrastructure.py
@@ -20,13 +20,14 @@ infrastructure services.
 import os.path
 import shutil
 from contextlib import contextmanager
-
 import psycopg2
+import psycopg2.extensions
 from sqlalchemy import create_engine
 from sqlalchemy.orm import Session
 from elasticsearch.exceptions import RequestError
 from elasticsearch_dsl import connections
 from mongoengine import connect
+from passlib.hash import bcrypt
 
 from nomad import config, utils
 
@@ -83,8 +84,8 @@ def setup_elastic():
     logger.info('setup elastic connection')
 
     try:
-        from nomad.repo import RepoCalc
-        RepoCalc.init()
+        from nomad.search import Entry
+        Entry.init()
     except RequestError as e:
         if e.status_code == 400 and 'resource_already_exists_exception' in e.error:
             pass  # happens if two services try this at the same time
@@ -94,92 +95,234 @@ def setup_elastic():
         logger.info('init elastic index')
 
 
-def setup_repository_db():
+def setup_repository_db(**kwargs):
+    """ Creates a connection and stores it in the module variables. """
+    repo_args = dict(readonly=False)
+    repo_args.update(kwargs)
+    connection, db = sqlalchemy_repository_db(**repo_args)
+
+    global repository_db
+    global repository_db_conn
+
+    repository_db_conn, repository_db = connection, db
+    logger.info('setup repository db connection')
+
+    return repository_db_conn, repository_db
+
+
+def sqlalchemy_repository_db(exists: bool = False, readonly: bool = True, **kwargs):
     """
-    Makes sure that a minimal NOMAD-coe repository postgres db exists.
-    Returns:
-        An sqlalchemy session for the NOMAD-coe repository postgres db.
+    Returns SQLAlchemy connection and session for the given db parameters.
+
+    Arguments:
+        exists: Set to False (default) to check for and, if necessary, create the db and schema
+        readonly: Set to False for a write enabled connection
+        **kwargs: Overwrite `config.repository_db` parameters
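+
+    A usage sketch (the dbname is hypothetical)::
+
+        conn, session = sqlalchemy_repository_db(exists=True, readonly=True, dbname='nomad')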
     """
+    dbname = kwargs.get('dbname', config.repository_db.dbname)
+    db_exists = exists
+    if not db_exists:
+        try:
+            with repository_db_connection(dbname=dbname):
+                logger.info('repository db postgres database already exists')
+                db_exists = True
+        except psycopg2.OperationalError as e:
+            if ('database "%s" does not exist' % dbname) not in str(e):
+                raise e
+
+    if not db_exists:
+        logger.info('repository db postgres database does not exist')
+        try:
+            with repository_db_connection(dbname='postgres', with_trans=False) as con:
+                with con.cursor() as cursor:
+                    cursor.execute("CREATE DATABASE %s  ;" % dbname)
+                logger.info('repository db postgres database created')
+        except Exception as e:
+            logger.info('could not create repository db postgres database', exc_info=e)
+            raise e
+
     # ensure that the schema exists
-    with repository_db_connection() as conn:
-        with conn.cursor() as cur:
-            cur.execute(
-                "select exists(select * from information_schema.tables "
-                "where table_name='users')")
-            exists = cur.fetchone()[0]
+    schema_exists = exists
+    if not schema_exists:
+        with repository_db_connection(dbname=dbname) as conn:
+            with conn.cursor() as cur:
+                cur.execute(
+                    "select exists(select * from information_schema.tables "
+                    "where table_name='users')")
+                schema_exists = cur.fetchone()[0]
+        if not schema_exists:
+            logger.info('repository db postgres schema does not exist')
+            reset_repository_db_schema(dbname=dbname)
+        else:
+            logger.info('repository db postgres schema already exists')
 
+    # set the admin user password
     if not exists:
-        reset_repository_db()
-
-    global repository_db
-    global repository_db_conn
-
-    url = 'postgresql://%s:%s@%s:%d/%s' % (
-        config.repository_db.user,
-        config.repository_db.password,
-        config.repository_db.host,
-        config.repository_db.port,
-        config.repository_db.dbname)
+        with repository_db_connection(dbname=dbname) as conn:
+            with conn.cursor() as cur:
+                cur.execute(
+                    "UPDATE public.users SET password=%s WHERE user_id=1;",
+                    (bcrypt.encrypt(config.services.admin_password, ident='2y'),))
+
+    def no_flush():
+        pass
+
+    params = config.repository_db._asdict()
+    params.update(**kwargs)
+    url = 'postgresql://%s:%s@%s:%d/%s' % utils.to_tuple(params, 'user', 'password', 'host', 'port', 'dbname')
     engine = create_engine(url, echo=False)
 
     repository_db_conn = engine.connect()
     repository_db = Session(bind=repository_db_conn, autocommit=True)
-    logger.info('setup repository db')
+    if readonly:
+        repository_db.flush = no_flush
+
+    return repository_db_conn, repository_db
 
 
 def reset():
-    """ Resets the databases mongo, elastic/calcs, and repository db. Be careful. """
-    logger.info('reset mongodb')
-    mongo_client.drop_database(config.mongo.db_name)
+    """
+    Resets the databases mongo, elastic/calcs, repository db and all files. Be careful.
+    In contrast to :func:`remove`, it only removes the contents of dbs and indices.
+    This function merely attempts to remove everything; failures are only logged
+    and there is no guarantee it will succeed.
+    """
+    try:
+        if not mongo_client:
+            setup_mongo()
+        mongo_client.drop_database(config.mongo.db_name)
+        logger.info('mongodb reset')
+    except Exception as e:
+        logger.error('exception reset mongodb', exc_info=e)
 
-    logger.info('reset elastic search')
-    elastic_client.indices.delete(index=config.elastic.index_name)
-    from nomad.repo import RepoCalc
-    RepoCalc.init()
+    try:
+        if not elastic_client:
+            setup_elastic()
+        elastic_client.indices.delete(index=config.elastic.index_name)
+        from nomad.search import Entry
+        Entry.init()
+        logger.info('elastic index reset')
+    except Exception as e:
+        logger.error('exception resetting elastic', exc_info=e)
 
-    logger.info('reset repository db')
-    reset_repository_db()
+    try:
+        reset_repository_db()
+        logger.info('repository db reset')
+    except Exception as e:
+        logger.error('exception resetting repository db', exc_info=e)
 
     logger.info('reset files')
-    shutil.rmtree(config.fs.objects, ignore_errors=True)
-    shutil.rmtree(config.fs.tmp, ignore_errors=True)
+    try:
+        shutil.rmtree(config.fs.objects, ignore_errors=True)
+        shutil.rmtree(config.fs.tmp, ignore_errors=True)
+    except Exception as e:
+        logger.error('exception deleting files', exc_info=e)
+
+
+def remove():
+    """
+    Removes the databases mongo, elastic, repository db, and all files. Be careful.
+    This function merely attempts to remove everything; failures are only logged
+    and there is no guarantee it will succeed.
+    """
+    try:
+        if not mongo_client:
+            setup_mongo()
+        mongo_client.drop_database(config.mongo.db_name)
+        logger.info('mongodb deleted')
+    except Exception as e:
+        logger.error('exception deleting mongodb', exc_info=e)
+
+    try:
+        if not elastic_client:
+            setup_elastic()
+        elastic_client.indices.delete(index=config.elastic.index_name)
+        logger.info('elastic index deleted')
+    except Exception as e:
+        logger.error('exception deleting elastic', exc_info=e)
+
+    try:
+        if repository_db is not None:
+            repository_db.expunge_all()
+            repository_db.invalidate()
+        if repository_db_conn is not None:
+            repository_db_conn.close()
+        with repository_db_connection(dbname='postgres', with_trans=False) as con:
+            with con.cursor() as cur:
+                cur.execute('DROP DATABASE IF EXISTS %s' % config.repository_db.dbname)
+        logger.info('repository db deleted')
+    except Exception as e:
+        logger.error('exception deleting repository db', exc_info=e)
+
+    logger.info('remove files')
+    try:
+        shutil.rmtree(config.fs.objects, ignore_errors=True)
+        shutil.rmtree(config.fs.tmp, ignore_errors=True)
+    except Exception as e:
+        logger.error('exception deleting files', exc_info=e)
 
 
 @contextmanager
-def repository_db_connection():
+def repository_db_connection(dbname=None, with_trans=True):
     """ Contextmanager for a psycopg2 session for the NOMAD-coe repository postgresdb """
+    repository_db_dict = config.repository_db._asdict()
+    if dbname is not None:
+        repository_db_dict.update(dbname=dbname)
     conn_str = "host='%s' port=%d dbname='%s' user='%s' password='%s'" % (
-        config.repository_db.host,
-        config.repository_db.port,
-        config.repository_db.dbname,
-        config.repository_db.user,
-        config.repository_db.password)
+        repository_db_dict['host'],
+        repository_db_dict['port'],
+        repository_db_dict['dbname'],
+        repository_db_dict['user'],
+        repository_db_dict['password'])
 
     conn = psycopg2.connect(conn_str)
+    if not with_trans:
+        conn.set_isolation_level(psycopg2.extensions.ISOLATION_LEVEL_AUTOCOMMIT)
     try:
         yield conn
     except Exception as e:
         logger.error('Unhandled exception within repository db connection.', exc_info=e)
         conn.rollback()
         conn.close()
-        return
+        raise e
 
     conn.commit()
     conn.close()
 
 
 def reset_repository_db():
-    """ Drops the existing NOMAD-coe repository postgres db and creates a new minimal one. """
-    with repository_db_connection() as conn:
+    """ Drops the existing NOMAD-coe repository postgres schema and creates a new minimal one. """
+    global repository_db
+    global repository_db_conn
+
+    # invalidate and close all connections and sessions
+    if repository_db is not None:
+        repository_db.expunge_all()
+        repository_db.invalidate()
+        repository_db.close_all()
+    if repository_db_conn is not None:
+        repository_db_conn.close()
+        repository_db_conn.engine.dispose()
+
+    # perform the reset
+    reset_repository_db_schema()
+
+    # try to repair existing db connections
+    if repository_db is not None:
+        new_connection, repository_db = setup_repository_db(exists=False)
+        repository_db.bind = new_connection
+        repository_db_conn = new_connection
+
+
+def reset_repository_db_schema(**kwargs):
+    with repository_db_connection(with_trans=False, **kwargs) as conn:
         with conn.cursor() as cur:
+            cur.execute("DROP SCHEMA IF EXISTS public CASCADE;")
+
             cur.execute(
-                "DROP SCHEMA public CASCADE;"
                 "CREATE SCHEMA public;"
                 "GRANT ALL ON SCHEMA public TO postgres;"
                 "GRANT ALL ON SCHEMA public TO public;")
             sql_file = os.path.join(os.path.dirname(__file__), 'empty_repository_db.sql')
             cur.execute(open(sql_file, 'r').read())
-
-
-if __name__ == '__main__':
-    reset_repository_db()
+            logger.info('(re-)created repository db postgres schema')
diff --git a/nomad/migration.py b/nomad/migration.py
new file mode 100644
index 0000000000000000000000000000000000000000..4ef5cc08bd92fca5c7f05302aea810aa4cd83bbc
--- /dev/null
+++ b/nomad/migration.py
@@ -0,0 +1,417 @@
+# Copyright 2018 Markus Scheidgen
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#   http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+"""
+This module contains functions to read data from NOMAD coe, external sources,
+other/older nomad@FAIRDI instances to mass upload it to a new nomad@FAIRDI instance.
+
+.. autoclass:: NomadCOEMigration
+.. autoclass:: SourceCalc
+"""
+
+from typing import Generator, Tuple, List
+import os.path
+import json
+import zipstream
+import zipfile
+import math
+from mongoengine import Document, IntField, StringField, DictField
+from passlib.hash import bcrypt
+from werkzeug.contrib.iterio import IterIO
+import time
+from bravado.exception import HTTPNotFound
+from datetime import datetime
+
+from nomad import utils, config, infrastructure
+from nomad.files import repo_data_to_calc_with_metadata
+from nomad.coe_repo import User, Calc
+from nomad.datamodel import CalcWithMetadata
+from nomad.processing import FAILURE, SUCCESS
+
+
+class SourceCalc(Document):
+    """
+    Mongo document used as a calculation, upload, and metadata db and index
+    built from a given source db. Each :class:`SourceCalc` entry relates
+    a pid, mainfile, and upload "id" with each other for a corresponding calculation.
+    It might also contain the user metadata. The uploads are identified via the
+    specific path segment that identifies an upload on the CoE repo FS(s), without
+    any prefixes (e.g. $EXTRACTED, /data/upload, etc.).
+    """
+    pid = IntField(primary_key=True)
+    mainfile = StringField()
+    upload = StringField()
+    metadata = DictField()
+
+    extracted_prefix = '$EXTRACTED/'
+    sites = ['/data/nomad/extracted/', '/nomad/repository/extracted/']
+    prefixes = [extracted_prefix] + sites
+
+    meta = dict(indexes=['pid', 'upload'])
+
+    _dataset_cache: dict = {}
+
+    @staticmethod
+    def index(source, drop: bool = False, with_metadata: bool = True, per_query: int = 100) \
+            -> Generator[Tuple['SourceCalc', int], None, None]:
+        """
+        Creates a collection of :class:`SourceCalc` documents that represent source repo
+        db entries.
+
+        Arguments:
+            source: The source db sql alchemy session
+            drop: True to drop and re-create the collection; otherwise the existing one
+                is updated. Default is False.
+            with_metadata: True to also grab all metadata and store it, default is True.
+            per_query: The implementation tries to grab almost all data with a heavily
+                joined query on the CoE snowflake/star shaped schema.
+                The query cannot ask for the whole db at once: choose how many calculations
+                should be read at a time to optimize for your application.
+
+        Returns:
+            yields tuples (:class:`SourceCalc`, #calcs_total)
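+
+        A usage sketch (assuming an open source db session `source_db`)::
+
+            for source_calc, total in SourceCalc.index(source_db, drop=True):
+                pass  # e.g. report progress against total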
+        """
+        if drop:
+            SourceCalc.drop_collection()
+
+        last_source_calc = SourceCalc.objects().order_by('-pid').first()
+        start_pid = last_source_calc.pid if last_source_calc is not None else 0
+        source_query = source.query(Calc)
+        total = source_query.count()
+
+        while True:
+            calcs = source_query \
+                .filter(Calc.coe_calc_id > start_pid) \
+                .order_by(Calc.coe_calc_id) \
+                .limit(per_query)
+
+            source_calcs = []
+            for calc in calcs:
+                if calc.calc_metadata is None or calc.calc_metadata.filenames is None:
+                    yield None, total
+                    continue  # dataset case
+
+                filenames = json.loads(calc.calc_metadata.filenames.decode('utf-8'))
+                filename = filenames[0]
+                for prefix in SourceCalc.prefixes:
+                    filename = filename.replace(prefix, '')
+                segments = [file.strip('\\') for file in filename.split('/')]
+
+                source_calc = SourceCalc(pid=calc.pid)
+                source_calc.upload = segments[0]
+                source_calc.mainfile = os.path.join(*segments[1:])
+                if with_metadata:
+                    source_calc.metadata = calc.to(CalcWithMetadata)
+                source_calcs.append(source_calc)
+                start_pid = source_calc.pid
+
+                yield source_calc, total
+
+            if len(source_calcs) == 0:
+                break
+            else:
+                SourceCalc.objects.insert(source_calcs)
+
+
+class NomadCOEMigration:
+    """
+    Drives a migration from the NOMAD CoE repository db to nomad@FAIRDI. It is assumed
+    that this class is never used on the worker or api service. It uses the default
+    coe repo connection as the connection to the source repository db.
+
+    Attributes:
+        source: SQLAlchemy session for the source NOMAD coe repository db.
+
+    Arguments:
+        sites: Directories that might contain uploads to migrate. Use to override defaults.
+        pid_prefix: All PIDs for previously unknown calculations will get a PID higher
+            than that. Use to override default.
+    """
+
+    default_sites = [
+        '/nomad/repository/data/uploads',
+        '/nomad/repository/data/extracted',
+        '/data/nomad/uploaded/',
+        '/data/nomad/extracted/']
+
+    default_pid_prefix = int(1e7)
+
+    archive_filename = 'archive.tar.gz'
+    """ The standard name for tarred uploads in the CoE repository. """
+
+    def __init__(
+            self,
+            sites: List[str] = default_sites,
+            pid_prefix: int = default_pid_prefix) -> None:
+
+        self.sites, self.pid_prefix = sites, pid_prefix
+        self.logger = utils.get_logger(__name__)
+        self._client = None
+        self.source = infrastructure.repository_db
+
+    @property
+    def client(self):
+        if self._client is None:
+            from nomad.client import create_client
+            self._client = create_client()
+
+        return self._client
+
+    def copy_users(self, target_db):
+        """ Copy all users, keeping their ids, within a single transaction. """
+        target_db.begin()
+        for source_user in self.source.query(User).all():
+            self.source.expunge(source_user)  # removes user from the source session
+            target_db.merge(source_user)
+
+        admin = target_db.query(User).filter_by(email='admin').first()
+        if admin is None:
+            admin = User(
+                user_id=0, email='admin', first_name='admin', last_name='admin',
+                password=bcrypt.encrypt(config.services.admin_password, ident='2y'))
+            target_db.add(admin)
+        target_db.commit()
+
+    def _validate(self, upload_id: str, calc_id: str, source_calc: dict, logger) -> bool:
+        """
+        Validates the given processed calculation, assuming that the data in the given
+        source_calc is correct.
+
+        Returns:
+            False, if the calculation differs from the source calc.
+        """
+        repo_calc = self.client.repo.get_repo_calc(
+            upload_id=upload_id, calc_id=calc_id).response().result
+
+        is_valid = True
+        target_calc = repo_data_to_calc_with_metadata(upload_id, calc_id, repo_calc)
+
+        for key, target_value in target_calc.items():
+            if key in ['calc_id', 'upload_id', 'files']:
+                continue
+
+            source_value = source_calc.get(key, None)
+
+            def report_mismatch():
+                logger.info(
+                    'source target mismatch', quantity=key,
+                    source_value=source_value, target_value=target_value)
+
+            if (source_value is None or target_value is None) and source_value != target_value:
+                report_mismatch()
+                is_valid = False
+                continue
+
+            if isinstance(target_value, list):
+                if len(set(source_value).intersection(target_value)) != len(target_value):
+                    report_mismatch()
+                    is_valid = False
+                continue
+
+            if isinstance(source_value, str):
+                source_value = source_value.lower()
+                target_value = str(target_value).lower()
+
+            if source_value != target_value:
+                report_mismatch()
+                is_valid = False
+
+        return is_valid
+
+    def migrate(self, *args):
+        """
+        Migrate the given uploads.
+
+        It takes upload 'id's as args. Alternatively takes absolute paths to uploads.
+        It tries to be as flexible as possible with those 'id's: looking at all
+        configured sites, dealing with extracted and tarred/zipped uploads, dealing
+        with paths to files and directories.
+
+        Requires a built :func:`index` to look for existing data in the source db. This
+        will be used to add user (and other, PID, ...) metadata and validate calculations.
+
+        Uses PIDs of identified old calculations. Will create new PIDs for previously
+        unknown uploads. New PIDs will be chosen from a `prefix++` range of ints.
+
+        Returns: Yields a dictionary with status and statistics for each given upload.
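+
+        Example (a sketch; the upload spec is hypothetical)::
+
+            for report in migration.migrate('some_upload_id'):
+                print(report.migrated_calcs, 'of', report.total_source_calcs, 'migrated')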
+        """
+
+        upload_specs = args
+        for upload_spec in upload_specs:
+            # identify upload
+            upload_path = None
+            abs_upload_path = os.path.abspath(upload_spec)
+            if os.path.exists(abs_upload_path):
+                upload_path = upload_spec
+            else:
+                for site in self.sites:
+                    potential_upload_path = os.path.join(site, upload_spec)
+                    if os.path.exists(potential_upload_path):
+                        upload_path = potential_upload_path
+                        break
+
+            if upload_path is None:
+                error = 'upload does not exist'
+                self.logger.error(error, upload_spec=upload_spec)
+                yield dict(status=FAILURE, error=error)
+                continue
+
+            # prepare the upload by determining/creating an upload file, name, source upload id
+            if os.path.isfile(upload_path):
+                upload_archive_f = open(upload_path, 'rb')
+                source_upload_id = os.path.split(os.path.split(upload_path)[0])[1]
+                upload_name = os.path.basename(upload_path)
+            else:
+                potential_upload_archive = os.path.join(
+                    upload_path, NomadCOEMigration.archive_filename)
+                if os.path.isfile(potential_upload_archive):
+                    upload_archive_f = open(potential_upload_archive, 'rb')
+                    source_upload_id = os.path.split(os.path.split(potential_upload_archive)[0])[1]
+                    upload_name = os.path.basename(potential_upload_archive)
+                else:
+                    source_upload_id = os.path.split(upload_path)[1]
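+                    # no archive file found: stream-zip the extracted upload directory on the fly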
+                    zip_file = zipstream.ZipFile()
+                    path_prefix = len(upload_path) + 1
+                    for root, _, files in os.walk(upload_path):
+                        for file in files:
+                            zip_file.write(
+                                os.path.join(root, file),
+                                os.path.join(root[path_prefix:], file),
+                                zipfile.ZIP_DEFLATED)
+                    zip_file.write(upload_path)
+                    upload_archive_f = IterIO(zip_file)
+                    upload_name = '%s.zip' % source_upload_id
+
+            # upload and process the upload file
+            upload = self.client.uploads.upload(
+                file=upload_archive_f, name=upload_name).response().result
+            upload_archive_f.close()
+
+            upload_logger = self.logger.bind(
+                source_upload_id=source_upload_id, upload_id=upload.upload_id)
+
+            # grab source metadata
+            upload_metadata_calcs = list()
+            metadata_dict = dict()
+            upload_metadata = dict(calculations=upload_metadata_calcs)
+            for source_calc in SourceCalc.objects(upload=source_upload_id):
+                source_metadata = CalcWithMetadata(upload_id=upload.upload_id, **source_calc.metadata)
+                source_metadata.mainfile = source_calc.mainfile
+                source_metadata.pid = source_calc.pid
+                source_metadata.__migrated = False
+                upload_metadata_calcs.append(source_metadata)
+                metadata_dict[source_calc.mainfile] = source_metadata
+
+            report = utils.POPO()
+            report.total_source_calcs = len(metadata_dict)
+            report.failed_calcs = 0
+            report.migrated_calcs = 0
+            report.calcs_with_diffs = 0
+            report.new_calcs = 0
+            report.missing_calcs = 0
+
+            # wait for complete upload
+            while upload.tasks_running:
+                upload = self.client.uploads.get_upload(upload_id=upload.upload_id).response().result
+                time.sleep(0.1)
+
+            if upload.tasks_status == FAILURE:
+                error = 'failed to process upload'
+                report.missing_calcs = report.total_source_calcs
+                report.total_calcs = 0
+                upload_logger.error(error, process_errors=upload.errors)
+                yield report
+                continue
+            else:
+                report.total_calcs = upload.calcs.pagination.total
+
+            # verify upload
+            for page in range(1, math.ceil(report.total_calcs / 100) + 1):
+                upload = self.client.uploads.get_upload(
+                    upload_id=upload.upload_id, per_page=100, page=page,
+                    order_by='mainfile').response().result
+
+                for calc_proc in upload.calcs.results:
+                    calc_logger = upload_logger.bind(
+                        calc_id=calc_proc.calc_id,
+                        mainfile=calc_proc.mainfile)
+
+                    source_calc = metadata_dict.get(calc_proc.mainfile, None)
+                    if calc_proc.tasks_status == SUCCESS:
+                        if source_calc is None:
+                            calc_logger.info('processed a calc that has no source')
+                            report.new_calcs += 1
+                            continue
+                        else:
+                            source_calc.__migrated = True
+                            report.migrated_calcs += 1
+
+                        if not self._validate(
+                                upload.upload_id, calc_proc.calc_id, source_calc, calc_logger):
+                            report.calcs_with_diffs += 1
+                    else:
+                        report.failed_calcs += 1
+                        calc_logger.error(
+                            'could not process a calc', process_errors=calc_proc.errors)
+                        continue
+
+            for source_calc in upload_metadata_calcs:
+                if source_calc.__migrated is False:
+                    report.missing_calcs += 1
+                    upload_logger.info(
+                        'no match or processed calc for source calc',
+                        mainfile=source_calc.mainfile)
+
+            # commit upload
+            admin_keys = ['upload_time', 'uploader', 'pid']
+
+            def transform(calcWithMetadata):
+                result = dict()
+                for key, value in calcWithMetadata.items():
+                    if key in admin_keys:
+                        target_key = '_%s' % key
+                    else:
+                        target_key = key
+
+                    if isinstance(value, datetime):
+                        value = value.isoformat()
+                    result[target_key] = value
+                return result
+
+            upload_metadata['calculations'] = [
+                transform(calc) for calc in upload_metadata['calculations']
+                if calc.__migrated]
+
+            if report.total_calcs > report.failed_calcs:
+                upload = self.client.uploads.exec_upload_command(
+                    upload_id=upload.upload_id,
+                    payload=dict(command='commit', metadata=upload_metadata)
+                ).response().result
+
+                while upload.process_running:
+                    try:
+                        upload = self.client.uploads.get_upload(
+                            upload_id=upload.upload_id).response().result
+                        time.sleep(0.1)
+                    except HTTPNotFound:
+                        # the proc upload will be deleted by the commit command
+                        break
+
+            # report
+            upload_logger.info('migrated upload', **report)
+            yield report
+
+    def index(self, *args, **kwargs):
+        """ see :func:`SourceCalc.index` """
+        return SourceCalc.index(self.source, *args, **kwargs)
diff --git a/nomad/normalizing/__init__.py b/nomad/normalizing/__init__.py
index f05dd190cead7b297e8a0e392ef99889c4f7bdad..f9ed902e81a9ae880820e23419c27c596f155693 100644
--- a/nomad/normalizing/__init__.py
+++ b/nomad/normalizing/__init__.py
@@ -16,6 +16,7 @@ from typing import List, Any
 from .normalizer import Normalizer
 from .system import SystemNormalizer
 from .fhiaims import FhiAimsBaseNormalizer
+from .repository import RepositoryNormalizer
 
 """
 After parsing calculations have to be normalized with a set of *normalizers*.
@@ -43,4 +44,5 @@ There is one ABC for all normalizer:
 normalizers: List[Any] = [
     SystemNormalizer,
     FhiAimsBaseNormalizer,
+    RepositoryNormalizer
 ]
diff --git a/nomad/normalizing/repository.py b/nomad/normalizing/repository.py
new file mode 100644
index 0000000000000000000000000000000000000000..72344e19c5f18b5374a44d23e1a4904fc699dc48
--- /dev/null
+++ b/nomad/normalizing/repository.py
@@ -0,0 +1,86 @@
+# Copyright 2018 Fawzi Mohamed, Danio Brambila, Markus Scheidgen
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#   http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+import re
+
+from nomad.parsing import BadContextURI
+
+from .normalizer import Normalizer
+
+
+class RepositoryNormalizer(Normalizer):
+    """
+    The normalizer that turns normalized parse results into a set of metadata
+    quantities for the repository.
+    """
+    xc_treatments = {
+        'gga': 'GGA',
+        'hf_': 'HF',
+        'oep': 'OEP',
+        'hyb': 'hybrid',
+        'mgga': 'meta-GGA',
+        'vdw': 'vdW',
+        'lda': 'LDA'
+    }
+    """ https://gitlab.mpcdf.mpg.de/nomad-lab/nomad-meta-info/wikis/metainfo/XC-functional """
+
+    version_re = re.compile(r'(\d+(\.\d+(\.\d+)?)?)')
+
+    def map_functional_name_to_xc_treatment(self, name):
+        return RepositoryNormalizer.xc_treatments.get(name[:3].lower(), name)
+
+    def simplify_version(self, version):
+        match = RepositoryNormalizer.version_re.search(version)
+        if match is None:
+            return version
+        else:
+            return match.group(0)
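+
+    # e.g. (hypothetical inputs) simplify_version('6.1-gamma(spglib)') returns '6.1',
+    # and map_functional_name_to_xc_treatment('GGA_X_PBE') returns 'GGA'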
+
+    def normalize(self, logger=None) -> None:
+        super().normalize(logger)
+        b = self._backend
+
+        repository_info_context = '/section_repository_info/0'
+        try:
+            b.openContext(repository_info_context)
+        except BadContextURI:
+            b.openNonOverlappingSection('section_repository_info')
+            repository_info_context = None
+
+        b.openNonOverlappingSection('section_repository_parserdata')
+
+        b.addValue('repository_checksum', b.get_value('calc_hash', 0))
+        b.addValue('repository_chemical_formula', b.get_value('chemical_composition_bulk_reduced', 0))
+        b.addValue('repository_parser_id', b.get_value('parser_name', 0))
+        atom_labels = b.get_value('atom_labels', 0)
+        b.addValue('repository_atomic_elements', list(set(atom_labels)))
+        b.addValue('repository_atomic_elements_count', len(atom_labels))
+        b.addValue('repository_basis_set_type', b.get_value('program_basis_set_type', 0))
+        b.addValue('repository_crystal_system', b.get_value('crystal_system', 0))
+        b.addValue('repository_program_name', b.get_value('program_name', 0))
+        b.addValue(
+            'repository_code_version',
+            self.simplify_version(b.get_value('program_version', 0)))
+        b.addValue('repository_spacegroup_nr', b.get_value('space_group_number', 0))
+        b.addValue('repository_system_type', b.get_value('system_type', 0))
+        b.addValue(
+            'repository_xc_treatment',
+            self.map_functional_name_to_xc_treatment(b.get_value('XC_functional_name', 0)))
+
+        b.closeNonOverlappingSection('section_repository_parserdata')
+        if repository_info_context is None:
+            b.closeNonOverlappingSection('section_repository_info')
+        else:
+            b.closeContext(repository_info_context)
+        b.finishedParsingSession("ParseSuccess", None)
diff --git a/nomad/parsing/backend.py b/nomad/parsing/backend.py
index 1811e29a79c1d1496ffd419be0fb70b07bc7ce82..707e597d25ea3e9f1e1c7ab205bda6e584e3bd57 100644
--- a/nomad/parsing/backend.py
+++ b/nomad/parsing/backend.py
@@ -12,7 +12,7 @@
 # See the License for the specific language governing permissions and
 # limitations under the License.
 
-from typing import TextIO, Tuple, List, Any, Callable
+from typing import TextIO, Tuple, List, Any, Callable, Iterable
 from abc import ABCMeta, abstractmethod
 from io import StringIO
 import json
@@ -458,17 +458,19 @@ class LocalBackend(LegacyParserBackend):
         sections = self._delegate.results[meta_name]
         return [section.gIndex for section in sections]
 
-    @staticmethod
     def _write(
-            json_writer: JSONStreamWriter,
-            value: Any,
+            self, json_writer: JSONStreamWriter, value: Any,
             filter: Callable[[str, Any], Any] = None):
 
         if isinstance(value, list):
-            json_writer.open_array()
-            for item in value:
-                LocalBackend._write(json_writer, item, filter=filter)
-            json_writer.close_array()
+            if len(value) == 1 and isinstance(value[0], Section) and \
+                    not self._delegate.metaInfoEnv().infoKindEl(value[0].name).repeats:
+                self._write(json_writer, value[0], filter=filter)
+            else:
+                json_writer.open_array()
+                for item in value:
+                    self._write(json_writer, item, filter=filter)
+                json_writer.close_array()
 
         elif isinstance(value, Section):
             section = value
@@ -482,13 +484,35 @@ class LocalBackend(LegacyParserBackend):
 
                 if value is not None:
                     json_writer.key(name)
-                    LocalBackend._write(json_writer, value, filter=filter)
+                    self._write(json_writer, value, filter=filter)
 
             json_writer.close_object()
 
         else:
             json_writer.value(value)
 
+    def _obj(self, value: Any, filter: Callable[[str, Any], Any] = None) -> Any:
+        if isinstance(value, list):
+            if len(value) == 1 and isinstance(value[0], Section) and \
+                    not self._delegate.metaInfoEnv().infoKindEl(value[0].name).repeats:
+                return self._obj(value[0], filter=filter)
+            else:
+                return [self._obj(item, filter=filter) for item in value]
+
+        elif isinstance(value, Section):
+            section = value
+            obj = dict(_name=section.name, _gIndex=section.gIndex)
+            for name, value in section.items():
+                if filter is not None:
+                    value = filter(name, value)
+
+                if value is not None:
+                    obj[name] = self._obj(value, filter=filter)
+            return obj
+
+        else:
+            return JSONStreamWriter._json_serializable_value(value)
+
     @property
     def status(self) -> ParserStatus:
         """ Returns status and potential errors. """
@@ -499,6 +523,18 @@ class LocalBackend(LegacyParserBackend):
         self._errors = None
         self._warnings: List[str] = []
 
+    def metadata(self, sections: Iterable[str] = ['section_calculation_info', 'section_repository_info']) -> dict:
+        """
+        Returns an json serializable object with all data of the given sections.
+
+        Used to create the repository view of a parsed and normalized calculation.
+        TODO this is probably not the right spot for this functionality!
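+
+        Example (sketch)::
+
+            json.dump(backend.metadata(), out_file)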
+        """
+        data = dict(calc_id=self.get_value('calc_id'))
+        for section in sections:
+            data[section] = self._obj(self._delegate.results[section], lambda name, value: value if name != 'archive_processor_warnings' else None)
+        return data
+
     def write_json(self, out: TextIO, pretty=True, filter: Callable[[str, Any], Any] = None):
         """
         Writes the results stored in the backend after parsing in an 'archive'.json
@@ -513,12 +549,9 @@ class LocalBackend(LegacyParserBackend):
         json_writer.open_object()
 
         # TODO the root sections should be determined programatically
-        for root_section in ['section_run', 'section_calculation_info']:
+        for root_section in ['section_run', 'section_calculation_info', 'section_repository_info']:
             json_writer.key(root_section)
-            json_writer.open_array()
-            for run in self._delegate.results[root_section]:
-                LocalBackend._write(json_writer, run, filter=filter)
-            json_writer.close_array()
+            self._write(json_writer, self._delegate.results[root_section], filter=filter)
 
         json_writer.close_object()
         json_writer.close()
diff --git a/nomad/processing/__init__.py b/nomad/processing/__init__.py
index 7320734c7a9e9a8a57a520cc7c8a23d2d9de7ca3..13c1dcb20c704c7c0dc3e4e1aeda369ee07dd57c 100644
--- a/nomad/processing/__init__.py
+++ b/nomad/processing/__init__.py
@@ -67,5 +67,6 @@ classes do represent the processing state, as well as the respective entity.
     :members:
 """
 
-from nomad.processing.base import app, InvalidId, ProcNotRegistered, SUCCESS, FAILURE, RUNNING, PENDING
-from nomad.processing.data import Upload, Calc, NotAllowedDuringProcessing
+from nomad.processing.base import app, InvalidId, ProcNotRegistered, SUCCESS, FAILURE, \
+    RUNNING, PENDING, PROCESS_COMPLETED, PROCESS_RUNNING, ProcessAlreadyRunning
+from nomad.processing.data import Upload, Calc
diff --git a/nomad/processing/base.py b/nomad/processing/base.py
index d00d782e90e8c4ddf894c13c2b5c7591ed6e7e9a..e2d9f9c48a1971052bb16bde52adf31fd044c1bf 100644
--- a/nomad/processing/base.py
+++ b/nomad/processing/base.py
@@ -47,11 +47,16 @@ def setup(**kwargs):
 app = Celery('nomad.processing', broker=config.celery.broker_url)
 app.conf.update(worker_hijack_root_logger=False)
 
+CREATED = 'CREATED'
 PENDING = 'PENDING'
 RUNNING = 'RUNNING'
 FAILURE = 'FAILURE'
 SUCCESS = 'SUCCESS'
 
+PROCESS_CALLED = 'CALLED'
+PROCESS_RUNNING = 'RUNNING'
+PROCESS_COMPLETED = 'COMPLETED'
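+# an asynchronous process moves through PROCESS_CALLED -> PROCESS_RUNNING -> PROCESS_COMPLETED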
+
 
 class InvalidId(Exception): pass
 
@@ -59,6 +64,9 @@ class InvalidId(Exception): pass
 class ProcNotRegistered(Exception): pass
 
 
+class ProcessAlreadyRunning(Exception): pass
+
+
 class ProcMetaclass(TopLevelDocumentMetaclass):
     def __new__(cls, name, bases, attrs):
         cls = super().__new__(cls, name, bases, attrs)
@@ -87,19 +95,20 @@ class Proc(Document, metaclass=ProcMetaclass):
 
     Processing state will be persistet at appropriate
     times and must not be persistet manually. All attributes are stored to mongodb.
-    The class allows to render into a JSON serializable dict via :attr:`json_dict`.
 
     Possible processing states are PENDING, RUNNING, FAILURE, and SUCCESS.
 
     Attributes:
         current_task: the currently running or last completed task
-        status: the overall status of the processing
+        tasks_status: the overall status of the processing
         errors: a list of errors that happened during processing. Error fail a processing
             run
         warnings: a list of warnings that happened during processing. Warnings do not
             fail a processing run
         create_time: the time of creation (not the start of processing)
-        proc_time: the time that processing completed (successfully or not)
+        complete_time: the time that processing completed (successfully or not)
+        current_process: the currently or last run asynchronous process
+        process_status: the status of the currently or last run asyncronous process
     """
 
     meta: Any = {
@@ -110,37 +119,43 @@ class Proc(Document, metaclass=ProcMetaclass):
     """ the ordered list of tasks that comprise a processing run """
 
     current_task = StringField(default=None)
-    status = StringField(default='CREATED')
+    tasks_status = StringField(default=CREATED)
+    create_time = DateTimeField(required=True)
+    complete_time = DateTimeField()
 
     errors = ListField(StringField())
     warnings = ListField(StringField())
 
-    create_time = DateTimeField(required=True)
-    complete_time = DateTimeField()
-
-    _async_status = StringField(default='UNCALLED')
+    current_process = StringField(default=None)
+    process_status = StringField(default=None)
 
     @property
-    def completed(self) -> bool:
+    def tasks_running(self) -> bool:
         """ Returns True of the process has failed or succeeded. """
-        return self.status in [SUCCESS, FAILURE]
+        return self.tasks_status not in [SUCCESS, FAILURE]
+
+    @property
+    def process_running(self) -> bool:
+        """ Returns True of an asynchrounous process is currently running. """
+        return self.process_status is not None and self.process_status != PROCESS_COMPLETED
 
     def get_logger(self):
         return utils.get_logger(
-            'nomad.processing', current_task=self.current_task, process=self.__class__.__name__,
-            status=self.status)
+            'nomad.processing', current_task=self.current_task, proc=self.__class__.__name__,
+            current_process=self.current_process, process_status=self.process_status,
+            tasks_status=self.tasks_status)
 
     @classmethod
     def create(cls, **kwargs):
         """ Factory method that must be used instead of regular constructor. """
         assert cls.tasks is not None and len(cls.tasks) > 0, \
             """ the class attribute tasks must be overwritten with an actual list """
-        assert 'status' not in kwargs, \
+        assert 'tasks_status' not in kwargs, \
             """ do not set the status manually, its managed """
 
         kwargs.setdefault('create_time', datetime.now())
         self = cls(**kwargs)
-        self.status = PENDING if self.current_task is None else RUNNING
+        self.tasks_status = PENDING if self.current_task is None else RUNNING
         self.save()
 
         return self
@@ -179,11 +194,11 @@ class Proc(Document, metaclass=ProcMetaclass):
 
     def fail(self, *errors, log_level=logging.ERROR, **kwargs):
         """ Allows to fail the process. Takes strings or exceptions as args. """
-        assert not self.completed, 'Cannot fail a completed process.'
+        assert self.tasks_running, 'Cannot fail a completed process.'
 
         failed_with_exception = False
 
-        self.status = FAILURE
+        self.tasks_status = FAILURE
 
         logger = self.get_logger(**kwargs)
         for error in errors:
@@ -204,7 +219,7 @@ class Proc(Document, metaclass=ProcMetaclass):
 
     def warning(self, *warnings, log_level=logging.WARNING, **kwargs):
         """ Allows to save warnings. Takes strings or exceptions as args. """
-        assert not self.completed
+        assert self.tasks_running
 
         logger = self.get_logger(**kwargs)
 
@@ -222,13 +237,13 @@ class Proc(Document, metaclass=ProcMetaclass):
             assert tasks.index(task) == tasks.index(self.current_task) + 1, \
                 "tasks must be processed in the right order"
 
-        if self.status == FAILURE:
+        if self.tasks_status == FAILURE:
             return False
 
-        if self.status == PENDING:
+        if self.tasks_status == PENDING:
             assert self.current_task is None
             assert task == tasks[0]  # pylint: disable=E1136
-            self.status = RUNNING
+            self.tasks_status = RUNNING
             self.current_task = task
             self.get_logger().info('started process')
         else:
@@ -239,9 +254,10 @@ class Proc(Document, metaclass=ProcMetaclass):
         return True
 
     def _complete(self):
-        if self.status != FAILURE:
-            assert self.status == RUNNING, 'Can only complete a running process.'
-            self.status = SUCCESS
+        if self.tasks_status != FAILURE:
+            assert self.tasks_status == RUNNING, 'Can only complete a running process, process is %s' % self.tasks_status
+            self.tasks_status = SUCCESS
+            self.complete_time = datetime.now()
             self.save()
             self.get_logger().info('completed process')
 
@@ -250,26 +266,10 @@ class Proc(Document, metaclass=ProcMetaclass):
         Reloads the process constantly until it sees a completed process. Should be
         used with care as it can block indefinitely. Just intended for testing purposes.
         """
-        while not self.completed:
+        while self.tasks_running:
             time.sleep(interval)
             self.reload()
 
-    @property
-    def json_dict(self) -> dict:
-        """ A json serializable dictionary representation. """
-        data = {
-            'tasks': getattr(self.__class__, 'tasks'),
-            'current_task': self.current_task,
-            'status': self.status,
-            'completed': self.completed,
-            'errors': self.errors,
-            'warnings': self.warnings,
-            'create_time': self.create_time.isoformat() if self.create_time is not None else None,
-            'complete_time': self.complete_time.isoformat() if self.complete_time is not None else None,
-            '_async_status': self._async_status
-        }
-        return {key: value for key, value in data.items() if value is not None}
-
 
 class InvalidChordUsage(Exception): pass
 
@@ -289,6 +289,10 @@ class Chord(Proc):
     TODO it is vital that sub classes and children don't miss any calls. This might
     not be practical, because in reality processes might even fail to fail.
 
+    TODO in the current upload processing, the join functionality is not strictly necessary.
+    Nothing is done after join. We only need it to report the upload completed on API
+    request. We could check the join condition on each of these API queries.
+
     Attributes:
         total_children (int): the number of spawed children, -1 denotes that number was not
             saved yet
@@ -325,7 +329,7 @@ class Chord(Proc):
         total_children, joined = others
 
         self.get_logger().debug(
-            'Check for join', total_children=total_children,
+            'check for join', total_children=total_children,
             completed_children=completed_children, joined=joined)
 
         # check the join condition and raise errors if chord is in bad state
@@ -334,11 +338,11 @@ class Chord(Proc):
                 self.join()
                 self.joined = True
                 self.modify(joined=self.joined)
-                self.get_logger().debug('Chord is joined')
+                self.get_logger().debug('chord is joined')
             else:
-                raise InvalidChordUsage('Chord cannot be joined twice.')
+                raise InvalidChordUsage('chord cannot be joined twice.')
         elif completed_children > total_children and total_children != -1:
-            raise InvalidChordUsage('Chord counter is out of limits.')
+            raise InvalidChordUsage('chord counter is out of limits.')
 
     def join(self):
         """ Subclasses might overwrite to do something after all children have completed. """
@@ -384,7 +388,7 @@ def task(func):
     only be executed, if the process has not yet reached FAILURE state.
     """
     def wrapper(self, *args, **kwargs):
-        if self.status == 'FAILURE':
+        if self.tasks_status == FAILURE:
             return
 
         self._continue_with(func.__name__)
@@ -393,7 +397,7 @@ def task(func):
         except Exception as e:
             self.fail(e)
 
-        if self.__class__.tasks[-1] == self.current_task and not self.completed:
+        if self.__class__.tasks[-1] == self.current_task and self.tasks_running:
             self._complete()
 
     setattr(wrapper, '__task_name', func.__name__)
@@ -432,21 +436,28 @@ def proc_task(task, cls_name, self_id, func_attr):
         cls = all_proc_cls.get(cls_name, None)
 
     if cls is None:
-        logger.error('document not a subcass of Proc')
+        logger.critical('document not a subclass of Proc')
         raise ProcNotRegistered('document %s not a subclass of Proc' % cls_name)
 
     # get the process instance
     try:
-        self = cls.get(self_id)
-    except KeyError as e:
-        logger.warning('called object is missing')
-        raise task.retry(exc=e, countdown=3)
+        try:
+            self = cls.get(self_id)
+        except KeyError as e:
+            logger.warning('called object is missing')
+            raise task.retry(exc=e, countdown=3)
+    except KeyError:
+        logger.critical('called object is missing, retries exceeded')
+        return
+
+    logger = self.get_logger()
 
     # get the process function
     func = getattr(self, func_attr, None)
     if func is None:
         logger.error('called function not a function of proc class')
         self.fail('called function %s is not a function of proc class %s' % (func_attr, cls_name))
+        self.process_status = PROCESS_COMPLETED
+        self.save()
         return
 
     # unwrap the process decorator
@@ -454,14 +465,21 @@ def proc_task(task, cls_name, self_id, func_attr):
     if func is None:
         logger.error('called function was not decorated with @process')
         self.fail('called function %s was not decorated with @process' % func_attr)
+        self.process_status = PROCESS_COMPLETED
+        self.save()
         return
 
     # call the process function
+    deleted = False
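+    # a process function may return True to signal that it deleted its own proc document,
+    # in which case the process status must not be saved anymore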
     try:
-        self._async_status = 'RECEIVED-%s' % func.__name__
-        func(self)
+        self.process_status = PROCESS_RUNNING
+        deleted = func(self)
     except Exception as e:
         self.fail(e)
+    finally:
+        if deleted is None or not deleted:
+            self.process_status = PROCESS_COMPLETED
+            self.save()
 
 
 def process(func):
@@ -469,11 +487,17 @@ def process(func):
     The decorator for process functions that will be called async via celery.
     All calls to the decorated method will result in celery task requests.
     To transfer state, the instance will be saved to the database and loading on
-    the celery task worker. Process methods can call other (process) functions/methods.
+    the celery task worker. Process methods can call other (process) functions/methods on
+    other :class:`Proc` instances. Each :class:`Proc` instance can only run one
+    async process at a time.
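+
+    Example (a sketch; ``MyProc`` is hypothetical and would also define ``tasks``)::
+
+        class MyProc(Proc):
+            @process
+            def do_something(self):
+                pass
+
+        MyProc.create().do_something()  # executed asynchronously via a celery task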
     """
     def wrapper(self, *args, **kwargs):
         assert len(args) == 0 and len(kwargs) == 0, 'process functions must not have arguments'
-        self._async_status = 'CALLED-%s' % func.__name__
+        if self.process_running:
+            raise ProcessAlreadyRunning
+
+        self.current_process = func.__name__
+        self.process_status = PROCESS_CALLED
         self.save()
 
         self_id = self.id.__str__()
diff --git a/nomad/processing/data.py b/nomad/processing/data.py
index 31d5255588bf89ccea641f7b10d93b0dc260b217..9ddd11b05e431c0e3de9f285fb6cf947b366ddd8 100644
--- a/nomad/processing/data.py
+++ b/nomad/processing/data.py
@@ -25,28 +25,19 @@ calculations, and files
 """
 
 from typing import List, Any, ContextManager, Tuple, Generator
-from datetime import datetime
-from elasticsearch.exceptions import NotFoundError
-from mongoengine import StringField, BooleanField, DateTimeField, DictField, IntField
+from mongoengine import StringField, DateTimeField, DictField
 import logging
-import base64
-import time
 from structlog import wrap_logger
 from contextlib import contextmanager
 
-from nomad import config, utils, coe_repo
-from nomad.files import UploadFile, ArchiveFile, ArchiveLogFile, File
-from nomad.repo import RepoCalc
-from nomad.processing.base import Proc, Chord, process, task, PENDING, SUCCESS, FAILURE, RUNNING
+from nomad import utils, coe_repo, datamodel
+from nomad.files import PathObject, ArchiveBasedStagingUploadFiles, ExtractError, Calc as FilesCalc
+from nomad.processing.base import Proc, Chord, process, task, PENDING, SUCCESS, FAILURE
 from nomad.parsing import parsers, parser_dict
 from nomad.normalizing import normalizers
-from nomad.utils import lnr
 
 
-class NotAllowedDuringProcessing(Exception): pass
-
-
-class Calc(Proc):
+class Calc(Proc, datamodel.Calc):
     """
     Instances of this class represent calculations. This class manages the elastic
     search index entry, files, and archive for the respective calculation.
@@ -57,39 +48,37 @@ class Calc(Proc):
     while parsing, including ``program_name``, ``program_version``, etc.
 
     Attributes:
-        archive_id: the hash based archive id of the calc
+        calc_id: the calc_id of this calc
         parser: the name of the parser used to process this calc
         upload_id: the id of the upload used to create this calculation
         mainfile: the mainfile (including path in upload) that was used to create this calc
-        mainfile_tmp_path: path to the mainfile extracted for processing
     """
-    archive_id = StringField(primary_key=True)
+    calc_id = StringField(primary_key=True)
     upload_id = StringField()
     mainfile = StringField()
     parser = StringField()
-    mainfile_tmp_path = StringField()
 
     meta: Any = {
         'indices': [
-            'upload_id', 'mainfile', 'code', 'parser', 'status'
+            'upload_id', 'mainfile', 'code', 'parser', 'tasks_status'
         ]
     }
 
     def __init__(self, *args, **kwargs):
         super().__init__(*args, **kwargs)
         self._parser_backend = None
-        self._upload = None
+        self._upload: Upload = None
+        self._upload_files: ArchiveBasedStagingUploadFiles = None
         self._calc_proc_logwriter = None
-        self._calc_proc_logfile = None
         self._calc_proc_logwriter_ctx: ContextManager = None
 
     @classmethod
     def get(cls, id):
-        return cls.get_by_id(id, 'archive_id')
+        return cls.get_by_id(id, 'calc_id')
 
     @property
-    def mainfile_file(self) -> File:
-        return File(self.mainfile_tmp_path)
+    def mainfile_file(self) -> PathObject:
+        return self.upload_files.raw_file_object(self.mainfile)
 
     @property
     def upload(self) -> 'Upload':
@@ -97,34 +86,16 @@ class Calc(Proc):
             self._upload = Upload.get(self.upload_id)
         return self._upload
 
-    def delete(self):
-        """
-        Delete this calculation and all associated data. This includes all files,
-        the archive, and this search index entry.
-        TODO is this needed? Or do we always delete hole uploads in bulk.
-        """
-        # delete the archive
-        if self.archive_id is not None:
-            ArchiveFile(self.archive_id).delete()
-
-        # delete the search index entry
-        try:
-            elastic_entry = RepoCalc.get(self.archive_id)
-            if elastic_entry is not None:
-                elastic_entry.delete()
-        except NotFoundError:
-            pass
-
-        # delete this mongo document
-        super().delete()
+    @property
+    def upload_files(self) -> ArchiveBasedStagingUploadFiles:
+        if not self._upload_files:
+            self._upload_files = ArchiveBasedStagingUploadFiles(self.upload_id, is_authorized=lambda: True, local_path=self.upload.local_path)
+        return self._upload_files
 
     def get_logger(self, **kwargs):
-        upload_hash, calc_hash = self.archive_id.split('/')
         logger = super().get_logger()
         logger = logger.bind(
-            upload_id=self.upload_id, mainfile=self.mainfile,
-            upload_hash=upload_hash, calc_hash=calc_hash,
-            archive_id=self.archive_id, **kwargs)
+            upload_id=self.upload_id, mainfile=self.mainfile, calc_id=self.calc_id, **kwargs)
 
         return logger
 
@@ -136,8 +107,7 @@ class Calc(Proc):
         logger = self.get_logger(**kwargs)
 
         if self._calc_proc_logwriter is None:
-            self._calc_proc_logfile = ArchiveLogFile(self.archive_id)
-            self._calc_proc_logwriter_ctx = self._calc_proc_logfile.open('wt')
+            self._calc_proc_logwriter_ctx = self.upload_files.archive_log_file(self.calc_id, 'wt')
             self._calc_proc_logwriter = self._calc_proc_logwriter_ctx.__enter__()  # pylint: disable=E1101
 
         def save_to_calc_log(logger, method_name, event_dict):
@@ -154,20 +124,8 @@ class Calc(Proc):
 
         return wrap_logger(logger, processors=[save_to_calc_log])
 
-    @property
-    def json_dict(self):
-        """ A json serializable dictionary representation. """
-        data = {
-            'archive_id': self.archive_id,
-            'mainfile': self.mainfile,
-            'upload_id': self.upload_id,
-            'parser': self.parser
-        }
-        data.update(super().json_dict)
-        return {key: value for key, value in data.items() if value is not None}
-
     @process
-    def process(self):
+    def process_calc(self):
         logger = self.get_logger()
         if self.upload is None:
             logger.error('calculation upload does not exist')
@@ -195,11 +153,13 @@ class Calc(Proc):
         parser = parser_dict[self.parser]
 
         with utils.timer(logger, 'parser executed', input_size=self.mainfile_file.size):
-            self._parser_backend = parser.run(self.mainfile_tmp_path, logger=logger)
+            self._parser_backend = parser.run(
+                self.upload_files.raw_file_object(self.mainfile).os_path, logger=logger)
 
         self._parser_backend.openNonOverlappingSection('section_calculation_info')
         self._parser_backend.addValue('upload_id', self.upload_id)
-        self._parser_backend.addValue('archive_id', self.archive_id)
+        self._parser_backend.addValue('calc_id', self.calc_id)
+        self._parser_backend.addValue('calc_hash', self.upload_files.calc_hash(self.mainfile))
         self._parser_backend.addValue('main_file', self.mainfile)
         self._parser_backend.addValue('parser_name', self.parser)
 
@@ -213,6 +173,12 @@ class Calc(Proc):
 
         self._parser_backend.closeNonOverlappingSection('section_calculation_info')
 
+        self._parser_backend.openNonOverlappingSection('section_repository_info')
+        self._parser_backend.addValue('repository_archive_gid', '%s/%s' % (self.upload_id, self.calc_id))
+        self._parser_backend.addValue(
+            'repository_filepaths', self.upload_files.calc_files(self.mainfile))
+        self._parser_backend.closeNonOverlappingSection('section_repository_info')
+
         self.add_processor_info(self.parser)
 
     @contextmanager
@@ -235,7 +201,7 @@ class Calc(Proc):
             else:
                 self._parser_backend.addValue('archive_processor_status', 'Success')
         else:
             errors = self._parser_backend.status[1]
             self._parser_backend.addValue('archive_processor_error', str(errors))
 
         self._parser_backend.closeNonOverlappingSection('section_archive_processing_info')
@@ -267,34 +233,18 @@ class Calc(Proc):
     def archiving(self):
         logger = self.get_logger()
 
-        upload_hash, calc_hash = self.archive_id.split('/')
-        additional = dict(
-            mainfile=self.mainfile,
-            upload_time=self.upload.upload_time,
-            staging=True,
-            restricted=False,
-            user_id=self.upload.user_id,
-            aux_files=list(self.upload.upload_file.get_siblings(self.mainfile)))
-
+        # persist the repository metadata
         with utils.timer(logger, 'indexed', step='index'):
-            # persist to elastic search
-            RepoCalc.create_from_backend(
-                self._parser_backend,
-                additional=additional,
-                upload_hash=upload_hash,
-                calc_hash=calc_hash,
-                upload_id=self.upload_id).persist()
+            self.upload_files.metadata.insert(self._parser_backend.metadata())
 
+        # persist the archive
         with utils.timer(
                 logger, 'archived', step='archive',
                 input_size=self.mainfile_file.size) as log_data:
-
-            # persist the archive
-            archive_file = ArchiveFile(self.archive_id)
-            with archive_file.write_archive_json() as out:
+            with self.upload_files.archive_file(self.calc_id, 'wt') as out:
                 self._parser_backend.write_json(out, pretty=True)
 
-            log_data.update(archive_size=archive_file.size)
+            log_data.update(archive_size=self.upload_files.archive_file_object(self.calc_id).size)
 
         # close loghandler
         if self._calc_proc_logwriter is not None:
@@ -304,10 +254,16 @@ class Calc(Proc):
                 self._calc_proc_logwriter_ctx.__exit__(None, None, None)  # pylint: disable=E1101
                 self._calc_proc_logwriter = None
 
-                log_data.update(log_size=self._calc_proc_logfile.size)
+                log_data.update(log_size=self.upload_files.archive_log_file_object(self.calc_id).size)
 
+    def to_calc_with_metadata(self):
+        return self.to(FilesCalc).to_calc_with_metadata()
 
-class Upload(Chord):
+
+datamodel.CalcWithMetadata.register_mapping(Calc, Calc.to_calc_with_metadata)
+
+
+class Upload(Chord, datamodel.Upload):
     """
     Represents uploads in the databases. Provides persistence access to the files storage,
     and processing state.
@@ -315,12 +271,9 @@ class Upload(Chord):
     Attributes:
         name: optional user provided upload name
         local_path: optional local path, e.g. for files that are already somewhere on the server
-        additional_metadata: optional user provided additional meta data
+        metadata: optional user provided additional meta data
         upload_id: the upload id generated by the database
-        in_staging: true if the upload is still in staging and can be edited by the uploader
-        is_private: true if the upload and its derivitaves are only visible to the uploader
         upload_time: the timestamp when the system realised the upload
-        upload_hash: the hash of the uploaded file
         user_id: the id of the user that created this upload
     """
     id_field = 'upload_id'
@@ -329,31 +282,19 @@ class Upload(Chord):
 
     name = StringField(default=None)
     local_path = StringField(default=None)
-    additional_metadata = DictField(default=None)
-
-    in_staging = BooleanField(default=True)
-    is_private = BooleanField(default=False)
-
+    metadata = DictField(default=None)
     upload_time = DateTimeField()
-    upload_hash = StringField(default=None)
-
     user_id = StringField(required=True)
-    upload_url = StringField(default=None)
-    upload_command = StringField(default=None)
-
-    coe_repo_upload_id = IntField(default=None)
-
-    _initiated_parsers = IntField(default=-1)
 
     meta: Any = {
         'indexes': [
-            'upload_hash', 'user_id', 'status'
+            'user_id', 'tasks_status'
         ]
     }
 
     def __init__(self, **kwargs):
         super().__init__(**kwargs)
-        self._upload_file = None
+        self._upload_files: ArchiveBasedStagingUploadFiles = None
 
     @classmethod
     def get(cls, id):
@@ -362,50 +303,17 @@ class Upload(Chord):
     @classmethod
     def user_uploads(cls, user: coe_repo.User) -> List['Upload']:
         """ Returns all uploads for the given user. Currently returns all uploads. """
-        return cls.objects(user_id=str(user.user_id), in_staging=True)
+        return cls.objects(user_id=str(user.user_id))
+
+    @property
+    def uploader(self):
+        return coe_repo.User.from_user_id(self.user_id)
 
     def get_logger(self, **kwargs):
         logger = super().get_logger()
         logger = logger.bind(upload_id=self.upload_id, **kwargs)
         return logger
 
-    def delete(self):
-        logger = self.get_logger(task='delete')
-
-        if not (self.completed or self.is_stale or self.current_task == 'uploading'):
-            raise NotAllowedDuringProcessing()
-
-        with lnr(logger, 'delete upload file'):
-            try:
-                UploadFile(self.upload_id, local_path=self.local_path).delete()
-            except KeyError:
-                if self.current_task == 'uploading':
-                    logger.debug(
-                        'Upload exist, but file does not exist. '
-                        'It was probably aborted and deleted.')
-                else:
-                    logger.debug('Upload exist, but uploaded file does not exist.')
-
-        with lnr(logger, 'deleting calcs'):
-            # delete archive files
-            ArchiveFile.delete_archives(upload_hash=self.upload_hash)
-
-            # delete repo entries
-            RepoCalc.delete_upload(upload_id=self.upload_id)
-
-            # delete calc processings
-            Calc.objects(upload_id=self.upload_id).delete()
-
-        with lnr(logger, 'deleting upload'):
-            super().delete()
-
-    @classmethod
-    def _external_objects_url(cls, url):
-        """ Replaces the given internal object storage url with an URL that allows
-            external access.
-        """
-        return 'http://%s:%s%s%s' % (config.services.api_host, config.services.api_port, config.services.api_base_path, url)
-
     @classmethod
     def create(cls, **kwargs) -> 'Upload':
         """
@@ -423,49 +331,63 @@ class Upload(Chord):
         kwargs.update(user_id=str(user.user_id))
         self = super().create(**kwargs)
 
-        basic_auth_token = base64.b64encode(b'%s:' % user.get_auth_token()).decode('utf-8')
-
-        self.upload_url = cls._external_objects_url('/uploads/%s/file' % self.upload_id)
-        self.upload_command = 'curl -H "Authorization: Basic %s" "%s" --upload-file local_file' % (
-            basic_auth_token, self.upload_url)
-
         self._continue_with('uploading')
 
         return self
 
-    @property
-    def is_stale(self) -> bool:
-        if self.current_task == 'uploading' and self.upload_time is None:
-            return (datetime.now() - self.create_time).days > 1
-        else:
-            return False
+    def delete(self):
+        """ Deletes this upload process state entry and its calcs. """
+        Calc.objects(upload_id=self.upload_id).delete()
+        super().delete()
 
-    def unstage(self):
-        self.get_logger().info('unstage')
-        self.in_staging = False
-        RepoCalc.unstage(upload_id=self.upload_id)
-        coe_repo.add_upload(self, restricted=False)  # TODO allow users to choose restricted
-        self.save()
+    @process
+    def delete_upload(self):
+        """
+        Deletes the upload, including its processing state and
+        staging files.
+        """
+        logger = self.get_logger()
 
-    @property
-    def json_dict(self) -> dict:
-        """ A json serializable dictionary representation. """
-        data = {
-            'name': self.name,
-            'local_path': self.local_path,
-            'additional_metadata': self.additional_metadata,
-            'upload_id': self.upload_id,
-            'upload_hash': self.upload_hash,
-            'upload_url': self.upload_url,
-            'upload_command': self.upload_command,
-            'upload_time': self.upload_time.isoformat() if self.upload_time is not None else None,
-            'is_stale': self.is_stale,
-        }
-        data.update(super().json_dict)
-        return {key: value for key, value in data.items() if value is not None}
+        with utils.lnr(logger, 'staged upload delete failed'):
+            with utils.timer(
+                    logger, 'staged upload deleted', step='delete',
+                    upload_size=self.upload_files.size):
+                self.upload_files.delete()
+                self.delete()
+
+        return True  # do not save the process status on the deleted upload
+
+    @process
+    def commit_upload(self):
+        """
+        Moves the upload out of staging to add it to the coe repository. It will
+        pack the staging upload files into public upload files, add entries to the
+        coe repository db, and remove this instance and its calculations from the
+        processing state db.
+        """
+        logger = self.get_logger()
+
+        with utils.lnr(logger, 'commit failed'):
+            with utils.timer(
+                    logger, 'upload added to repository', step='commit',
+                    upload_size=self.upload_files.size):
+                coe_repo.Upload.add(self, self.metadata)
+
+            with utils.timer(
+                    logger, 'staged upload files packed', step='commit',
+                    upload_size=self.upload_files.size):
+                self.upload_files.pack()
+
+            with utils.timer(
+                    logger, 'staged upload deleted', step='commit',
+                    upload_size=self.upload_files.size):
+                self.upload_files.delete()
+                self.delete()
+
+        return True  # do not save the process status on the deleted upload
 
     @process
-    def process(self):
+    def process_upload(self):
         self.extracting()
         self.parse_all()
 
@@ -474,11 +396,10 @@ class Upload(Chord):
         pass
 
     @property
-    def upload_file(self):
-        """ The :class:`UploadFile` instance that represents the uploaded file of this upload. """
-        if not self._upload_file:
-            self._upload_file = UploadFile(self.upload_id, local_path=self.local_path)
-        return self._upload_file
+    def upload_files(self) -> ArchiveBasedStagingUploadFiles:
+        if not self._upload_files:
+            self._upload_files = ArchiveBasedStagingUploadFiles(self.upload_id, is_authorized=lambda: True, local_path=self.local_path)
+        return self._upload_files
 
     @task
     def extracting(self):
@@ -492,25 +413,16 @@ class Upload(Chord):
         try:
             with utils.timer(
                     logger, 'upload extracted', step='extracting',
-                    upload_size=self.upload_file.size):
-                self.upload_file.extract()
-        except KeyError as e:
-            self.fail('process request for non existing upload', level=logging.INFO)
+                    upload_size=self.upload_files.size):
+                self.upload_files.extract()
+        except KeyError:
+            self.fail('processing requested for non-existing upload', log_level=logging.ERROR)
             return
-
-        # create and save a hash for the upload
-        try:
-            self.upload_hash = self.upload_file.upload_hash()
-        except Exception as e:
-            self.fail('could not create upload hash', e)
+        except ExtractError:
+            self.fail('bad .zip/.tar file', log_level=logging.INFO)
             return
 
-        # check if the file was already uploaded and processed before
-        if RepoCalc.upload_exists(self.upload_hash):
-            self.fail('The same file was already uploaded and processed.', level=logging.INFO)
-            return
-
-    def match_mainfiles(self) -> Generator[Tuple[File, str, object], None, None]:
+    def match_mainfiles(self) -> Generator[Tuple[str, object], None, None]:
         """
         Generator function that matches all files in the upload to all parsers to
         determine the upload's mainfiles.
@@ -518,13 +430,12 @@ class Upload(Chord):
         Returns:
             Tuples of filename and parser
         """
-        for filename in self.upload_file.filelist:
-            potential_mainfile = self.upload_file.get_file(filename)
+        for filename in self.upload_files.raw_file_manifest():
             for parser in parsers:
                 try:
-                    with potential_mainfile.open('r') as mainfile_f:
+                    with self.upload_files.raw_file(filename) as mainfile_f:
                         if parser.is_mainfile(filename, lambda fn: mainfile_f):
-                            yield potential_mainfile, filename, parser
+                            yield filename, parser
                 except Exception as e:
                     self.get_logger().error(
                         'exception while matching pot. mainfile',
@@ -541,17 +452,15 @@ class Upload(Chord):
         # TODO: deal with multiple possible parser specs
         with utils.timer(
                 logger, 'upload extracted', step='matching',
-                upload_size=self.upload_file.size,
-                upload_filecount=len(self.upload_file.filelist)):
+                upload_size=self.upload_files.size):
             total_calcs = 0
-            for mainfile, filename, parser in self.match_mainfiles():
+            for filename, parser in self.match_mainfiles():
                 calc = Calc.create(
-                    archive_id='%s/%s' % (self.upload_hash, utils.hash(filename)),
+                    calc_id=self.upload_files.calc_id(filename),
                     mainfile=filename, parser=parser.name,
-                    mainfile_tmp_path=mainfile.os_path,
                     upload_id=self.upload_id)
 
-                calc.process()
+                calc.process_calc()
                 total_calcs += 1
 
         # have to save the total_calcs information for chord management
@@ -562,26 +471,12 @@ class Upload(Chord):
 
     @task
     def cleanup(self):
-        try:
-            upload = UploadFile(self.upload_id, local_path=self.local_path)
-            with utils.timer(
-                    self.get_logger(), 'upload persisted', step='cleaning',
-                    upload_size=upload.size):
-                upload.persist()
-
-            with utils.timer(
-                    self.get_logger(), 'processing cleaned up', step='cleaning',
-                    upload_size=upload.size):
-                upload.remove_extract()
-        except KeyError as e:
-            self.fail('Upload does not exist', exc_info=e)
-            return
-
-        self.get_logger().debug('closed upload')
+        # nothing to do with the current processing setup
+        pass
 
     @property
     def processed_calcs(self):
-        return Calc.objects(upload_id=self.upload_id, status__in=[SUCCESS, FAILURE]).count()
+        return Calc.objects(upload_id=self.upload_id, tasks_status__in=[SUCCESS, FAILURE]).count()
 
     @property
     def total_calcs(self):
@@ -589,31 +484,15 @@ class Upload(Chord):
 
     @property
     def failed_calcs(self):
-        return Calc.objects(upload_id=self.upload_id, status=FAILURE).count()
+        return Calc.objects(upload_id=self.upload_id, tasks_status=FAILURE).count()
 
     @property
     def pending_calcs(self):
-        return Calc.objects(upload_id=self.upload_id, status=PENDING).count()
+        return Calc.objects(upload_id=self.upload_id, tasks_status=PENDING).count()
 
     def all_calcs(self, start, end, order_by='mainfile'):
         return Calc.objects(upload_id=self.upload_id)[start:end].order_by(order_by)
 
-    @staticmethod
-    def repair_all():
-        """
-        Utitlity function that will look for suspiciously looking conditions in
-        all uncompleted downloads. It ain't a perfect world.
-        """
-        # TODO this was added as a quick fix to #37.
-        # Even though it might be strictly necessary, there should be a tested backup
-        # solution for it Chords to not work properly due to failed to fail processings
-        uploads = Upload.objects(status__in=[PENDING, RUNNING])
-        for upload in uploads:
-            completed = upload.processed_calcs
-            total = upload.total
-            pending = upload.pending_calcs
-
-            if completed + pending == total:
-                time.sleep(2)
-                if pending == upload.pending_calcs:
-                    Calc.objects(upload_id=upload.upload_id, status=PENDING).delete()
+    @property
+    def calcs(self):
+        return Calc.objects(upload_id=self.upload_id, tasks_status=SUCCESS)
diff --git a/nomad/repo.py b/nomad/repo.py
deleted file mode 100644
index 50737e791246fc1229e3425c3c57c06fe28e4e3a..0000000000000000000000000000000000000000
--- a/nomad/repo.py
+++ /dev/null
@@ -1,235 +0,0 @@
-# Copyright 2018 Markus Scheidgen
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-#   http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an"AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-
-"""
-This module is about maintaining the repository search index and providing all
-data to the repository related parts of nomad.
-
-We use *elasticsearch_dsl* to interface with elastic search. The class :class:`RepoCalc`
-is an elasticsearch_dsl document that is used to represent repository index entries.
-
-.. autoclass:: nomad.repo.RepoCalc
-        :members:
-"""
-
-from typing import Dict, Any
-from elasticsearch.exceptions import ConflictError, ConnectionTimeout
-from elasticsearch_dsl import Document as ElasticDocument, Search, Date, Keyword, Boolean
-from datetime import datetime
-import time
-
-from nomad import config, infrastructure
-from nomad.parsing import LocalBackend
-from nomad.utils import get_logger
-
-logger = get_logger(__name__)
-
-key_mappings = {
-    'basis_set_type': 'program_basis_set_type',
-    'chemical_composition': 'chemical_composition_bulk_reduced'
-}
-
-
-class AlreadyExists(Exception): pass
-
-
-class RepoCalc(ElasticDocument):
-    """
-    Elastic search document that represents a calculation. It is supposed to be a
-    component of :class:`Calc`. Should only be created by its parent :class:`Calc`
-    instance and only via the :func:`create_from_backend` factory method.
-    """
-    class Index:
-        name = config.elastic.index_name
-
-    calc_hash = Keyword()
-    mainfile = Keyword()
-    upload_hash = Keyword()
-    upload_id = Keyword()
-
-    upload_time = Date()
-
-    staging = Boolean()
-    restricted = Boolean()
-    user_id = Keyword()
-
-    program_name = Keyword()
-    program_version = Keyword()
-
-    chemical_composition = Keyword()
-    basis_set_type = Keyword()
-    atom_species = Keyword()
-    system_type = Keyword()
-    crystal_system = Keyword()
-    space_group_number = Keyword()
-    configuration_raw_gid = Keyword()
-    XC_functional_name = Keyword()
-
-    aux_files = Keyword()
-
-    @property
-    def archive_id(self) -> str:
-        """ The unique id for this calculation. """
-        return '%s/%s' % (self.upload_hash, self.calc_hash)
-
-    @classmethod
-    def create_from_backend(
-            cls, backend: LocalBackend, additional: Dict[str, Any],
-            upload_id: str, upload_hash: str, calc_hash: str) -> 'RepoCalc':
-        """
-        Create a new calculation instance in elastic search. The data from the given backend
-        will be used. Additional meta-data can be given as *kwargs*. ``upload_id``,
-        ``upload_hash``, and ``calc_hash`` are mandatory.
-
-        Arguments:
-            backend: The parsing/normalizing backend that contains the calculation data.
-            additional: Additional arguments not stored in the backend. E.g. ``user_id``,
-                ``staging``, ``restricted``
-            upload_hash: The upload hash of the originating upload.
-            upload_id: The upload id of the originating upload.
-            calc_hash: The upload unique hash for this calculation.
-
-        Returns:
-            The created instance.
-        """
-        assert upload_hash is not None and calc_hash is not None and upload_id is not None
-        additional.update(dict(upload_hash=upload_hash, calc_hash=calc_hash, upload_id=upload_id))
-
-        # prepare the entry with all necessary properties from the backend
-        calc = cls(meta=dict(id='%s/%s' % (upload_hash, calc_hash)))
-        for property in cls._doc_type.mapping:
-            mapped_property = key_mappings.get(property, property)
-
-            if mapped_property in additional:
-                value = additional[mapped_property]
-            else:
-                try:
-                    value = backend.get_value(mapped_property, 0)
-                    if value is None:
-                        raise KeyError
-                except KeyError:
-                    try:
-                        program_name = backend.get_value('program_name', 0)
-                    except KeyError:
-                        program_name = 'unknown'
-                    logger.warning(
-                        'Missing property value', property=mapped_property, upload_id=upload_id,
-                        upload_hash=upload_hash, calc_hash=calc_hash, code=program_name)
-                    continue
-
-            setattr(calc, property, value)
-
-        return calc
-
-    def persist(self, **kwargs):
-        """
-            Persist this entry to elastic search. Kwargs are passed to elastic search.
-
-            Raises:
-                AlreadyExists: If the calculation already exists in elastic search. We use
-                    the elastic document lock here. The elastic document is IDed via the
-                    ``archive_id``.
-        """
-        try:
-            # In practive es operation might fail due to timeout under heavy loads/
-            # bad configuration. Retries with a small delay is a pragmatic solution.
-            e_after_retries = None
-            for _ in range(0, 2):
-                try:
-                    self.save(op_type='create', **kwargs)
-                    e_after_retries = None
-                    break
-                except ConnectionTimeout as e:
-                    e_after_retries = e
-                    time.sleep(1)
-                except ConflictError as e:  # this should never happen, but happens
-                    e_after_retries = e
-                    time.sleep(1)
-                else:
-                    raise e
-            if e_after_retries is not None:
-                # if we had and exception and could not fix with retries, throw it
-                raise e_after_retries  # pylint: disable=E0702
-        except ConflictError:
-            raise AlreadyExists('Calculation %s does already exist.' % (self.archive_id))
-
-    @staticmethod
-    def delete_upload(upload_id):
-        """ Deletes all repo entries of the given upload. """
-        RepoCalc.search().query('match', upload_id=upload_id).delete()
-
-    @classmethod
-    def unstage(cls, upload_id, staging=False):
-        """ Update the staging property for all repo entries of the given upload. """
-        cls.update_by_query(upload_id, {
-            'inline': 'ctx._source.staging=%s' % ('true' if staging else 'false'),
-            'lang': 'painless'
-        })
-
-    @classmethod
-    def update_upload(cls, upload_id, **kwargs):
-        """ Update all entries of given upload with keyword args. """
-        for calc in RepoCalc.search().query('match', upload_id=upload_id):
-            calc.update(**kwargs)
-
-    @classmethod
-    def update_by_query(cls, upload_id, script):
-        """ Update all entries of a given upload via elastic script. """
-        index = cls._default_index()
-        doc_type = cls._doc_type.name
-        conn = cls._get_connection()
-        body = {
-            'script': script,
-            'query': {
-                'match': {
-                    'upload_id': upload_id
-                }
-            }
-        }
-        conn.update_by_query(index, doc_type=[doc_type], body=body)
-
-    @staticmethod
-    def es_search(body):
-        """ Perform an elasticsearch and not elasticsearch_dsl search on the Calc index. """
-        return infrastructure.elastic_client.search(index=config.elastic.index_name, body=body)
-
-    @staticmethod
-    def upload_exists(upload_hash):
-        """ Returns true if there are already calcs from the given upload. """
-        # TODO this is deprecated and should be varified via repository files
-        search = Search(using=infrastructure.elastic_client, index=config.elastic.index_name) \
-            .query('match', upload_hash=upload_hash) \
-            .execute()
-
-        return len(search) > 0
-
-    @staticmethod
-    def upload_calcs(upload_id):
-        """ Returns an iterable over all entries for the given upload_id. """
-        return Search(using=infrastructure.elastic_client, index=config.elastic.index_name) \
-            .query('match', upload_id=upload_id) \
-            .scan()
-
-    @property
-    def json_dict(self):
-        """ A json serializable dictionary representation. """
-        data = self.to_dict()
-
-        upload_time = data.get('upload_time', None)
-        if upload_time is not None and isinstance(upload_time, datetime):
-            data['upload_time'] = data['upload_time'].isoformat()
-
-        data['archive_id'] = self.archive_id
-
-        return {key: value for key, value in data.items() if value is not None}
diff --git a/nomad/search.py b/nomad/search.py
new file mode 100644
index 0000000000000000000000000000000000000000..78d6bff685240e04eb65e9e45183a1a732dd8819
--- /dev/null
+++ b/nomad/search.py
@@ -0,0 +1,185 @@
+# Copyright 2018 Markus Scheidgen
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#   http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+"""
+This module represents calculations in elastic search.
+"""
+
+from elasticsearch.exceptions import ConflictError, ConnectionTimeout
+from datetime import datetime
+import time
+from elasticsearch_dsl import Document, InnerDoc, Keyword, Text, Long, Integer, Date, \
+    Nested
+
+from nomad import config, datamodel, coe_repo, infrastructure
+
+
+class AlreadyExists(Exception): pass
+
+
+class UserData(InnerDoc):
+    repository_open_date = Date(format='epoch_millis')
+    repository_access_now = Keyword()
+    repository_comment = Keyword()
+
+    section_citation = Nested(properties=dict(
+        citation_repo_id=Integer(),
+        citation_value=Keyword()
+    ))
+
+    section_author_info = Nested(properties=dict(
+        author_repo_id=Integer(index=True),
+        author_first_name=Keyword(),
+        author_last_name=Keyword(),
+        author_name=Text()
+    ))
+
+    section_shared_with = Nested(properties=dict(
+        shared_with_repo_id=Keyword(),
+        shared_with_first_name=Keyword(),
+        shared_with_last_name=Keyword(),
+        shared_with_username=Keyword(),
+        shared_with_name=Text()
+    ))
+
+    section_repository_dataset = Nested(properties=dict(
+        dataset_checksum=Keyword(),
+        dataset_pid=Keyword(),
+        dataset_name=Keyword(),
+        dataset_parent_pid=Keyword(),
+        dataset_calc_id=Long(),
+        dataset_parent_calc_id=Long(),
+        section_dataset_doi=Nested(properties=dict(
+            dataset_doi_name=Keyword(),
+            dataset_doi_id=Long()))
+    ))
+
+    def fill_from_coe_repo(self, calc: coe_repo.Calc):
+        pass
+
+
+class CalcData(InnerDoc):
+    repository_checksum = Keyword()
+    repository_chemical_formula = Keyword()
+    repository_parser_id = Keyword()
+    repository_atomic_elements = Keyword(store=True)
+    repository_atomic_elements_count = Integer(store=True)
+    repository_basis_set_type = Keyword(store=True)
+    repository_code_version = Keyword(store=True)
+    repository_crystal_system = Keyword(store=True)
+    repository_program_name = Keyword(store=True)
+    repository_spacegroup_nr = Keyword(store=True)
+    repository_system_type = Keyword(store=True)
+    repository_xc_treatment = Keyword(store=True)
+
+
+class Calc(InnerDoc):
+    main_file_uri = Keyword()
+    secondary_file_uris = Keyword()
+    repository_filepaths = Keyword(index=False)
+    repository_archive_gid = Keyword()
+    repository_calc_id = Long(store=True)
+    repository_calc_pid = Keyword(store=True)
+    upload_id = Long()
+    upload_date = Date(format='epoch_millis')
+    repository_grouping_checksum = Keyword()
+
+    section_repository_userdata = Nested(UserData)
+    section_repository_parserdata = Nested(CalcData)
+
+    section_uploader_info = Nested(properties=dict(
+        uploader_repo_id=Keyword(),
+        uploader_first_name=Keyword(),
+        uploader_last_name=Keyword(),
+        uploader_username=Keyword(),
+        uploader_name=Text()
+    ))
+
+
+class Entry(Document, datamodel.Entity):
+    class Index:
+        name = config.elastic.coe_repo_calcs_index_name
+
+    calc_id = Keyword()
+    upload_id = Keyword()
+    section_repository_info = Nested(Calc)
+
+    def __init__(self, upload_id: str, calc_id: str) -> None:
+        super().__init__(meta=dict(id=calc_id))
+        self.calc_id = calc_id
+        self.upload_id = upload_id
+
+    def persist(self, **kwargs):
+        """
+            Persist this entry to elastic search. Kwargs are passed to elastic search.
+
+            Raises:
+                AlreadyExists: If the calculation already exists in elastic search. We use
+                    the elastic document lock here. The elastic document is IDed via the
+                    ``calc_id``.
+        """
+        try:
+            # In practice, ES operations might fail due to timeouts under heavy load or
+            # bad configuration. Retries with a small delay are a pragmatic solution.
+            e_after_retries = None
+            for _ in range(0, 2):
+                try:
+                    self.save(op_type='create', **kwargs)
+                    e_after_retries = None
+                    break
+                except ConnectionTimeout as e:
+                    e_after_retries = e
+                    time.sleep(1)
+                except ConflictError as e:  # this should never happen, but happens
+                    e_after_retries = e
+                    time.sleep(1)
+            if e_after_retries is not None:
+                # if we had an exception and could not fix it with retries, re-raise it
+                raise e_after_retries  # pylint: disable=E0702
+        except ConflictError:
+            raise AlreadyExists('Calculation %s/%s already exists.' % (self.upload_id, self.calc_id))
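+
+    # A minimal usage sketch (ids are hypothetical), assuming the elastic
+    # connection has been set up via nomad.infrastructure:
+    #
+    #     entry = Entry(upload_id='some_upload_id', calc_id='some_calc_id')
+    #     entry.persist()  # raises AlreadyExists if an entry with this calc_id exists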
+
+    @classmethod
+    def update_by_query(cls, upload_id, script):
+        """ Update all entries of a given upload via elastic script. """
+        index = cls._default_index()
+        doc_type = cls._doc_type.name
+        conn = cls._get_connection()
+        body = {
+            'script': script,
+            'query': {
+                'match': {
+                    'upload_id': upload_id
+                }
+            }
+        }
+        conn.update_by_query(index, doc_type=[doc_type], body=body)
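+
+    # Example with a hypothetical painless script, mirroring the former
+    # RepoCalc.unstage use case from the removed nomad/repo.py:
+    #
+    #     Entry.update_by_query('some_upload_id', {
+    #         'inline': 'ctx._source.staging=false',
+    #         'lang': 'painless'
+    #     })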
+
+    @staticmethod
+    def es_search(body):
+        """ Perform an elasticsearch and not elasticsearch_dsl search on the Calc index. """
+        return infrastructure.elastic_client.search(index=config.elastic.index_name, body=body)
+
+    @property
+    def json_dict(self):
+        """ A json serializable dictionary representation. """
+        data = self.to_dict()
+
+        upload_time = data.get('upload_time', None)
+        if upload_time is not None and isinstance(upload_time, datetime):
+            data['upload_time'] = data['upload_time'].isoformat()
+
+        return {key: value for key, value in data.items() if value is not None}
diff --git a/nomad/utils.py b/nomad/utils.py
index fa591552b0e2a3316158c4e3d1a97e9b4e6cb321..dc5be352ff033ac37fcda7bb42a26d04cd52df56 100644
--- a/nomad/utils.py
+++ b/nomad/utils.py
@@ -19,7 +19,7 @@
 
 Logging in nomad is structured. Structured logging means that log entries contain
 dictionaries with quantities related to respective events. E.g. having the code,
-parser, parser version, calc_hash, mainfile, etc. for all events that happen during
+parser, parser version, calc_id, mainfile, etc. for all events that happen during
 calculation processing. This means the :func:`get_logger` and all logger functions
 take keyword arguments for structured data. Otherwise :func:`get_logger` can
 be used similar to the standard *logging.getLogger*.
@@ -33,8 +33,7 @@ Depending on the configuration all logs will also be send to a central logstash.
 .. autofunc::nomad.utils.lnr
 """
 
-from typing import Union, IO, cast, List
-import hashlib
+from typing import List
 import base64
 import logging
 import structlog
@@ -46,9 +45,13 @@ import json
 import uuid
 import time
 import re
+from werkzeug.exceptions import HTTPException
 
 from nomad import config
 
+default_hash_len = 28
+""" Length of hashes and hash-based ids (e.g. calc, upload) in nomad. """
+
 
 def sanitize_logevent(event: str) -> str:
     """
@@ -94,8 +97,8 @@ class LogstashFormatter(logstash.formatter.LogstashFormatterBase):
                 if key in ('event', 'stack_info', 'id', 'timestamp'):
                     continue
                 elif key in (
-                        'upload_hash', 'archive_id', 'upload_id', 'calc_hash', 'mainfile',
-                        'service'):
+                        'upload_id', 'calc_id', 'mainfile',
+                        'service', 'release'):
                     key = 'nomad.%s' % key
                 else:
                     key = '%s.%s' % (record.name, key)
@@ -123,7 +126,7 @@ def add_logstash_handler(logger):
         logstash_handler = logstash.TCPLogstashHandler(
             config.logstash.host,
             config.logstash.tcp_port, version=1)
-        logstash_handler.formatter = LogstashFormatter(tags=['nomad', config.service])
+        logstash_handler.formatter = LogstashFormatter(tags=['nomad', config.service, config.release])
         logstash_handler.setLevel(config.logstash.level)
         logger.addHandler(logstash_handler)
 
@@ -169,28 +172,6 @@ def create_uuid() -> str:
     return base64.b64encode(uuid.uuid4().bytes, altchars=b'-_').decode('utf-8')[0:-2]
 
 
-def hash(obj: Union[IO, str], length=28) -> str:
-    """
-    Returns a web-save base64 encoded 28 long hash for the given contents.
-    First 28 character of an URL safe base 64 encoded sha512 digest.
-    """
-    hash = hashlib.sha512()
-    if getattr(obj, 'read', None) is not None:
-        for data in iter(lambda: cast(IO, obj).read(65536), b''):
-            hash.update(data)
-    elif isinstance(obj, str):
-        hash.update(obj.encode('utf-8'))
-
-    return websave_hash(hash.digest(), length)
-
-
-def websave_hash(hash, length=0):
-    if length > 0:
-        return base64.b64encode(hash, altchars=b'-_')[0:28].decode('utf-8')
-    else:
-        return base64.b64encode(hash, altchars=b'-_')[0:-2].decode('utf-8')
-
-
 def get_logger(name, **kwargs):
     """
     Returns a structlog logger that is already attached with a logstash handler.
@@ -199,7 +180,7 @@ def get_logger(name, **kwargs):
     if name.startswith('nomad.'):
         name = '.'.join(name.split('.')[:2])
 
-    logger = structlog.get_logger(name, service=config.service, **kwargs)
+    logger = structlog.get_logger(name, service=config.service, release=config.release, **kwargs)
     return logger
 
 
@@ -215,6 +196,9 @@ def lnr(logger, event, **kwargs):
     """
     try:
         yield
+    except HTTPException as e:
+        # do not log HTTPExceptions; they are part of the normal flask error handling
+        raise e
     except Exception as e:
         logger.error(event, exc_info=e, **kwargs)
         raise e
@@ -251,8 +235,8 @@ def timer(logger, event, method='info', **kwargs):
 
 class archive:
     @staticmethod
-    def create(upload_hash: str, calc_hash: str) -> str:
-        return '%s/%s' % (upload_hash, calc_hash)
+    def create(upload_id: str, calc_id: str) -> str:
+        return '%s/%s' % (upload_id, calc_id)
 
     @staticmethod
     def items(archive_id: str) -> List[str]:
@@ -263,9 +247,36 @@ class archive:
         return archive.items(archive_id)[index]
 
     @staticmethod
-    def calc_hash(archive_id: str) -> str:
+    def calc_id(archive_id: str) -> str:
         return archive.item(archive_id, 1)
 
     @staticmethod
-    def upload_hash(archive_id: str) -> str:
+    def upload_id(archive_id: str) -> str:
         return archive.item(archive_id, 0)
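+
+# Illustrative round-trip (hypothetical ids), assuming archive.items splits on the
+# '/' separator used by archive.create:
+#
+#     archive.create('some_upload', 'some_calc')  # == 'some_upload/some_calc'
+#     archive.upload_id('some_upload/some_calc')  # == 'some_upload'
+#     archive.calc_id('some_upload/some_calc')    # == 'some_calc'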
+
+
+def to_tuple(self, *args):
+    return tuple(self[arg] for arg in args)
+
+
+class POPO(dict):
+    """
+    A dict subclass that uses attributes as key/value pairs.
+    """
+    def __init__(self, **kwargs):
+        super().__init__(**kwargs)
+
+    def __getattr__(self, name):
+        if name in self:
+            return self[name]
+        else:
+            raise AttributeError("No such attribute: " + name)
+
+    def __setattr__(self, name, value):
+        self[name] = value
+
+    def __delattr__(self, name):
+        if name in self:
+            del self[name]
+        else:
+            raise AttributeError("No such attribute: " + name)
diff --git a/ops/README.md b/ops/README.md
new file mode 100644
index 0000000000000000000000000000000000000000..e117000af73dc71e93cff70e68882ccc605aa0e8
--- /dev/null
+++ b/ops/README.md
@@ -0,0 +1,4 @@
+## Overview
+
+Read the [introduction](./introduction.html) and [setup](./setup.html) pages for an
+overview of the different nomad services. This section is about how to deploy and
+operate these services.
\ No newline at end of file
diff --git a/ops/containers/README.md b/ops/containers/README.md
new file mode 100644
index 0000000000000000000000000000000000000000..1097678cc9762dec2389d07d32bc639dc553c2d1
--- /dev/null
+++ b/ops/containers/README.md
@@ -0,0 +1,16 @@
+## Containers
+
+### ELK
+
+This image is based on the popular elk-stack docker image:
+[github](https://github.com/spujadas/elk-docker),
+[readthedocs](http://elk-docker.readthedocs.io/).
+
+Changes
+- disabled ssl for beats communication to logstash server
+- added tcp input
+- simplified elastic search output (don't know how to set metric and other vars yet :-()
+- added kibana.yml::server.basePath="/nomad/kibana"
+
+The file `elk/kibana_objects.json` contains an export of nomad-specific searches,
+visualizations, and dashboards.
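+
+Usage
+
+You can run this image outside the usual docker-compose. To use it with a reverse
+proxy in nginx (snippet carried over from the former `elk/README.md`; adapt host
+and port to your setup), use:
+
+```
+location ~ ^/nomad/kibana/(.*)$ {
+    proxy_pass http://130.183.207.116:15601/$1;
+    proxy_set_header Host $host;
+    proxy_set_header X-Real-IP $remote_addr;
+}
+```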
diff --git a/ops/containers/elk/README.md b/ops/containers/elk/README.md
deleted file mode 100644
index 5ff052bf2d3e81d2eb8641144f98c53a65938e98..0000000000000000000000000000000000000000
--- a/ops/containers/elk/README.md
+++ /dev/null
@@ -1,25 +0,0 @@
-# ELK
-
-This image is based on the populer elk-stack docker image:
-[github](https://github.com/spujadas/elk-docker),
-[readthedocs](http://elk-docker.readthedocs.io/).
-
-## Changes
-- disabled ssl for beats communication to logstash server
-- added tcp input
-- simplified elastic search output (don't now how to set metric and other vars yet :-()
-- added kibana.yml::server.basePath="/nomad/kibana"
-
-
-## Usage
-You can run this image outside the usual docker-compose.
-
-To use this image with reverse proxy in nginx, use:
-
-```
-location ~ ^/nomad/kibana/(.*)$ {
-    proxy_pass http://130.183.207.116:15601/$1;
-    proxy_set_header Host $host;
-    proxy_set_header X-Real-IP $remote_addr;
-}
-```
\ No newline at end of file
diff --git a/ops/containers/elk/kibana_objects.json b/ops/containers/elk/kibana_objects.json
index 8186edc7fc71dc77fd88f6713521cc4856cc3f23..4fe34235c1ffed563cf8868cfa7bec1f8b16c122 100644
--- a/ops/containers/elk/kibana_objects.json
+++ b/ops/containers/elk/kibana_objects.json
@@ -1,1928 +1,17 @@
 [
   {
-    "_id": "79ffd6e0-faa0-11e6-947f-177f697178b8",
-    "_type": "dashboard",
-    "_source": {
-      "description": "",
-      "hits": 0,
-      "kibanaSavedObjectMeta": {
-        "searchSourceJSON": "{\"filter\":[],\"highlightAll\":true,\"version\":true,\"query\":{\"language\":\"lucene\",\"query\":\"beat.name:\\\"enc-preprocessing-nomad\\\"\"}}"
-      },
-      "optionsJSON": "{\"darkTheme\":false}",
-      "panelsJSON": "[{\"col\":1,\"id\":\"6b7b9a40-faa1-11e6-86b1-cd7735ff7e23\",\"panelIndex\":1,\"row\":12,\"size_x\":6,\"size_y\":3,\"type\":\"visualization\"},{\"col\":7,\"id\":\"4d546850-1b15-11e7-b09e-037021c4f8df\",\"panelIndex\":2,\"row\":6,\"size_x\":6,\"size_y\":3,\"type\":\"visualization\"},{\"col\":7,\"id\":\"089b85d0-1b16-11e7-b09e-037021c4f8df\",\"panelIndex\":3,\"row\":12,\"size_x\":6,\"size_y\":3,\"type\":\"visualization\"},{\"col\":1,\"id\":\"bfa5e400-1b16-11e7-b09e-037021c4f8df\",\"panelIndex\":4,\"row\":9,\"size_x\":6,\"size_y\":3,\"type\":\"visualization\"},{\"col\":7,\"id\":\"e0f001c0-1b18-11e7-b09e-037021c4f8df\",\"panelIndex\":5,\"row\":15,\"size_x\":6,\"size_y\":3,\"type\":\"visualization\"},{\"col\":1,\"id\":\"2e224660-1b19-11e7-b09e-037021c4f8df\",\"panelIndex\":6,\"row\":15,\"size_x\":6,\"size_y\":3,\"type\":\"visualization\"},{\"col\":1,\"id\":\"ab2d1e90-1b1a-11e7-b09e-037021c4f8df\",\"panelIndex\":7,\"row\":6,\"size_x\":6,\"size_y\":3,\"type\":\"visualization\"},{\"col\":7,\"id\":\"4e4bb1e0-1b1b-11e7-b09e-037021c4f8df\",\"panelIndex\":8,\"row\":9,\"size_x\":6,\"size_y\":3,\"type\":\"visualization\"},{\"col\":5,\"id\":\"26732e20-1b91-11e7-bec4-a5e9ec5cab8b\",\"panelIndex\":9,\"row\":2,\"size_x\":2,\"size_y\":2,\"type\":\"visualization\"},{\"col\":1,\"id\":\"83e12df0-1b91-11e7-bec4-a5e9ec5cab8b\",\"panelIndex\":10,\"row\":2,\"size_x\":2,\"size_y\":2,\"type\":\"visualization\"},{\"col\":3,\"id\":\"d3166e80-1b91-11e7-bec4-a5e9ec5cab8b\",\"panelIndex\":11,\"row\":2,\"size_x\":2,\"size_y\":2,\"type\":\"visualization\"},{\"col\":7,\"id\":\"522ee670-1b92-11e7-bec4-a5e9ec5cab8b\",\"panelIndex\":12,\"row\":2,\"size_x\":2,\"size_y\":2,\"type\":\"visualization\"},{\"col\":9,\"id\":\"1aae9140-1b93-11e7-8ada-3df93aab833e\",\"panelIndex\":13,\"row\":2,\"size_x\":2,\"size_y\":2,\"type\":\"visualization\"},{\"col\":9,\"id\":\"34f97ee0-1b96-11e7-8ada-3df93aab833e\",\"panelIndex\":14,\"row\":4,\"size_x\":4,\"size_y\":2,\"type\":\"visualization\"},{\"col\":1,\"id\":\"System-Navigation\",\"panelIndex\":16,\"row\":1,\"size_x\":6,\"size_y\":1,\"type\":\"visualization\"},{\"col\":1,\"id\":\"19e123b0-4d5a-11e7-aee5-fdc812cc3bec\",\"panelIndex\":21,\"row\":4,\"size_x\":2,\"size_y\":2,\"type\":\"visualization\"},{\"col\":3,\"id\":\"d2e80340-4d5c-11e7-aa29-87a97a796de6\",\"panelIndex\":22,\"row\":4,\"size_x\":2,\"size_y\":2,\"type\":\"visualization\"},{\"col\":7,\"id\":\"825fdb80-4d1d-11e7-b5f2-2b7c1895bf32\",\"panelIndex\":23,\"row\":4,\"size_x\":2,\"size_y\":2,\"type\":\"visualization\"},{\"col\":11,\"id\":\"96976150-4d5d-11e7-aa29-87a97a796de6\",\"panelIndex\":25,\"row\":2,\"size_x\":2,\"size_y\":2,\"type\":\"visualization\"},{\"col\":1,\"id\":\"99381c80-4d60-11e7-9a4c-ed99bbcaa42b\",\"panelIndex\":27,\"row\":18,\"size_x\":6,\"size_y\":3,\"type\":\"visualization\"},{\"col\":7,\"id\":\"c5e3cf90-4d60-11e7-9a4c-ed99bbcaa42b\",\"panelIndex\":28,\"row\":18,\"size_x\":6,\"size_y\":3,\"type\":\"visualization\"},{\"col\":5,\"id\":\"590a60f0-5d87-11e7-8884-1bb4c3b890e4\",\"panelIndex\":29,\"row\":4,\"size_x\":2,\"size_y\":2,\"type\":\"visualization\"},{\"col\":7,\"id\":\"3d65d450-a9c3-11e7-af20-67db8aecb295\",\"panelIndex\":30,\"row\":1,\"size_x\":6,\"size_y\":1,\"type\":\"visualization\"}]",
-      "timeRestore": false,
-      "title": "[Metricbeat System] Host overview",
-      "uiStateJSON": "{\"P-29\":{\"vis\":{\"defaultColors\":{\"0 - 100\":\"rgb(0,104,55)\"}}}}",
-      "version": 1
-    },
-    "_meta": {
-      "savedObjectVersion": 2
-    }
-  },
-  {
-    "_id": "AV4RGUqo5NkDleZmzKuZ",
-    "_type": "dashboard",
-    "_source": {
-      "description": "",
-      "hits": 0,
-      "kibanaSavedObjectMeta": {
-        "searchSourceJSON": "{\"filter\":[{\"query\":{\"query_string\":{\"analyze_wildcard\":true,\"query\":\"*\"}}}],\"highlightAll\":true,\"version\":true}"
-      },
-      "optionsJSON": "{\"darkTheme\":false}",
-      "panelsJSON": "[{\"col\":7,\"id\":\"022a54c0-2bf5-11e7-859b-f78b612cde28\",\"panelIndex\":1,\"row\":1,\"size_x\":6,\"size_y\":3,\"type\":\"visualization\"},{\"col\":1,\"id\":\"44f12b40-2bf4-11e7-859b-f78b612cde28\",\"panelIndex\":2,\"row\":7,\"size_x\":6,\"size_y\":3,\"type\":\"visualization\"},{\"col\":4,\"id\":\"cd059410-2bfb-11e7-859b-f78b612cde28\",\"panelIndex\":5,\"row\":1,\"size_x\":3,\"size_y\":3,\"type\":\"visualization\"},{\"col\":1,\"id\":\"e1018b90-2bfb-11e7-859b-f78b612cde28\",\"panelIndex\":6,\"row\":4,\"size_x\":2,\"size_y\":3,\"type\":\"visualization\"},{\"col\":7,\"id\":\"d6564360-2bfc-11e7-859b-f78b612cde28\",\"panelIndex\":7,\"row\":7,\"size_x\":6,\"size_y\":3,\"type\":\"visualization\"},{\"col\":7,\"id\":\"16fa4470-2bfd-11e7-859b-f78b612cde28\",\"panelIndex\":8,\"row\":10,\"size_x\":6,\"size_y\":3,\"type\":\"visualization\"},{\"col\":1,\"id\":\"294546b0-30d6-11e7-8df8-6d3604a72912\",\"panelIndex\":9,\"row\":10,\"size_x\":6,\"size_y\":3,\"type\":\"visualization\"},{\"col\":1,\"id\":\"408fccf0-30d6-11e7-8df8-6d3604a72912\",\"panelIndex\":10,\"row\":1,\"size_x\":3,\"size_y\":3,\"type\":\"visualization\"},{\"col\":1,\"id\":\"58e644f0-30d6-11e7-8df8-6d3604a72912\",\"panelIndex\":11,\"row\":13,\"size_x\":6,\"size_y\":3,\"type\":\"visualization\"},{\"col\":7,\"id\":\"a4c9d360-30df-11e7-8df8-6d3604a72912\",\"panelIndex\":12,\"row\":13,\"size_x\":6,\"size_y\":3,\"type\":\"visualization\"},{\"col\":5,\"id\":\"174a6ad0-30e0-11e7-8df8-6d3604a72912\",\"panelIndex\":13,\"row\":4,\"size_x\":2,\"size_y\":3,\"type\":\"visualization\"},{\"col\":7,\"id\":\"7aac4fd0-30e0-11e7-8df8-6d3604a72912\",\"panelIndex\":14,\"row\":4,\"size_x\":6,\"size_y\":3,\"type\":\"visualization\"},{\"col\":3,\"id\":\"da1ff7c0-30ed-11e7-b9e5-2b5b07213ab3\",\"panelIndex\":15,\"row\":4,\"size_x\":2,\"size_y\":3,\"type\":\"visualization\"}]",
-      "timeRestore": false,
-      "title": "[Metricbeat Kubernetes] Overview",
-      "uiStateJSON": "{}",
-      "version": 1
-    },
-    "_meta": {
-      "savedObjectVersion": 2
-    }
-  },
-  {
-    "_id": "AV4REOpp5NkDleZmzKkE",
-    "_type": "dashboard",
-    "_source": {
-      "description": "",
-      "hits": 0,
-      "kibanaSavedObjectMeta": {
-        "searchSourceJSON": "{\"filter\":[{\"query\":{\"query_string\":{\"analyze_wildcard\":true,\"query\":\"*\"}}}],\"highlightAll\":true,\"version\":true}"
-      },
-      "optionsJSON": "{\"darkTheme\":false}",
-      "panelsJSON": "[{\"col\":1,\"id\":\"Docker-containers\",\"panelIndex\":1,\"row\":1,\"size_x\":7,\"size_y\":5,\"type\":\"visualization\"},{\"col\":8,\"id\":\"Docker-Number-of-Containers\",\"panelIndex\":2,\"row\":1,\"size_x\":5,\"size_y\":2,\"type\":\"visualization\"},{\"col\":8,\"id\":\"Docker-containers-per-host\",\"panelIndex\":3,\"row\":3,\"size_x\":2,\"size_y\":3,\"type\":\"visualization\"},{\"col\":10,\"id\":\"Docker-images-and-names\",\"panelIndex\":7,\"row\":3,\"size_x\":3,\"size_y\":3,\"type\":\"visualization\"},{\"col\":1,\"id\":\"Docker-CPU-usage\",\"panelIndex\":4,\"row\":6,\"size_x\":6,\"size_y\":3,\"type\":\"visualization\"},{\"col\":7,\"id\":\"Docker-memory-usage\",\"panelIndex\":5,\"row\":6,\"size_x\":6,\"size_y\":3,\"type\":\"visualization\"},{\"col\":1,\"id\":\"Docker-Network-IO\",\"panelIndex\":6,\"row\":9,\"size_x\":12,\"size_y\":3,\"type\":\"visualization\"}]",
-      "timeRestore": false,
-      "title": "[Metricbeat Docker] Overview",
-      "uiStateJSON": "{\"P-1\":{\"vis\":{\"params\":{\"sort\":{\"columnIndex\":1,\"direction\":\"asc\"}}}},\"P-2\":{\"vis\":{\"defaultColors\":{\"0 - 100\":\"rgb(0,104,55)\"}}},\"P-3\":{\"vis\":{\"legendOpen\":true}},\"P-5\":{\"vis\":{\"legendOpen\":true}},\"P-7\":{\"vis\":{\"legendOpen\":true}}}",
-      "version": 1
-    },
-    "_meta": {
-      "savedObjectVersion": 2
-    }
-  },
-  {
-    "_id": "Metricbeat-Apache-HTTPD-server-status",
-    "_type": "dashboard",
-    "_source": {
-      "description": "",
-      "hits": 0,
-      "kibanaSavedObjectMeta": {
-        "searchSourceJSON": "{\"filter\":[],\"query\":{\"query\":{\"query_string\":{\"query\":\"*\",\"analyze_wildcard\":true,\"default_field\":\"*\"}},\"language\":\"lucene\"},\"highlightAll\":true,\"version\":true}"
-      },
-      "optionsJSON": "{\"darkTheme\":false}",
-      "panelsJSON": "[{\"id\":\"Apache-HTTPD-CPU\",\"type\":\"visualization\",\"panelIndex\":1,\"size_x\":6,\"size_y\":3,\"col\":7,\"row\":10},{\"id\":\"Apache-HTTPD-Hostname-list\",\"type\":\"visualization\",\"panelIndex\":2,\"size_x\":3,\"size_y\":3,\"col\":1,\"row\":1},{\"id\":\"Apache-HTTPD-Load1-slash-5-slash-15\",\"type\":\"visualization\",\"panelIndex\":3,\"size_x\":6,\"size_y\":3,\"col\":1,\"row\":10},{\"id\":\"Apache-HTTPD-Scoreboard\",\"type\":\"visualization\",\"panelIndex\":4,\"size_x\":12,\"size_y\":3,\"col\":1,\"row\":7},{\"id\":\"Apache-HTTPD-Total-accesses-and-kbytes\",\"type\":\"visualization\",\"panelIndex\":5,\"size_x\":6,\"size_y\":3,\"col\":7,\"row\":1},{\"id\":\"Apache-HTTPD-Uptime\",\"type\":\"visualization\",\"panelIndex\":6,\"size_x\":3,\"size_y\":3,\"col\":4,\"row\":1},{\"id\":\"Apache-HTTPD-Workers\",\"type\":\"visualization\",\"panelIndex\":7,\"size_x\":12,\"size_y\":3,\"col\":1,\"row\":4}]",
-      "timeRestore": false,
-      "title": "[Metricbeat Apache] Overview",
-      "uiStateJSON": "{\"P-2\":{\"vis\":{\"params\":{\"sort\":{\"columnIndex\":null,\"direction\":null}}}},\"P-6\":{\"vis\":{\"defaultColors\":{\"0 - 100\":\"rgb(0,104,55)\"}}},\"P-5\":{\"vis\":{\"defaultColors\":{\"0 - 100\":\"rgb(0,104,55)\"}}}}",
-      "version": 1
-    },
-    "_meta": {
-      "savedObjectVersion": 2
-    }
-  },
-  {
-    "_id": "f2dc7320-f519-11e6-a3c9-9d1f7c42b045",
-    "_type": "dashboard",
-    "_source": {
-      "description": "",
-      "hits": 0,
-      "kibanaSavedObjectMeta": {
-        "searchSourceJSON": "{\"filter\":[],\"query\":{\"query\":{\"query_string\":{\"analyze_wildcard\":true,\"query\":\"*\"}},\"language\":\"lucene\"},\"highlightAll\":true,\"version\":true}"
-      },
-      "optionsJSON": "{\"darkTheme\":false}",
-      "panelsJSON": "[{\"col\":1,\"id\":\"58000780-f529-11e6-844d-b170e2f0a07e\",\"panelIndex\":8,\"row\":1,\"size_x\":12,\"size_y\":3,\"type\":\"visualization\"},{\"col\":1,\"id\":\"95388680-f52a-11e6-969c-518c48c913e4\",\"panelIndex\":9,\"row\":4,\"size_x\":4,\"size_y\":4,\"type\":\"visualization\"},{\"col\":9,\"id\":\"b59a5200-f52a-11e6-969c-518c48c913e4\",\"panelIndex\":10,\"row\":4,\"size_x\":4,\"size_y\":4,\"type\":\"visualization\"},{\"col\":5,\"id\":\"9a9a8bf0-f52a-11e6-969c-518c48c913e4\",\"panelIndex\":11,\"row\":4,\"size_x\":4,\"size_y\":4,\"type\":\"visualization\"},{\"col\":7,\"id\":\"b046cb80-f52a-11e6-969c-518c48c913e4\",\"panelIndex\":12,\"row\":8,\"size_x\":6,\"size_y\":4,\"type\":\"visualization\"},{\"col\":1,\"id\":\"ab226b50-f52a-11e6-969c-518c48c913e4\",\"panelIndex\":13,\"row\":8,\"size_x\":6,\"size_y\":4,\"type\":\"visualization\"}]",
-      "timeRestore": false,
-      "title": "[Metricbeat Golang] Overview",
-      "uiStateJSON": "{}",
-      "version": 1
-    },
-    "_meta": {
-      "savedObjectVersion": 2
-    }
-  },
-  {
-    "_id": "Metricbeat-MongoDB",
-    "_type": "dashboard",
-    "_source": {
-      "description": "",
-      "hits": 0,
-      "kibanaSavedObjectMeta": {
-        "searchSourceJSON": "{\"filter\":[],\"query\":{\"language\":\"lucene\",\"query\":{\"query_string\":{\"analyze_wildcard\":true,\"default_field\":\"*\",\"query\":\"*\"}}},\"highlightAll\":true,\"version\":true}"
-      },
-      "optionsJSON": "{\"darkTheme\":false}",
-      "panelsJSON": "[{\"col\":1,\"id\":\"MongoDB-hosts\",\"panelIndex\":1,\"row\":1,\"size_x\":8,\"size_y\":3,\"type\":\"visualization\"},{\"col\":9,\"id\":\"MongoDB-Engine-ampersand-Version\",\"panelIndex\":4,\"row\":1,\"size_x\":4,\"size_y\":3,\"type\":\"visualization\"},{\"col\":1,\"id\":\"MongoDB-operation-counters\",\"panelIndex\":2,\"row\":4,\"size_x\":6,\"size_y\":3,\"type\":\"visualization\"},{\"col\":7,\"id\":\"MongoDB-Concurrent-transactions-Read\",\"panelIndex\":6,\"row\":4,\"size_x\":3,\"size_y\":3,\"type\":\"visualization\"},{\"col\":10,\"id\":\"MongoDB-Concurrent-transactions-Write\",\"panelIndex\":7,\"row\":4,\"size_x\":3,\"size_y\":3,\"type\":\"visualization\"},{\"col\":1,\"id\":\"MongoDB-memory-stats\",\"panelIndex\":5,\"row\":10,\"size_x\":12,\"size_y\":4,\"type\":\"visualization\"},{\"col\":7,\"id\":\"MongoDB-asserts\",\"panelIndex\":3,\"row\":7,\"size_x\":6,\"size_y\":3,\"type\":\"visualization\"},{\"col\":1,\"id\":\"MongoDB-WiredTiger-Cache\",\"panelIndex\":8,\"row\":7,\"size_x\":6,\"size_y\":3,\"type\":\"visualization\"}]",
-      "timeRestore": false,
-      "title": "[Metricbeat MongoDB] Overview",
-      "uiStateJSON": "{\"P-1\":{\"vis\":{\"params\":{\"sort\":{\"columnIndex\":null,\"direction\":null}}}}}",
-      "version": 1
-    },
-    "_meta": {
-      "savedObjectVersion": 2
-    }
-  },
-  {
-    "_id": "Metricbeat-system-overview",
-    "_type": "dashboard",
-    "_source": {
-      "description": "",
-      "hits": 0,
-      "kibanaSavedObjectMeta": {
-        "searchSourceJSON": "{\"filter\":[],\"highlightAll\":true,\"version\":true,\"query\":{\"language\":\"lucene\",\"query\":{\"query_string\":{\"analyze_wildcard\":true,\"default_field\":\"*\",\"query\":\"*\"}}}}"
-      },
-      "optionsJSON": "{\"darkTheme\":false}",
-      "panelsJSON": "[{\"col\":1,\"id\":\"System-Navigation\",\"panelIndex\":9,\"row\":1,\"size_x\":12,\"size_y\":1,\"type\":\"visualization\"},{\"col\":1,\"id\":\"c6f2ffd0-4d17-11e7-a196-69b9a7a020a9\",\"panelIndex\":11,\"row\":2,\"size_x\":2,\"size_y\":2,\"type\":\"visualization\"},{\"col\":7,\"id\":\"fe064790-1b1f-11e7-bec4-a5e9ec5cab8b\",\"panelIndex\":12,\"row\":4,\"size_x\":6,\"size_y\":5,\"type\":\"visualization\"},{\"col\":1,\"id\":\"855899e0-1b1c-11e7-b09e-037021c4f8df\",\"panelIndex\":13,\"row\":4,\"size_x\":6,\"size_y\":5,\"type\":\"visualization\"},{\"col\":1,\"id\":\"7cdb1330-4d1a-11e7-a196-69b9a7a020a9\",\"panelIndex\":14,\"row\":9,\"size_x\":12,\"size_y\":6,\"type\":\"visualization\"},{\"col\":9,\"id\":\"522ee670-1b92-11e7-bec4-a5e9ec5cab8b\",\"panelIndex\":16,\"row\":2,\"size_x\":2,\"size_y\":2,\"type\":\"visualization\"},{\"col\":11,\"id\":\"1aae9140-1b93-11e7-8ada-3df93aab833e\",\"panelIndex\":17,\"row\":2,\"size_x\":2,\"size_y\":2,\"type\":\"visualization\"},{\"col\":7,\"id\":\"825fdb80-4d1d-11e7-b5f2-2b7c1895bf32\",\"panelIndex\":18,\"row\":2,\"size_x\":2,\"size_y\":2,\"type\":\"visualization\"},{\"col\":5,\"id\":\"d3166e80-1b91-11e7-bec4-a5e9ec5cab8b\",\"panelIndex\":19,\"row\":2,\"size_x\":2,\"size_y\":2,\"type\":\"visualization\"},{\"col\":3,\"id\":\"83e12df0-1b91-11e7-bec4-a5e9ec5cab8b\",\"panelIndex\":20,\"row\":2,\"size_x\":2,\"size_y\":2,\"type\":\"visualization\"}]",
-      "timeRestore": false,
-      "title": "[Metricbeat System] Overview",
-      "uiStateJSON": "{\"P-11\":{\"vis\":{\"defaultColors\":{\"0 - 100\":\"rgb(0,104,55)\"}}},\"P-12\":{\"vis\":{\"defaultColors\":{\"0 - 100\":\"rgb(0,104,55)\"}}},\"P-14\":{\"vis\":{\"defaultColors\":{\"0% - 15%\":\"rgb(247,252,245)\",\"15% - 30%\":\"rgb(199,233,192)\",\"30% - 45%\":\"rgb(116,196,118)\",\"45% - 60%\":\"rgb(35,139,69)\"}}},\"P-16\":{\"vis\":{\"defaultColors\":{\"0 - 100\":\"rgb(0,104,55)\"}}},\"P-2\":{\"vis\":{\"defaultColors\":{\"0 - 100\":\"rgb(0,104,55)\"}}},\"P-3\":{\"vis\":{\"defaultColors\":{\"0 - 100\":\"rgb(0,104,55)\"}}}}",
-      "version": 1
-    },
-    "_meta": {
-      "savedObjectVersion": 2
-    }
-  },
-  {
-    "_id": "AV4YjZ5pux-M-tCAunxK",
-    "_type": "dashboard",
-    "_source": {
-      "description": "",
-      "hits": 0,
-      "kibanaSavedObjectMeta": {
-        "searchSourceJSON": "{\"filter\":[{\"query\":{\"query_string\":{\"analyze_wildcard\":true,\"query\":\"*\"}}}],\"highlightAll\":true,\"version\":true}"
-      },
-      "optionsJSON": "{\"darkTheme\":false}",
-      "panelsJSON": "[{\"col\":1,\"id\":\"Redis-Clients-Metrics\",\"panelIndex\":2,\"row\":1,\"size_x\":3,\"size_y\":3,\"type\":\"visualization\"},{\"col\":4,\"id\":\"Redis-Connected-clients\",\"panelIndex\":1,\"row\":1,\"size_x\":5,\"size_y\":3,\"type\":\"visualization\"},{\"col\":1,\"id\":\"Redis-hosts\",\"panelIndex\":3,\"row\":4,\"size_x\":12,\"size_y\":2,\"type\":\"visualization\"},{\"col\":1,\"id\":\"Redis-Server-Versions\",\"panelIndex\":4,\"row\":6,\"size_x\":4,\"size_y\":2,\"type\":\"visualization\"},{\"col\":5,\"id\":\"Redis-server-mode\",\"panelIndex\":5,\"row\":6,\"size_x\":4,\"size_y\":2,\"type\":\"visualization\"},{\"col\":9,\"id\":\"Redis-multiplexing-API\",\"panelIndex\":6,\"row\":6,\"size_x\":3,\"size_y\":2,\"type\":\"visualization\"},{\"col\":9,\"id\":\"Redis-Keyspaces\",\"panelIndex\":7,\"row\":1,\"size_x\":4,\"size_y\":3,\"type\":\"visualization\"}]",
-      "timeRestore": false,
-      "title": "[Metricbeat Redis] Overview",
-      "uiStateJSON": "{\"P-3\":{\"vis\":{\"params\":{\"sort\":{\"columnIndex\":null,\"direction\":null}}}},\"P-4\":{\"vis\":{\"legendOpen\":true}},\"P-2\":{\"vis\":{\"defaultColors\":{\"0 - 100\":\"rgb(0,104,55)\"}}}}",
-      "version": 1
-    },
-    "_meta": {
-      "savedObjectVersion": 2
-    }
-  },
-  {
-    "_id": "d9eba730-c991-11e7-9835-2f31fe08873b",
-    "_type": "dashboard",
-    "_source": {
-      "description": "Overview of the Windows Service States",
-      "hits": 0,
-      "kibanaSavedObjectMeta": {
-        "searchSourceJSON": "{\"query\":{\"language\":\"lucene\",\"query\":\"\"},\"filter\":[],\"highlightAll\":true,\"version\":true}"
-      },
-      "optionsJSON": "{\"darkTheme\":false}",
-      "panelsJSON": "[{\"col\":4,\"id\":\"eb8277d0-c98c-11e7-9835-2f31fe08873b\",\"panelIndex\":1,\"row\":4,\"size_x\":9,\"size_y\":5,\"type\":\"visualization\"},{\"col\":1,\"id\":\"23a5fff0-c98e-11e7-9835-2f31fe08873b\",\"panelIndex\":2,\"row\":4,\"size_x\":3,\"size_y\":5,\"type\":\"visualization\"},{\"col\":1,\"id\":\"830c45f0-c991-11e7-9835-2f31fe08873b\",\"panelIndex\":3,\"row\":1,\"size_x\":4,\"size_y\":3,\"type\":\"visualization\"},{\"size_x\":4,\"size_y\":3,\"panelIndex\":4,\"type\":\"visualization\",\"id\":\"35f5ad60-c996-11e7-9835-2f31fe08873b\",\"col\":5,\"row\":1},{\"size_x\":4,\"size_y\":3,\"panelIndex\":5,\"type\":\"visualization\",\"id\":\"c36b2ba0-ca29-11e7-9835-2f31fe08873b\",\"col\":9,\"row\":1}]",
-      "timeRestore": false,
-      "title": "[Metricbeat Windows] Services",
-      "uiStateJSON": "{\"P-1\":{\"vis\":{\"params\":{\"sort\":{\"columnIndex\":null,\"direction\":null}}}},\"P-2\":{\"vis\":{\"params\":{\"sort\":{\"columnIndex\":null,\"direction\":null}}}},\"P-4\":{\"vis\":{\"defaultColors\":{\"0 - 100\":\"rgb(0,104,55)\"}}},\"P-5\":{\"vis\":{\"defaultColors\":{\"0 - 100\":\"rgb(0,104,55)\"}}}}",
-      "version": 1
-    },
-    "_meta": {
-      "savedObjectVersion": 2
-    }
-  },
-  {
-    "_id": "66881e90-0006-11e7-bf7f-c9acc3d3e306",
-    "_type": "dashboard",
-    "_source": {
-      "description": "",
-      "hits": 0,
-      "kibanaSavedObjectMeta": {
-        "searchSourceJSON": "{\"filter\":[],\"query\":{\"language\":\"lucene\",\"query\":{\"query_string\":{\"analyze_wildcard\":true,\"default_field\":\"*\",\"query\":\"*\"}}},\"highlightAll\":true,\"version\":true}"
-      },
-      "optionsJSON": "{\"darkTheme\":false}",
-      "panelsJSON": "[{\"col\":1,\"id\":\"e784dc50-0005-11e7-bf7f-c9acc3d3e306\",\"panelIndex\":1,\"row\":4,\"size_x\":6,\"size_y\":3,\"type\":\"visualization\"},{\"col\":7,\"id\":\"0f506420-0006-11e7-bf7f-c9acc3d3e306\",\"panelIndex\":2,\"row\":7,\"size_x\":6,\"size_y\":3,\"type\":\"visualization\"},{\"col\":7,\"id\":\"45a00c10-0006-11e7-bf7f-c9acc3d3e306\",\"panelIndex\":4,\"row\":4,\"size_x\":6,\"size_y\":3,\"type\":\"visualization\"},{\"col\":1,\"id\":\"1eda2280-0008-11e7-82f3-2f380154876c\",\"panelIndex\":5,\"row\":1,\"size_x\":12,\"size_y\":3,\"type\":\"visualization\"},{\"col\":1,\"id\":\"1ede99e0-0009-11e7-8cd4-73b67e9e3f3c\",\"panelIndex\":7,\"row\":10,\"size_x\":6,\"size_y\":3,\"type\":\"visualization\"},{\"col\":1,\"id\":\"4c36c420-000a-11e7-8cd4-73b67e9e3f3c\",\"panelIndex\":8,\"row\":7,\"size_x\":6,\"size_y\":3,\"type\":\"visualization\"},{\"col\":7,\"id\":\"a2175300-000a-11e7-b001-85aac4878445\",\"panelIndex\":9,\"row\":10,\"size_x\":6,\"size_y\":3,\"type\":\"visualization\"}]",
-      "timeRestore": false,
-      "title": "[Metricbeat MySQL] Overview",
-      "uiStateJSON": "{}",
-      "version": 1
-    },
-    "_meta": {
-      "savedObjectVersion": 2
-    }
-  },
-  {
-    "_id": "AV4YobKIge1VCbKU_qVo",
-    "_type": "dashboard",
-    "_source": {
-      "description": "",
-      "hits": 0,
-      "kibanaSavedObjectMeta": {
-        "searchSourceJSON": "{\"filter\":[],\"highlightAll\":true,\"version\":true,\"query\":{\"language\":\"lucene\",\"query\":{\"query_string\":{\"analyze_wildcard\":true,\"default_field\":\"*\",\"query\":\"*\"}}}}"
-      },
-      "optionsJSON": "{\"darkTheme\":false}",
-      "panelsJSON": "[{\"col\":1,\"id\":\"RabbitMQ-Memory-Usage\",\"panelIndex\":8,\"row\":1,\"size_x\":6,\"size_y\":3,\"type\":\"visualization\"},{\"col\":8,\"id\":\"Rabbitmq-Number-of-Nodes\",\"panelIndex\":2,\"row\":1,\"size_x\":3,\"size_y\":3,\"type\":\"visualization\"},{\"col\":1,\"id\":\"RabbitMQ-Erlang-Process-Usage\",\"panelIndex\":10,\"row\":4,\"size_x\":6,\"size_y\":3,\"type\":\"visualization\"},{\"col\":7,\"id\":\"RabbitMQ-Queue-Index-Operations\",\"panelIndex\":9,\"row\":4,\"size_x\":6,\"size_y\":3,\"type\":\"visualization\"}]",
-      "timeRestore": false,
-      "title": "[Metricbeat RabbitMQ] Overview",
-      "uiStateJSON": "{\"P-2\":{\"vis\":{\"defaultColors\":{\"0 - 100\":\"rgb(0,104,55)\"}}}}",
-      "version": 1
-    },
-    "_meta": {
-      "savedObjectVersion": 2
-    }
-  },
-  {
-    "_id": "CPU-slash-Memory-per-container",
-    "_type": "dashboard",
-    "_source": {
-      "title": "[Metricbeat System] Containers overview",
-      "hits": 0,
-      "description": "",
-      "panelsJSON": "[{\"panelIndex\":\"2\",\"gridData\":{\"x\":0,\"y\":5,\"w\":48,\"h\":15,\"i\":\"2\"},\"embeddableConfig\":{\"vis\":{\"params\":{\"sort\":{\"columnIndex\":null,\"direction\":null}}}},\"id\":\"Container-CPU-usage\",\"type\":\"visualization\",\"version\":\"6.3.2\"},{\"panelIndex\":\"3\",\"gridData\":{\"x\":0,\"y\":0,\"w\":48,\"h\":5,\"i\":\"3\"},\"id\":\"System-Navigation\",\"type\":\"visualization\",\"version\":\"6.3.2\"},{\"panelIndex\":\"4\",\"gridData\":{\"x\":0,\"y\":20,\"w\":48,\"h\":26,\"i\":\"4\"},\"embeddableConfig\":{\"vis\":{\"params\":{\"sort\":{\"columnIndex\":null,\"direction\":null}}}},\"id\":\"Container-Memory-stats\",\"type\":\"visualization\",\"version\":\"6.3.2\"},{\"panelIndex\":\"5\",\"gridData\":{\"x\":0,\"y\":46,\"w\":48,\"h\":20,\"i\":\"5\"},\"embeddableConfig\":{\"vis\":{\"params\":{\"sort\":{\"columnIndex\":null,\"direction\":null}}}},\"id\":\"Container-Block-IO\",\"type\":\"visualization\",\"version\":\"6.3.2\"}]",
-      "optionsJSON": "{\"darkTheme\":false,\"useMargins\":false}",
-      "version": 1,
-      "timeRestore": false,
-      "kibanaSavedObjectMeta": {
-        "searchSourceJSON": "{\"filter\":[],\"highlightAll\":true,\"version\":true,\"query\":{\"language\":\"lucene\",\"query\":{\"query_string\":{\"analyze_wildcard\":true,\"default_field\":\"*\",\"query\":\"*\"}}}}"
-      }
-    },
-    "_meta": {
-      "savedObjectVersion": 2
-    }
-  },
-  {
-    "_id": "572efb40-c7cd-11e8-af80-b5004896a534",
-    "_type": "dashboard",
-    "_source": {
-      "title": "Processing statistics [nomad]",
-      "hits": 0,
-      "description": "",
-      "panelsJSON": "[{\"panelIndex\":\"1\",\"gridData\":{\"x\":0,\"y\":7,\"w\":24,\"h\":15,\"i\":\"1\"},\"version\":\"6.3.2\",\"type\":\"visualization\",\"id\":\"8568ba90-c61b-11e8-af80-b5004896a534\",\"embeddableConfig\":{}},{\"panelIndex\":\"3\",\"gridData\":{\"x\":24,\"y\":7,\"w\":24,\"h\":15,\"i\":\"3\"},\"version\":\"6.3.2\",\"type\":\"visualization\",\"id\":\"7a71d990-c7c0-11e8-af80-b5004896a534\",\"embeddableConfig\":{}},{\"panelIndex\":\"4\",\"gridData\":{\"x\":19,\"y\":22,\"w\":14,\"h\":15,\"i\":\"4\"},\"version\":\"6.3.2\",\"type\":\"visualization\",\"id\":\"a2e3f4e0-c7b0-11e8-af80-b5004896a534\",\"embeddableConfig\":{}},{\"panelIndex\":\"5\",\"gridData\":{\"x\":0,\"y\":22,\"w\":19,\"h\":15,\"i\":\"5\"},\"version\":\"6.3.2\",\"type\":\"visualization\",\"id\":\"50c32d20-c7b0-11e8-af80-b5004896a534\",\"embeddableConfig\":{}},{\"panelIndex\":\"6\",\"gridData\":{\"x\":0,\"y\":0,\"w\":10,\"h\":7,\"i\":\"6\"},\"version\":\"6.3.2\",\"type\":\"visualization\",\"id\":\"b06a49b0-c2f2-11e8-b9e8-d74f7e7408ed\",\"embeddableConfig\":{}},{\"panelIndex\":\"7\",\"gridData\":{\"x\":10,\"y\":0,\"w\":10,\"h\":7,\"i\":\"7\"},\"version\":\"6.3.2\",\"type\":\"visualization\",\"id\":\"1103dc60-c2f2-11e8-b9e8-d74f7e7408ed\",\"embeddableConfig\":{}},{\"panelIndex\":\"8\",\"gridData\":{\"x\":20,\"y\":0,\"w\":9,\"h\":7,\"i\":\"8\"},\"version\":\"6.3.2\",\"type\":\"visualization\",\"id\":\"fd77bd00-c67b-11e8-af80-b5004896a534\",\"embeddableConfig\":{}},{\"panelIndex\":\"9\",\"gridData\":{\"x\":33,\"y\":22,\"w\":15,\"h\":15,\"i\":\"9\"},\"version\":\"6.3.2\",\"type\":\"visualization\",\"id\":\"d99f6c10-c7b2-11e8-af80-b5004896a534\",\"embeddableConfig\":{}}]",
-      "optionsJSON": "{\"darkTheme\":false,\"useMargins\":true,\"hidePanelTitles\":false}",
-      "version": 1,
-      "timeRestore": false,
-      "kibanaSavedObjectMeta": {
-        "searchSourceJSON": "{\"query\":{\"query\":\"\",\"language\":\"lucene\"},\"filter\":[],\"highlightAll\":true,\"version\":true}"
-      }
-    },
-    "_meta": {
-      "savedObjectVersion": 2
-    }
-  },
-  {
-    "_id": "ce75d080-c2ef-11e8-b9e8-d74f7e7408ed",
-    "_type": "dashboard",
-    "_source": {
-      "title": "Processing errors [nomad]",
-      "hits": 0,
-      "description": "",
-      "panelsJSON": "[{\"panelIndex\":\"1\",\"gridData\":{\"x\":0,\"y\":0,\"w\":24,\"h\":14,\"i\":\"1\"},\"embeddableConfig\":{\"vis\":{\"legendOpen\":false}},\"id\":\"b16c30b0-c2ef-11e8-b9e8-d74f7e7408ed\",\"type\":\"visualization\",\"version\":\"6.3.2\"},{\"panelIndex\":\"3\",\"gridData\":{\"x\":37,\"y\":14,\"w\":11,\"h\":7,\"i\":\"3\"},\"embeddableConfig\":{\"spy\":null},\"id\":\"1103dc60-c2f2-11e8-b9e8-d74f7e7408ed\",\"type\":\"visualization\",\"version\":\"6.3.2\"},{\"panelIndex\":\"7\",\"gridData\":{\"x\":37,\"y\":21,\"w\":11,\"h\":7,\"i\":\"7\"},\"embeddableConfig\":{},\"id\":\"fd77bd00-c67b-11e8-af80-b5004896a534\",\"type\":\"visualization\",\"version\":\"6.3.2\"},{\"panelIndex\":\"12\",\"gridData\":{\"x\":0,\"y\":14,\"w\":37,\"h\":21,\"i\":\"12\"},\"embeddableConfig\":{},\"id\":\"18785d30-c7cb-11e8-af80-b5004896a534\",\"type\":\"visualization\",\"version\":\"6.3.2\"},{\"panelIndex\":\"13\",\"gridData\":{\"x\":37,\"y\":28,\"w\":11,\"h\":7,\"i\":\"13\"},\"embeddableConfig\":{},\"id\":\"b06a49b0-c2f2-11e8-b9e8-d74f7e7408ed\",\"type\":\"visualization\",\"version\":\"6.3.2\"},{\"panelIndex\":\"14\",\"gridData\":{\"x\":24,\"y\":0,\"w\":24,\"h\":14,\"i\":\"14\"},\"embeddableConfig\":{\"vis\":{\"legendOpen\":false}},\"id\":\"8bcaf100-c7d3-11e8-af80-b5004896a534\",\"type\":\"visualization\",\"version\":\"6.3.2\"},{\"panelIndex\":\"15\",\"gridData\":{\"x\":0,\"y\":35,\"w\":24,\"h\":15,\"i\":\"15\"},\"version\":\"6.3.2\",\"type\":\"visualization\",\"id\":\"b73d0810-c7d2-11e8-af80-b5004896a534\",\"embeddableConfig\":{}}]",
-      "optionsJSON": "{\"darkTheme\":false,\"hidePanelTitles\":false,\"useMargins\":true}",
-      "version": 1,
-      "timeRestore": false,
-      "kibanaSavedObjectMeta": {
-        "searchSourceJSON": "{\"query\":{\"language\":\"lucene\",\"query\":\"\"},\"filter\":[],\"highlightAll\":true,\"version\":true}"
-      }
-    },
-    "_meta": {
-      "savedObjectVersion": 2
-    }
-  },
-  {
-    "_id": "Metricbeat-Docker",
-    "_type": "search",
-    "_source": {
-      "columns": [
-        "_source"
-      ],
-      "description": "",
-      "hits": 0,
-      "kibanaSavedObjectMeta": {
-        "searchSourceJSON": "{\"index\":\"metricbeat-*\",\"filter\":[],\"highlight\":{\"pre_tags\":[\"@kibana-highlighted-field@\"],\"post_tags\":[\"@/kibana-highlighted-field@\"],\"fields\":{\"*\":{}},\"require_field_match\":false,\"fragment_size\":2147483647},\"query\":{\"query_string\":{\"query\":\"metricset.module:docker\",\"analyze_wildcard\":true}}}"
-      },
-      "sort": [
-        "@timestamp",
-        "desc"
-      ],
-      "title": "Metricbeat Docker",
-      "version": 1
-    },
-    "_meta": {
-      "savedObjectVersion": 2
-    }
-  },
-  {
-    "_id": "Apache-HTTPD",
-    "_type": "search",
-    "_source": {
-      "columns": [
-        "_source"
-      ],
-      "description": "",
-      "hits": 0,
-      "kibanaSavedObjectMeta": {
-        "searchSourceJSON": "{\"index\":\"metricbeat-*\",\"query\":{\"query_string\":{\"query\":\"metricset.module: apache\",\"analyze_wildcard\":true}},\"filter\":[],\"highlight\":{\"pre_tags\":[\"@kibana-highlighted-field@\"],\"post_tags\":[\"@/kibana-highlighted-field@\"],\"fields\":{\"*\":{}},\"require_field_match\":false,\"fragment_size\":2147483647}}"
-      },
-      "sort": [
-        "@timestamp",
-        "desc"
-      ],
-      "title": "Apache HTTPD",
-      "version": 1
-    },
-    "_meta": {
-      "savedObjectVersion": 2
-    }
-  },
-  {
-    "_id": "MongoDB-search",
-    "_type": "search",
-    "_source": {
-      "columns": [
-        "_source"
-      ],
-      "description": "",
-      "hits": 0,
-      "kibanaSavedObjectMeta": {
-        "searchSourceJSON": "{\"index\":\"metricbeat-*\",\"query\":{\"query_string\":{\"analyze_wildcard\":true,\"query\":\"metricset.module:mongodb\"}},\"filter\":[],\"highlight\":{\"pre_tags\":[\"@kibana-highlighted-field@\"],\"post_tags\":[\"@/kibana-highlighted-field@\"],\"fields\":{\"*\":{}},\"require_field_match\":false,\"fragment_size\":2147483647}}"
-      },
-      "sort": [
-        "@timestamp",
-        "desc"
-      ],
-      "title": "MongoDB search",
-      "version": 1
-    },
-    "_meta": {
-      "savedObjectVersion": 2
-    }
-  },
-  {
-    "_id": "Metricbeat-Redis",
-    "_type": "search",
-    "_source": {
-      "columns": [
-        "_source"
-      ],
-      "description": "",
-      "hits": 0,
-      "kibanaSavedObjectMeta": {
-        "searchSourceJSON": "{\"index\":\"metricbeat-*\",\"filter\":[],\"highlight\":{\"pre_tags\":[\"@kibana-highlighted-field@\"],\"post_tags\":[\"@/kibana-highlighted-field@\"],\"fields\":{\"*\":{}},\"require_field_match\":false,\"fragment_size\":2147483647},\"query\":{\"query_string\":{\"query\":\"metricset.module:redis\",\"analyze_wildcard\":true}}}"
-      },
-      "sort": [
-        "@timestamp",
-        "desc"
-      ],
-      "title": "Metricbeat Redis",
-      "version": 1
-    },
-    "_meta": {
-      "savedObjectVersion": 2
-    }
-  },
-  {
-    "_id": "b6b7ccc0-c98d-11e7-9835-2f31fe08873b",
-    "_type": "search",
-    "_source": {
-      "columns": [
-        "beat.name",
-        "windows.service.display_name",
-        "windows.service.state",
-        "windows.service.start_type",
-        "windows.service.uptime.ms",
-        "windows.service.pid",
-        "windows.service.exit_code"
-      ],
-      "description": "",
-      "hits": 0,
-      "kibanaSavedObjectMeta": {
-        "searchSourceJSON": "{\"index\":\"metricbeat-*\",\"highlightAll\":true,\"version\":true,\"query\":{\"query\":\"\",\"language\":\"lucene\"},\"filter\":[{\"meta\":{\"negate\":false,\"index\":\"metricbeat-*\",\"type\":\"phrase\",\"key\":\"metricset.module\",\"value\":\"windows\",\"params\":{\"query\":\"windows\",\"type\":\"phrase\"},\"disabled\":false,\"alias\":null},\"query\":{\"match\":{\"metricset.module\":{\"query\":\"windows\",\"type\":\"phrase\"}}},\"$state\":{\"store\":\"appState\"}},{\"meta\":{\"negate\":false,\"index\":\"metricbeat-*\",\"type\":\"phrase\",\"key\":\"metricset.name\",\"value\":\"service\",\"params\":{\"query\":\"service\",\"type\":\"phrase\"},\"disabled\":false,\"alias\":null},\"query\":{\"match\":{\"metricset.name\":{\"query\":\"service\",\"type\":\"phrase\"}}},\"$state\":{\"store\":\"appState\"}}]}"
-      },
-      "sort": [
-        "@timestamp",
-        "desc"
-      ],
-      "title": "Services [Metricbeat Windows]",
-      "version": 1
-    },
-    "_meta": {
-      "savedObjectVersion": 2
-    }
-  },
-  {
-    "_id": "67e88e60-0005-11e7-aaf1-b342e4b94bb0",
-    "_type": "search",
-    "_source": {
-      "columns": [
-        "_source"
-      ],
-      "description": "",
-      "hits": 0,
-      "kibanaSavedObjectMeta": {
-        "searchSourceJSON": "{\"index\":\"metricbeat-*\",\"highlightAll\":true,\"query\":{\"query_string\":{\"query\":\"_exists_:mysql.status\",\"analyze_wildcard\":true}},\"filter\":[]}"
-      },
-      "sort": [
-        "@timestamp",
-        "desc"
-      ],
-      "title": "Metricbeat MySQL status",
-      "version": 1
-    },
-    "_meta": {
-      "savedObjectVersion": 2
-    }
-  },
-  {
-    "_id": "Metricbeat-Rabbitmq",
-    "_type": "search",
-    "_source": {
-      "description": "",
-      "hits": 0,
-      "kibanaSavedObjectMeta": {
-        "searchSourceJSON": "{\"index\":\"metricbeat-*\",\"filter\":[],\"highlight\":{\"pre_tags\":[\"@kibana-highlighted-field@\"],\"post_tags\":[\"@/kibana-highlighted-field@\"],\"fields\":{\"*\":{}},\"require_field_match\":false,\"fragment_size\":2147483647},\"query\":{\"query_string\":{\"query\":\"metricset.module:rabbitmq\",\"analyze_wildcard\":true}}}"
-      },
-      "sort": [
-        "@timestamp",
-        "desc"
-      ],
-      "title": "Metricbeat-Rabbitmq",
-      "version": 1
-    },
-    "_meta": {
-      "savedObjectVersion": 2
-    }
-  },
-  {
-    "_id": "f12712d0-db70-11e8-86df-271c4963dd2a",
-    "_type": "search",
-    "_source": {
-      "title": "nomad example search",
-      "description": "",
-      "hits": 0,
-      "columns": [
-        "nomad.service",
-        "event",
-        "level",
-        "logger_name",
-        "nomad.archive_id"
-      ],
-      "sort": [
-        "@timestamp",
-        "desc"
-      ],
-      "version": 1,
-      "kibanaSavedObjectMeta": {
-        "searchSourceJSON": "{\"index\":\"e8ef6780-c03a-11e8-b9e8-d74f7e7408ed\",\"highlightAll\":true,\"version\":true,\"query\":{\"query\":\"\",\"language\":\"lucene\"},\"filter\":[{\"meta\":{\"index\":\"e8ef6780-c03a-11e8-b9e8-d74f7e7408ed\",\"type\":\"phrases\",\"key\":\"nomad.service.keyword\",\"value\":\"nomad_worker, nomad_api\",\"params\":[\"nomad_worker\",\"nomad_api\"],\"negate\":false,\"disabled\":false,\"alias\":null},\"query\":{\"bool\":{\"should\":[{\"match_phrase\":{\"nomad.service.keyword\":\"nomad_worker\"}},{\"match_phrase\":{\"nomad.service.keyword\":\"nomad_api\"}}],\"minimum_should_match\":1}},\"$state\":{\"store\":\"appState\"}}]}"
-      }
-    },
-    "_meta": {
-      "savedObjectVersion": 2
-    }
-  },
-  {
-    "_id": "6b7b9a40-faa1-11e6-86b1-cd7735ff7e23",
-    "_type": "visualization",
-    "_source": {
-      "description": "",
-      "kibanaSavedObjectMeta": {
-        "searchSourceJSON": "{\n  \"query\": {\n    \"query_string\": {\n      \"query\": \"*\"\n    }\n  },\n  \"filter\": []\n}"
-      },
-      "title": "Network Traffic (Packets) [Metricbeat System]",
-      "uiStateJSON": "{}",
-      "version": 1,
-      "visState": "{\n  \"title\": \"Mericbeat: Network Traffic (Packets)\",\n  \"type\": \"metrics\",\n  \"params\": {\n    \"id\": \"da1046f0-faa0-11e6-86b1-cd7735ff7e23\",\n    \"type\": \"timeseries\",\n    \"series\": [\n      {\n        \"id\": \"da1046f1-faa0-11e6-86b1-cd7735ff7e23\",\n        \"color\": \"rgba(0,156,224,1)\",\n        \"split_mode\": \"terms\",\n        \"metrics\": [\n          {\n            \"id\": \"da1046f2-faa0-11e6-86b1-cd7735ff7e23\",\n            \"type\": \"max\",\n            \"field\": \"system.network.in.packets\"\n          },\n          {\n            \"unit\": \"1s\",\n            \"id\": \"f41f9280-faa0-11e6-86b1-cd7735ff7e23\",\n            \"type\": \"derivative\",\n            \"field\": \"da1046f2-faa0-11e6-86b1-cd7735ff7e23\"\n          },\n          {\n            \"unit\": \"\",\n            \"id\": \"c0da3d80-1b93-11e7-8ada-3df93aab833e\",\n            \"type\": \"positive_only\",\n            \"field\": \"f41f9280-faa0-11e6-86b1-cd7735ff7e23\"\n          },\n          {\n            \"id\": \"ecaad010-2c2c-11e7-be71-3162da85303f\",\n            \"type\": \"series_agg\",\n            \"function\": \"sum\"\n          }\n        ],\n        \"seperate_axis\": 0,\n        \"axis_position\": \"right\",\n        \"formatter\": \"0.[00]a\",\n        \"chart_type\": \"line\",\n        \"line_width\": \"0\",\n        \"point_size\": \"0\",\n        \"fill\": \"1\",\n        \"stacked\": \"none\",\n        \"label\": \"Inbound\",\n        \"value_template\": \"{{value}}/s\",\n        \"terms_field\": \"system.network.name\"\n      },\n      {\n        \"id\": \"fbbd5720-faa0-11e6-86b1-cd7735ff7e23\",\n        \"color\": \"rgba(250,40,255,1)\",\n        \"split_mode\": \"terms\",\n        \"metrics\": [\n          {\n            \"id\": \"fbbd7e30-faa0-11e6-86b1-cd7735ff7e23\",\n            \"type\": \"max\",\n            \"field\": \"system.network.out.packets\"\n          },\n          {\n            \"unit\": \"1s\",\n            \"id\": \"fbbd7e31-faa0-11e6-86b1-cd7735ff7e23\",\n            \"type\": \"derivative\",\n            \"field\": \"fbbd7e30-faa0-11e6-86b1-cd7735ff7e23\"\n          },\n          {\n            \"script\": \"params.rate != null && params.rate > 0 ? params.rate * -1 : null\",\n            \"id\": \"17e597a0-faa1-11e6-86b1-cd7735ff7e23\",\n            \"type\": \"calculation\",\n            \"variables\": [\n              {\n                \"id\": \"1940bad0-faa1-11e6-86b1-cd7735ff7e23\",\n                \"name\": \"rate\",\n                \"field\": \"fbbd7e31-faa0-11e6-86b1-cd7735ff7e23\"\n              }\n            ]\n          },\n          {\n            \"id\": \"fe5fbdc0-2c2c-11e7-be71-3162da85303f\",\n            \"type\": \"series_agg\",\n            \"function\": \"sum\"\n          }\n        ],\n        \"seperate_axis\": 0,\n        \"axis_position\": \"right\",\n        \"formatter\": \"0.[00]a\",\n        \"chart_type\": \"line\",\n        \"line_width\": \"0\",\n        \"point_size\": \"0\",\n        \"fill\": \"1\",\n        \"stacked\": \"none\",\n        \"label\": \"Outbound\",\n        \"value_template\": \"{{value}}/s\",\n        \"terms_field\": \"system.network.name\"\n      }\n    ],\n    \"time_field\": \"@timestamp\",\n    \"index_pattern\": \"*\",\n    \"interval\": \"auto\",\n    \"axis_position\": \"left\",\n    \"axis_formatter\": \"number\",\n    \"show_legend\": 1,\n    \"filter\": \"-system.network.name:l*\"\n  },\n  \"aggs\": [],\n  \"listeners\": {}\n}"
-    },
-    "_meta": {
-      "savedObjectVersion": 2
-    }
-  },
-  {
-    "_id": "4d546850-1b15-11e7-b09e-037021c4f8df",
-    "_type": "visualization",
-    "_source": {
-      "description": "",
-      "kibanaSavedObjectMeta": {
-        "searchSourceJSON": "{\"query\":{\"query\":{\"query_string\":{\"query\":\"*\"}},\"language\":\"lucene\"},\"filter\":[]}"
-      },
-      "title": "System Load [Metricbeat System]",
-      "uiStateJSON": "{}",
-      "version": 1,
-      "visState": "{\"title\":\"System Load [Metricbeat System]\",\"type\":\"metrics\",\"params\":{\"id\":\"f6264ad0-1b14-11e7-b09e-037021c4f8df\",\"type\":\"timeseries\",\"series\":[{\"id\":\"f62671e0-1b14-11e7-b09e-037021c4f8df\",\"color\":\"rgba(115,216,255,1)\",\"split_mode\":\"everything\",\"metrics\":[{\"id\":\"f62671e1-1b14-11e7-b09e-037021c4f8df\",\"type\":\"avg\",\"field\":\"system.load.1\"}],\"seperate_axis\":0,\"axis_position\":\"right\",\"formatter\":\"number\",\"chart_type\":\"line\",\"line_width\":\"3\",\"point_size\":1,\"fill\":\"0\",\"stacked\":\"none\",\"label\":\"1m\"},{\"id\":\"1c324850-1b15-11e7-b09e-037021c4f8df\",\"color\":\"rgba(0,156,224,1)\",\"split_mode\":\"everything\",\"metrics\":[{\"id\":\"1c324851-1b15-11e7-b09e-037021c4f8df\",\"type\":\"avg\",\"field\":\"system.load.5\"}],\"seperate_axis\":0,\"axis_position\":\"right\",\"formatter\":\"number\",\"chart_type\":\"line\",\"line_width\":\"3\",\"point_size\":\"0\",\"fill\":\"0\",\"stacked\":\"none\",\"label\":\"5m\"},{\"id\":\"3287e740-1b15-11e7-b09e-037021c4f8df\",\"color\":\"rgba(0,98,177,1)\",\"split_mode\":\"everything\",\"metrics\":[{\"id\":\"32880e50-1b15-11e7-b09e-037021c4f8df\",\"type\":\"avg\",\"field\":\"system.load.15\"}],\"seperate_axis\":0,\"axis_position\":\"right\",\"formatter\":\"number\",\"chart_type\":\"line\",\"line_width\":\"3\",\"point_size\":\"0\",\"fill\":\"0\",\"stacked\":\"none\",\"label\":\"15m\"}],\"time_field\":\"@timestamp\",\"index_pattern\":\"*\",\"interval\":\"auto\",\"axis_position\":\"left\",\"axis_formatter\":\"number\",\"show_legend\":1,\"show_grid\":1},\"aggs\":[]}"
-    },
-    "_meta": {
-      "savedObjectVersion": 2
-    }
-  },
-  {
-    "_id": "089b85d0-1b16-11e7-b09e-037021c4f8df",
-    "_type": "visualization",
-    "_source": {
-      "description": "",
-      "kibanaSavedObjectMeta": {
-        "searchSourceJSON": "{\n  \"query\": {\n    \"query_string\": {\n      \"query\": \"*\"\n    }\n  },\n  \"filter\": []\n}"
-      },
-      "title": "Network Traffic (Bytes) [Metricbeat System]",
-      "uiStateJSON": "{}",
-      "version": 1,
-      "visState": "{\n  \"title\": \"Mericbeat: Network Traffic (Bytes)\",\n  \"type\": \"metrics\",\n  \"params\": {\n    \"id\": \"da1046f0-faa0-11e6-86b1-cd7735ff7e23\",\n    \"type\": \"timeseries\",\n    \"series\": [\n      {\n        \"id\": \"da1046f1-faa0-11e6-86b1-cd7735ff7e23\",\n        \"color\": \"rgba(0,156,224,1)\",\n        \"split_mode\": \"terms\",\n        \"metrics\": [\n          {\n            \"id\": \"da1046f2-faa0-11e6-86b1-cd7735ff7e23\",\n            \"type\": \"max\",\n            \"field\": \"system.network.in.bytes\"\n          },\n          {\n            \"unit\": \"1s\",\n            \"id\": \"f41f9280-faa0-11e6-86b1-cd7735ff7e23\",\n            \"type\": \"derivative\",\n            \"field\": \"da1046f2-faa0-11e6-86b1-cd7735ff7e23\"\n          },\n          {\n            \"unit\": \"\",\n            \"id\": \"a87398e0-1b93-11e7-8ada-3df93aab833e\",\n            \"type\": \"positive_only\",\n            \"field\": \"f41f9280-faa0-11e6-86b1-cd7735ff7e23\"\n          },\n          {\n            \"id\": \"2d533df0-2c2d-11e7-be71-3162da85303f\",\n            \"type\": \"series_agg\",\n            \"function\": \"sum\"\n          }\n        ],\n        \"seperate_axis\": 0,\n        \"axis_position\": \"right\",\n        \"formatter\": \"bytes\",\n        \"chart_type\": \"line\",\n        \"line_width\": \"0\",\n        \"point_size\": \"0\",\n        \"fill\": \"1\",\n        \"stacked\": \"none\",\n        \"label\": \"Inbound \",\n        \"value_template\": \"{{value}}/s\",\n        \"terms_field\": \"system.network.name\"\n      },\n      {\n        \"id\": \"fbbd5720-faa0-11e6-86b1-cd7735ff7e23\",\n        \"color\": \"rgba(250,40,255,1)\",\n        \"split_mode\": \"terms\",\n        \"metrics\": [\n          {\n            \"id\": \"fbbd7e30-faa0-11e6-86b1-cd7735ff7e23\",\n            \"type\": \"max\",\n            \"field\": \"system.network.out.bytes\"\n          },\n          {\n            \"unit\": \"1s\",\n            \"id\": \"fbbd7e31-faa0-11e6-86b1-cd7735ff7e23\",\n            \"type\": \"derivative\",\n            \"field\": \"fbbd7e30-faa0-11e6-86b1-cd7735ff7e23\"\n          },\n          {\n            \"script\": \"params.rate != null && params.rate > 0 ? params.rate * -1 : null\",\n            \"id\": \"17e597a0-faa1-11e6-86b1-cd7735ff7e23\",\n            \"type\": \"calculation\",\n            \"variables\": [\n              {\n                \"id\": \"1940bad0-faa1-11e6-86b1-cd7735ff7e23\",\n                \"name\": \"rate\",\n                \"field\": \"fbbd7e31-faa0-11e6-86b1-cd7735ff7e23\"\n              }\n            ]\n          },\n          {\n            \"id\": \"533da9b0-2c2d-11e7-be71-3162da85303f\",\n            \"type\": \"series_agg\",\n            \"function\": \"sum\"\n          }\n        ],\n        \"seperate_axis\": 0,\n        \"axis_position\": \"right\",\n        \"formatter\": \"bytes\",\n        \"chart_type\": \"line\",\n        \"line_width\": \"0\",\n        \"point_size\": \"0\",\n        \"fill\": \"1\",\n        \"stacked\": \"none\",\n        \"label\": \"Outbound \",\n        \"value_template\": \"{{value}}/s\",\n        \"terms_field\": \"system.network.name\"\n      }\n    ],\n    \"time_field\": \"@timestamp\",\n    \"index_pattern\": \"*\",\n    \"interval\": \"auto\",\n    \"axis_position\": \"left\",\n    \"axis_formatter\": \"number\",\n    \"show_legend\": 1,\n    \"filter\": \"-system.network.name:l*\"\n  },\n  \"aggs\": [],\n  \"listeners\": {}\n}"
-    },
-    "_meta": {
-      "savedObjectVersion": 2
-    }
-  },
-  {
-    "_id": "bfa5e400-1b16-11e7-b09e-037021c4f8df",
-    "_type": "visualization",
-    "_source": {
-      "description": "",
-      "kibanaSavedObjectMeta": {
-        "searchSourceJSON": "{\"query\":{\"query\":{\"query_string\":{\"query\":\"*\"}},\"language\":\"lucene\"},\"filter\":[]}"
-      },
-      "title": "Memory Usage [Metricbeat System]",
-      "uiStateJSON": "{}",
-      "version": 1,
-      "visState": "{\"title\":\"Memory Usage [Metricbeat System]\",\"type\":\"metrics\",\"params\":{\"id\":\"32f46f40-1b16-11e7-b09e-037021c4f8df\",\"type\":\"timeseries\",\"series\":[{\"id\":\"4ff61fd0-1b16-11e7-b09e-037021c4f8df\",\"color\":\"rgba(211,49,21,1)\",\"split_mode\":\"everything\",\"metrics\":[{\"id\":\"4ff61fd1-1b16-11e7-b09e-037021c4f8df\",\"type\":\"avg\",\"field\":\"system.memory.actual.used.bytes\"}],\"seperate_axis\":0,\"axis_position\":\"right\",\"formatter\":\"bytes\",\"chart_type\":\"line\",\"line_width\":1,\"point_size\":\"0\",\"fill\":\"1\",\"stacked\":\"stacked\",\"label\":\"Used\"},{\"id\":\"753a6080-1b16-11e7-b09e-037021c4f8df\",\"color\":\"rgba(0,156,224,1)\",\"split_mode\":\"everything\",\"metrics\":[{\"id\":\"753a6081-1b16-11e7-b09e-037021c4f8df\",\"type\":\"avg\",\"field\":\"system.memory.actual.used.bytes\"},{\"id\":\"7c9d3f00-1b16-11e7-b09e-037021c4f8df\",\"type\":\"avg\",\"field\":\"system.memory.used.bytes\"},{\"script\":\"params.actual != null && params.used != null ? params.used - params.actual : null\",\"id\":\"869cc160-1b16-11e7-b09e-037021c4f8df\",\"type\":\"calculation\",\"variables\":[{\"id\":\"890f9620-1b16-11e7-b09e-037021c4f8df\",\"name\":\"actual\",\"field\":\"753a6081-1b16-11e7-b09e-037021c4f8df\"},{\"id\":\"8f3ab7f0-1b16-11e7-b09e-037021c4f8df\",\"name\":\"used\",\"field\":\"7c9d3f00-1b16-11e7-b09e-037021c4f8df\"}]}],\"seperate_axis\":0,\"axis_position\":\"right\",\"formatter\":\"bytes\",\"chart_type\":\"line\",\"line_width\":1,\"point_size\":\"0\",\"fill\":\"1\",\"stacked\":\"stacked\",\"label\":\"Cache\"},{\"id\":\"32f46f41-1b16-11e7-b09e-037021c4f8df\",\"color\":\"#68BC00\",\"split_mode\":\"everything\",\"metrics\":[{\"id\":\"32f46f42-1b16-11e7-b09e-037021c4f8df\",\"type\":\"avg\",\"field\":\"system.memory.free\"}],\"seperate_axis\":0,\"axis_position\":\"right\",\"formatter\":\"bytes\",\"chart_type\":\"line\",\"line_width\":1,\"point_size\":\"0\",\"fill\":\"1\",\"stacked\":\"stacked\",\"label\":\"Free\"}],\"time_field\":\"@timestamp\",\"index_pattern\":\"*\",\"interval\":\"auto\",\"axis_position\":\"left\",\"axis_formatter\":\"number\",\"show_legend\":1,\"show_grid\":1},\"aggs\":[]}"
-    },
-    "_meta": {
-      "savedObjectVersion": 2
-    }
-  },
-  {
-    "_id": "e0f001c0-1b18-11e7-b09e-037021c4f8df",
-    "_type": "visualization",
-    "_source": {
-      "description": "",
-      "kibanaSavedObjectMeta": {
-        "searchSourceJSON": "{\"query\":{\"query\":{\"query_string\":{\"query\":\"*\"}},\"language\":\"lucene\"},\"filter\":[]}"
-      },
-      "title": "Top Processes By CPU [Metricbeat System]",
-      "uiStateJSON": "{}",
-      "version": 1,
-      "visState": "{\"title\":\"Top Processes By CPU [Metricbeat System]\",\"type\":\"metrics\",\"params\":{\"id\":\"5f5b8d50-1b18-11e7-b09e-037021c4f8df\",\"type\":\"top_n\",\"series\":[{\"id\":\"5f5b8d51-1b18-11e7-b09e-037021c4f8df\",\"color\":\"#68BC00\",\"split_mode\":\"terms\",\"metrics\":[{\"id\":\"5f5b8d52-1b18-11e7-b09e-037021c4f8df\",\"type\":\"avg\",\"field\":\"system.process.cpu.total.pct\"}],\"seperate_axis\":0,\"axis_position\":\"right\",\"formatter\":\"percent\",\"chart_type\":\"line\",\"line_width\":1,\"point_size\":1,\"fill\":0.5,\"stacked\":\"none\",\"terms_field\":\"system.process.name\",\"terms_order_by\":\"5f5b8d52-1b18-11e7-b09e-037021c4f8df\"}],\"time_field\":\"@timestamp\",\"index_pattern\":\"*\",\"interval\":\"auto\",\"axis_position\":\"left\",\"axis_formatter\":\"number\",\"show_legend\":1,\"bar_color_rules\":[{\"value\":0,\"id\":\"60e11be0-1b18-11e7-b09e-037021c4f8df\",\"bar_color\":\"rgba(104,188,0,1)\",\"opperator\":\"gte\"}],\"drilldown_url\":\"\",\"filter\":\"\",\"show_grid\":1},\"aggs\":[]}"
-    },
-    "_meta": {
-      "savedObjectVersion": 2
-    }
-  },
-  {
-    "_id": "2e224660-1b19-11e7-b09e-037021c4f8df",
-    "_type": "visualization",
-    "_source": {
-      "description": "",
-      "kibanaSavedObjectMeta": {
-        "searchSourceJSON": "{\"query\":{\"query\":{\"query_string\":{\"query\":\"*\"}},\"language\":\"lucene\"},\"filter\":[]}"
-      },
-      "title": "Processes By Memory [Metricbeat System]",
-      "uiStateJSON": "{}",
-      "version": 1,
-      "visState": "{\"title\":\"Processes By Memory [Metricbeat System]\",\"type\":\"metrics\",\"params\":{\"id\":\"edfceb30-1b18-11e7-b09e-037021c4f8df\",\"type\":\"top_n\",\"series\":[{\"id\":\"edfceb31-1b18-11e7-b09e-037021c4f8df\",\"color\":\"#68BC00\",\"split_mode\":\"terms\",\"metrics\":[{\"id\":\"edfceb32-1b18-11e7-b09e-037021c4f8df\",\"type\":\"avg\",\"field\":\"system.process.memory.rss.pct\"}],\"seperate_axis\":0,\"axis_position\":\"right\",\"formatter\":\"percent\",\"chart_type\":\"line\",\"line_width\":1,\"point_size\":1,\"fill\":0.5,\"stacked\":\"none\",\"terms_field\":\"system.process.name\",\"terms_order_by\":\"edfceb32-1b18-11e7-b09e-037021c4f8df\"}],\"time_field\":\"@timestamp\",\"index_pattern\":\"*\",\"interval\":\"auto\",\"axis_position\":\"left\",\"axis_formatter\":\"number\",\"show_legend\":1,\"bar_color_rules\":[{\"value\":0,\"id\":\"efb9b660-1b18-11e7-b09e-037021c4f8df\",\"bar_color\":\"rgba(104,188,0,1)\",\"opperator\":\"gte\"},{\"value\":0.7,\"id\":\"17fcb820-1b19-11e7-b09e-037021c4f8df\",\"bar_color\":\"rgba(254,146,0,1)\",\"opperator\":\"gte\"},{\"value\":0.85,\"id\":\"1dd61070-1b19-11e7-b09e-037021c4f8df\",\"bar_color\":\"rgba(211,49,21,1)\",\"opperator\":\"gte\"}],\"drilldown_url\":\"\",\"filter\":\"\",\"show_grid\":1},\"aggs\":[]}"
-    },
-    "_meta": {
-      "savedObjectVersion": 2
-    }
-  },
-  {
-    "_id": "ab2d1e90-1b1a-11e7-b09e-037021c4f8df",
-    "_type": "visualization",
-    "_source": {
-      "description": "",
-      "kibanaSavedObjectMeta": {
-        "searchSourceJSON": "{\"query\":{\"query\":{\"query_string\":{\"query\":\"*\"}},\"language\":\"lucene\"},\"filter\":[]}"
-      },
-      "title": "CPU Usage [Metricbeat System]",
-      "uiStateJSON": "{}",
-      "version": 1,
-      "visState": "{\"title\":\"CPU Usage [Metricbeat System]\",\"type\":\"metrics\",\"params\":{\"id\":\"80a04950-1b19-11e7-b09e-037021c4f8df\",\"type\":\"timeseries\",\"series\":[{\"id\":\"80a04951-1b19-11e7-b09e-037021c4f8df\",\"color\":\"#68BC00\",\"split_mode\":\"everything\",\"metrics\":[{\"id\":\"80a04952-1b19-11e7-b09e-037021c4f8df\",\"type\":\"avg\",\"field\":\"system.cpu.user.pct\"}],\"seperate_axis\":0,\"axis_position\":\"right\",\"formatter\":\"percent\",\"chart_type\":\"line\",\"line_width\":1,\"point_size\":\"0\",\"fill\":\"1\",\"stacked\":\"stacked\",\"label\":\"user\"},{\"id\":\"993acf30-1b19-11e7-b09e-037021c4f8df\",\"color\":\"rgba(211,49,21,1)\",\"split_mode\":\"everything\",\"metrics\":[{\"id\":\"993acf31-1b19-11e7-b09e-037021c4f8df\",\"type\":\"avg\",\"field\":\"system.cpu.system.pct\"}],\"seperate_axis\":0,\"axis_position\":\"right\",\"formatter\":\"percent\",\"chart_type\":\"line\",\"line_width\":1,\"point_size\":\"0\",\"fill\":\"1\",\"stacked\":\"stacked\",\"label\":\"system\"},{\"id\":\"65ca35e0-1b1a-11e7-b09e-037021c4f8df\",\"color\":\"rgba(123,100,255,1)\",\"split_mode\":\"everything\",\"metrics\":[{\"id\":\"65ca5cf0-1b1a-11e7-b09e-037021c4f8df\",\"type\":\"avg\",\"field\":\"system.cpu.nice.pct\"}],\"seperate_axis\":0,\"axis_position\":\"right\",\"formatter\":\"percent\",\"chart_type\":\"line\",\"line_width\":1,\"point_size\":\"0\",\"fill\":\"1\",\"stacked\":\"stacked\",\"label\":\"nice\"},{\"id\":\"741b5f20-1b1a-11e7-b09e-037021c4f8df\",\"color\":\"rgba(226,115,0,1)\",\"split_mode\":\"everything\",\"metrics\":[{\"id\":\"741b5f21-1b1a-11e7-b09e-037021c4f8df\",\"type\":\"avg\",\"field\":\"system.cpu.irq.pct\"}],\"seperate_axis\":0,\"axis_position\":\"right\",\"formatter\":\"percent\",\"chart_type\":\"line\",\"line_width\":1,\"point_size\":\"0\",\"fill\":\"1\",\"stacked\":\"stacked\",\"label\":\"irq\"},{\"id\":\"2efc5d40-1b1a-11e7-b09e-037021c4f8df\",\"color\":\"rgba(176,188,0,1)\",\"split_mode\":\"everything\",\"metrics\":[{\"id\":\"2efc5d41-1b1a-11e7-b09e-037021c4f8df\",\"type\":\"avg\",\"field\":\"system.cpu.softirq.pct\"}],\"seperate_axis\":0,\"axis_position\":\"right\",\"formatter\":\"percent\",\"chart_type\":\"line\",\"line_width\":1,\"point_size\":\"0\",\"fill\":\"1\",\"stacked\":\"stacked\",\"label\":\"softirq\"},{\"id\":\"ae644a30-1b19-11e7-b09e-037021c4f8df\",\"color\":\"rgba(15,20,25,1)\",\"split_mode\":\"everything\",\"metrics\":[{\"id\":\"ae644a31-1b19-11e7-b09e-037021c4f8df\",\"type\":\"avg\",\"field\":\"system.cpu.iowait.pct\"}],\"seperate_axis\":0,\"axis_position\":\"right\",\"formatter\":\"percent\",\"chart_type\":\"line\",\"line_width\":1,\"point_size\":\"0\",\"fill\":\"1\",\"stacked\":\"stacked\",\"label\":\"iowait\"}],\"time_field\":\"@timestamp\",\"index_pattern\":\"*\",\"interval\":\"auto\",\"axis_position\":\"left\",\"axis_formatter\":\"number\",\"show_legend\":1,\"show_grid\":1},\"aggs\":[]}"
-    },
-    "_meta": {
-      "savedObjectVersion": 2
-    }
-  },
-  {
-    "_id": "4e4bb1e0-1b1b-11e7-b09e-037021c4f8df",
-    "_type": "visualization",
-    "_source": {
-      "description": "",
-      "kibanaSavedObjectMeta": {
-        "searchSourceJSON": "{\"query\":{\"query\":{\"query_string\":{\"query\":\"*\"}},\"language\":\"lucene\"},\"filter\":[]}"
-      },
-      "title": "Disk IO (Bytes) [Metricbeat System]",
-      "uiStateJSON": "{}",
-      "version": 1,
-      "visState": "{\"title\":\"Disk IO (Bytes) [Metricbeat System]\",\"type\":\"metrics\",\"params\":{\"id\":\"d3c67db0-1b1a-11e7-b09e-037021c4f8df\",\"type\":\"timeseries\",\"series\":[{\"id\":\"d3c67db1-1b1a-11e7-b09e-037021c4f8df\",\"color\":\"rgba(22,165,165,1)\",\"split_mode\":\"everything\",\"metrics\":[{\"id\":\"d3c67db2-1b1a-11e7-b09e-037021c4f8df\",\"type\":\"max\",\"field\":\"system.diskio.read.bytes\"},{\"unit\":\"1s\",\"id\":\"f55b9910-1b1a-11e7-b09e-037021c4f8df\",\"type\":\"derivative\",\"field\":\"d3c67db2-1b1a-11e7-b09e-037021c4f8df\"},{\"unit\":\"\",\"id\":\"dcbbb100-1b93-11e7-8ada-3df93aab833e\",\"type\":\"positive_only\",\"field\":\"f55b9910-1b1a-11e7-b09e-037021c4f8df\"}],\"seperate_axis\":0,\"axis_position\":\"right\",\"formatter\":\"bytes\",\"chart_type\":\"line\",\"line_width\":1,\"point_size\":\"0\",\"fill\":\"1\",\"stacked\":\"none\",\"label\":\"reads\",\"value_template\":\"{{value}}/s\"},{\"id\":\"144124d0-1b1b-11e7-b09e-037021c4f8df\",\"color\":\"rgba(251,158,0,1)\",\"split_mode\":\"everything\",\"metrics\":[{\"id\":\"144124d1-1b1b-11e7-b09e-037021c4f8df\",\"type\":\"max\",\"field\":\"system.diskio.write.bytes\"},{\"unit\":\"1s\",\"id\":\"144124d2-1b1b-11e7-b09e-037021c4f8df\",\"type\":\"derivative\",\"field\":\"144124d1-1b1b-11e7-b09e-037021c4f8df\"},{\"script\":\"params.rate > 0 ? params.rate * -1 : 0\",\"id\":\"144124d4-1b1b-11e7-b09e-037021c4f8df\",\"type\":\"calculation\",\"variables\":[{\"id\":\"144124d3-1b1b-11e7-b09e-037021c4f8df\",\"name\":\"rate\",\"field\":\"144124d2-1b1b-11e7-b09e-037021c4f8df\"}]}],\"seperate_axis\":0,\"axis_position\":\"right\",\"formatter\":\"bytes\",\"chart_type\":\"line\",\"line_width\":1,\"point_size\":\"0\",\"fill\":\"1\",\"stacked\":\"none\",\"label\":\"writes\",\"value_template\":\"{{value}}/s\"}],\"time_field\":\"@timestamp\",\"index_pattern\":\"*\",\"interval\":\"auto\",\"axis_position\":\"left\",\"axis_formatter\":\"number\",\"show_legend\":1,\"filter\":\"\",\"show_grid\":1},\"aggs\":[]}"
-    },
-    "_meta": {
-      "savedObjectVersion": 2
-    }
-  },
-  {
-    "_id": "26732e20-1b91-11e7-bec4-a5e9ec5cab8b",
-    "_type": "visualization",
-    "_source": {
-      "description": "",
-      "kibanaSavedObjectMeta": {
-        "searchSourceJSON": "{\"query\":{\"query\":{\"query_string\":{\"query\":\"*\"}},\"language\":\"lucene\"},\"filter\":[]}"
-      },
-      "title": "Load Gauge [Metricbeat System]",
-      "uiStateJSON": "{}",
-      "version": 1,
-      "visState": "{\"title\":\"Load Gauge [Metricbeat System]\",\"type\":\"metrics\",\"params\":{\"id\":\"fdcc6180-1b90-11e7-bec4-a5e9ec5cab8b\",\"type\":\"gauge\",\"series\":[{\"id\":\"fdcc6181-1b90-11e7-bec4-a5e9ec5cab8b\",\"color\":\"#68BC00\",\"split_mode\":\"everything\",\"metrics\":[{\"id\":\"fdcc6182-1b90-11e7-bec4-a5e9ec5cab8b\",\"type\":\"avg\",\"field\":\"system.load.5\"}],\"seperate_axis\":0,\"axis_position\":\"right\",\"formatter\":\"number\",\"chart_type\":\"line\",\"line_width\":1,\"point_size\":1,\"fill\":0.5,\"stacked\":\"none\",\"label\":\"5m Load\"}],\"time_field\":\"@timestamp\",\"index_pattern\":\"*\",\"interval\":\"auto\",\"axis_position\":\"left\",\"axis_formatter\":\"number\",\"show_legend\":1,\"background_color_rules\":[{\"id\":\"feefabd0-1b90-11e7-bec4-a5e9ec5cab8b\"}],\"gauge_color_rules\":[{\"id\":\"ffd94880-1b90-11e7-bec4-a5e9ec5cab8b\"}],\"gauge_width\":10,\"gauge_inner_width\":10,\"gauge_style\":\"half\",\"show_grid\":1},\"aggs\":[]}"
-    },
-    "_meta": {
-      "savedObjectVersion": 2
-    }
-  },
-  {
-    "_id": "34f97ee0-1b96-11e7-8ada-3df93aab833e",
-    "_type": "visualization",
-    "_source": {
-      "description": "",
-      "kibanaSavedObjectMeta": {
-        "searchSourceJSON": "{\"query\":{\"query\":{\"query_string\":{\"query\":\"*\"}},\"language\":\"lucene\"},\"filter\":[]}"
-      },
-      "title": "Disk Usage [Metricbeat System]",
-      "uiStateJSON": "{}",
-      "version": 1,
-      "visState": "{\"title\":\"Disk Usage [Metricbeat System]\",\"type\":\"metrics\",\"params\":{\"axis_formatter\":\"number\",\"axis_position\":\"left\",\"bar_color_rules\":[{\"bar_color\":\"rgba(104,188,0,1)\",\"id\":\"bf525310-1b95-11e7-8ada-3df93aab833e\",\"opperator\":\"gte\",\"value\":0},{\"bar_color\":\"rgba(254,146,0,1)\",\"id\":\"125fc4c0-1b96-11e7-8ada-3df93aab833e\",\"opperator\":\"gte\",\"value\":0.7},{\"bar_color\":\"rgba(211,49,21,1)\",\"id\":\"1a5c7240-1b96-11e7-8ada-3df93aab833e\",\"opperator\":\"gte\",\"value\":0.85}],\"drilldown_url\":\"\",\"filter\":\"-system.filesystem.mount_point:\\\\/run* AND -system.filesystem.mount_point:\\\\/sys* AND -system.filesystem.mount_point:\\\\/dev* AND -system.filesystem.mount_point:\\\\/proc* AND -system.filesystem.mount_point:\\\\/var* AND -system.filesystem.mount_point:\\\\/boot\",\"id\":\"9f7e48a0-1b95-11e7-8ada-3df93aab833e\",\"index_pattern\":\"*\",\"interval\":\"auto\",\"series\":[{\"axis_position\":\"right\",\"chart_type\":\"line\",\"color\":\"#68BC00\",\"fill\":0.5,\"formatter\":\"percent\",\"id\":\"9f7e48a1-1b95-11e7-8ada-3df93aab833e\",\"line_width\":1,\"metrics\":[{\"field\":\"system.filesystem.used.pct\",\"id\":\"9f7e48a2-1b95-11e7-8ada-3df93aab833e\",\"type\":\"avg\"}],\"point_size\":1,\"seperate_axis\":0,\"split_mode\":\"terms\",\"stacked\":\"none\",\"terms_field\":\"system.filesystem.mount_point\"}],\"show_legend\":1,\"time_field\":\"@timestamp\",\"type\":\"top_n\",\"show_grid\":1},\"aggs\":[]}"
-    },
-    "_meta": {
-      "savedObjectVersion": 2
-    }
-  },
-  {
-    "_id": "19e123b0-4d5a-11e7-aee5-fdc812cc3bec",
-    "_type": "visualization",
-    "_source": {
-      "description": "",
-      "kibanaSavedObjectMeta": {
-        "searchSourceJSON": "{\"query\":{\"query\":{\"query_string\":{\"query\":\"*\"}},\"language\":\"lucene\"},\"filter\":[]}"
-      },
-      "title": "Swap usage [Metricbeat System]",
-      "uiStateJSON": "{}",
-      "version": 1,
-      "visState": "{\"title\":\"Swap usage [Metricbeat System]\",\"type\":\"metrics\",\"params\":{\"id\":\"cee2fd20-4d59-11e7-aee5-fdc812cc3bec\",\"type\":\"gauge\",\"series\":[{\"id\":\"cee2fd21-4d59-11e7-aee5-fdc812cc3bec\",\"color\":\"#68BC00\",\"split_mode\":\"everything\",\"metrics\":[{\"id\":\"cee2fd22-4d59-11e7-aee5-fdc812cc3bec\",\"type\":\"avg\",\"field\":\"system.memory.swap.used.pct\"}],\"seperate_axis\":0,\"axis_position\":\"right\",\"formatter\":\"percent\",\"chart_type\":\"line\",\"line_width\":1,\"point_size\":1,\"fill\":0.5,\"stacked\":\"none\",\"label\":\"Swap usage\"}],\"time_field\":\"@timestamp\",\"index_pattern\":\"*\",\"interval\":\"auto\",\"axis_position\":\"left\",\"axis_formatter\":\"number\",\"show_legend\":1,\"gauge_color_rules\":[{\"value\":0,\"id\":\"d17c1e90-4d59-11e7-aee5-fdc812cc3bec\",\"gauge\":\"rgba(104,188,0,1)\",\"opperator\":\"gte\"},{\"value\":0.7,\"id\":\"fc1d3490-4d59-11e7-aee5-fdc812cc3bec\",\"gauge\":\"rgba(251,158,0,1)\",\"opperator\":\"gte\"},{\"value\":0.85,\"id\":\"0e204240-4d5a-11e7-aee5-fdc812cc3bec\",\"gauge\":\"rgba(211,49,21,1)\",\"opperator\":\"gte\"}],\"gauge_width\":10,\"gauge_inner_width\":10,\"gauge_style\":\"half\",\"gauge_max\":\"\",\"filter\":\"\",\"show_grid\":1},\"aggs\":[]}"
-    },
-    "_meta": {
-      "savedObjectVersion": 2
-    }
-  },
-  {
-    "_id": "d2e80340-4d5c-11e7-aa29-87a97a796de6",
-    "_type": "visualization",
-    "_source": {
-      "description": "",
-      "kibanaSavedObjectMeta": {
-        "searchSourceJSON": "{\"query\":{\"query_string\":{\"query\":\"*\"}},\"filter\":[]}"
-      },
-      "title": "Memory usage vs total",
-      "uiStateJSON": "{}",
-      "version": 1,
-      "visState": "{\"title\":\"Memory usage vs total\",\"type\":\"metrics\",\"params\":{\"id\":\"6bc65720-4d5c-11e7-aa29-87a97a796de6\",\"type\":\"metric\",\"series\":[{\"id\":\"6bc65721-4d5c-11e7-aa29-87a97a796de6\",\"color\":\"#68BC00\",\"split_mode\":\"everything\",\"metrics\":[{\"id\":\"6bc65722-4d5c-11e7-aa29-87a97a796de6\",\"type\":\"avg\",\"field\":\"system.memory.actual.used.bytes\"}],\"seperate_axis\":0,\"axis_position\":\"right\",\"formatter\":\"bytes\",\"chart_type\":\"line\",\"line_width\":1,\"point_size\":1,\"fill\":0.5,\"stacked\":\"none\",\"label\":\"Memory usage\"},{\"id\":\"b8fe6820-4d5c-11e7-aa29-87a97a796de6\",\"color\":\"#68BC00\",\"split_mode\":\"everything\",\"metrics\":[{\"id\":\"b8fe6821-4d5c-11e7-aa29-87a97a796de6\",\"type\":\"avg\",\"field\":\"system.memory.total\"}],\"seperate_axis\":0,\"axis_position\":\"right\",\"formatter\":\"bytes\",\"chart_type\":\"line\",\"line_width\":1,\"point_size\":1,\"fill\":0.5,\"stacked\":\"none\",\"label\":\"Total Memory\"}],\"time_field\":\"@timestamp\",\"index_pattern\":\"*\",\"interval\":\"auto\",\"axis_position\":\"left\",\"axis_formatter\":\"number\",\"show_legend\":1,\"background_color_rules\":[{\"id\":\"6f7618b0-4d5c-11e7-aa29-87a97a796de6\"}]},\"aggs\":[],\"listeners\":{}}"
-    },
-    "_meta": {
-      "savedObjectVersion": 2
-    }
-  },
-  {
-    "_id": "96976150-4d5d-11e7-aa29-87a97a796de6",
-    "_type": "visualization",
-    "_source": {
-      "description": "",
-      "kibanaSavedObjectMeta": {
-        "searchSourceJSON": "{\"query\":{\"query\":{\"query_string\":{\"query\":\"*\"}},\"language\":\"lucene\"},\"filter\":[]}"
-      },
-      "title": "Packetloss [Metricbeat System]",
-      "uiStateJSON": "{}",
-      "version": 1,
-      "visState": "{\"title\":\"Packetloss [Metricbeat System]\",\"type\":\"metrics\",\"params\":{\"id\":\"6984af10-4d5d-11e7-aa29-87a97a796de6\",\"type\":\"metric\",\"series\":[{\"id\":\"6984af11-4d5d-11e7-aa29-87a97a796de6\",\"color\":\"#68BC00\",\"split_mode\":\"everything\",\"metrics\":[{\"id\":\"6984af12-4d5d-11e7-aa29-87a97a796de6\",\"type\":\"max\",\"field\":\"system.network.in.dropped\"}],\"seperate_axis\":0,\"axis_position\":\"right\",\"formatter\":\"number\",\"chart_type\":\"line\",\"line_width\":1,\"point_size\":1,\"fill\":0.5,\"stacked\":\"none\",\"label\":\"In Packetloss\"},{\"id\":\"ac2e6b30-4d5d-11e7-aa29-87a97a796de6\",\"color\":\"#68BC00\",\"split_mode\":\"everything\",\"metrics\":[{\"id\":\"ac2e6b31-4d5d-11e7-aa29-87a97a796de6\",\"type\":\"max\",\"field\":\"system.network.out.dropped\"}],\"seperate_axis\":0,\"axis_position\":\"right\",\"formatter\":\"number\",\"chart_type\":\"line\",\"line_width\":1,\"point_size\":1,\"fill\":0.5,\"stacked\":\"none\",\"label\":\"Out Packetloss\"}],\"time_field\":\"@timestamp\",\"index_pattern\":\"*\",\"interval\":\"auto\",\"axis_position\":\"left\",\"axis_formatter\":\"number\",\"show_legend\":1,\"background_color_rules\":[{\"id\":\"6ba9b1f0-4d5d-11e7-aa29-87a97a796de6\"}],\"show_grid\":1},\"aggs\":[]}"
-    },
-    "_meta": {
-      "savedObjectVersion": 2
-    }
-  },
-  {
-    "_id": "99381c80-4d60-11e7-9a4c-ed99bbcaa42b",
-    "_type": "visualization",
-    "_source": {
-      "description": "",
-      "kibanaSavedObjectMeta": {
-        "searchSourceJSON": "{\"query\":{\"query\":{\"query_string\":{\"query\":\"*\"}},\"language\":\"lucene\"},\"filter\":[]}"
-      },
-      "title": "Interfaces by Incoming traffic [Metricbeat System]",
-      "uiStateJSON": "{}",
-      "version": 1,
-      "visState": "{\"title\":\"Interfaces by Incoming traffic [Metricbeat System]\",\"type\":\"metrics\",\"params\":{\"id\":\"42ceae90-4d60-11e7-9a4c-ed99bbcaa42b\",\"type\":\"top_n\",\"series\":[{\"id\":\"42ced5a0-4d60-11e7-9a4c-ed99bbcaa42b\",\"color\":\"#68BC00\",\"split_mode\":\"terms\",\"metrics\":[{\"id\":\"42ced5a1-4d60-11e7-9a4c-ed99bbcaa42b\",\"type\":\"avg\",\"field\":\"system.network.in.bytes\"}],\"seperate_axis\":0,\"axis_position\":\"right\",\"formatter\":\"bytes\",\"chart_type\":\"line\",\"line_width\":1,\"point_size\":1,\"fill\":0.5,\"stacked\":\"none\",\"label\":\"Interfaces by Incoming traffic\",\"terms_field\":\"system.network.name\",\"terms_order_by\":\"42ced5a1-4d60-11e7-9a4c-ed99bbcaa42b\"}],\"time_field\":\"@timestamp\",\"index_pattern\":\"*\",\"interval\":\"auto\",\"axis_position\":\"left\",\"axis_formatter\":\"number\",\"show_legend\":1,\"bar_color_rules\":[{\"id\":\"44596d40-4d60-11e7-9a4c-ed99bbcaa42b\"}],\"show_grid\":1},\"aggs\":[]}"
-    },
-    "_meta": {
-      "savedObjectVersion": 2
-    }
-  },
-  {
-    "_id": "c5e3cf90-4d60-11e7-9a4c-ed99bbcaa42b",
-    "_type": "visualization",
-    "_source": {
-      "description": "",
-      "kibanaSavedObjectMeta": {
-        "searchSourceJSON": "{\"query\":{\"query\":{\"query_string\":{\"query\":\"*\"}},\"language\":\"lucene\"},\"filter\":[]}"
-      },
-      "title": "Interfaces by Outgoing traffic [Metricbeat System]",
-      "uiStateJSON": "{}",
-      "version": 1,
-      "visState": "{\"title\":\"Interfaces by Outgoing traffic [Metricbeat System]\",\"type\":\"metrics\",\"params\":{\"id\":\"9cdba910-4d60-11e7-9a4c-ed99bbcaa42b\",\"type\":\"top_n\",\"series\":[{\"id\":\"9cdba911-4d60-11e7-9a4c-ed99bbcaa42b\",\"color\":\"#68BC00\",\"split_mode\":\"terms\",\"metrics\":[{\"id\":\"9cdba912-4d60-11e7-9a4c-ed99bbcaa42b\",\"type\":\"avg\",\"field\":\"system.network.out.bytes\"}],\"seperate_axis\":0,\"axis_position\":\"right\",\"formatter\":\"bytes\",\"chart_type\":\"line\",\"line_width\":1,\"point_size\":1,\"fill\":0.5,\"stacked\":\"none\",\"terms_field\":\"system.network.name\",\"terms_order_by\":\"9cdba912-4d60-11e7-9a4c-ed99bbcaa42b\",\"label\":\"Interfaces by Outgoing traffic\"}],\"time_field\":\"@timestamp\",\"index_pattern\":\"*\",\"interval\":\"auto\",\"axis_position\":\"left\",\"axis_formatter\":\"number\",\"show_legend\":1,\"bar_color_rules\":[{\"id\":\"9db20be0-4d60-11e7-9a4c-ed99bbcaa42b\"}],\"show_grid\":1},\"aggs\":[]}"
-    },
-    "_meta": {
-      "savedObjectVersion": 2
-    }
-  },
-  {
-    "_id": "590a60f0-5d87-11e7-8884-1bb4c3b890e4",
-    "_type": "visualization",
-    "_source": {
-      "description": "",
-      "kibanaSavedObjectMeta": {
-        "searchSourceJSON": "{\n  \"index\": \"metricbeat-*\",\n  \"query\": {\n    \"query_string\": {\n      \"query\": \"*\",\n      \"analyze_wildcard\": true\n    }\n  },\n  \"filter\": []\n}"
-      },
-      "title": "Number of processes [Metricbeat System]",
-      "uiStateJSON": "{\n  \"vis\": {\n    \"defaultColors\": {\n      \"0 - 100\": \"rgb(0,104,55)\"\n    }\n  }\n}",
-      "version": 1,
-      "visState": "{\n  \"title\": \"Number of processes\",\n  \"type\": \"metric\",\n  \"params\": {\n    \"addLegend\": false,\n    \"addTooltip\": true,\n    \"gauge\": {\n      \"autoExtend\": false,\n      \"backStyle\": \"Full\",\n      \"colorSchema\": \"Green to Red\",\n      \"colorsRange\": [\n        {\n          \"from\": 0,\n          \"to\": 100\n        }\n      ],\n      \"gaugeColorMode\": \"None\",\n      \"gaugeStyle\": \"Full\",\n      \"gaugeType\": \"Metric\",\n      \"invertColors\": false,\n      \"labels\": {\n        \"color\": \"black\",\n        \"show\": true\n      },\n      \"orientation\": \"vertical\",\n      \"percentageMode\": false,\n      \"scale\": {\n        \"color\": \"#333\",\n        \"labels\": false,\n        \"show\": false,\n        \"width\": 2\n      },\n      \"style\": {\n        \"bgColor\": false,\n        \"bgFill\": \"#000\",\n        \"fontSize\": 60,\n        \"labelColor\": false,\n        \"subText\": \"\"\n      },\n      \"type\": \"simple\",\n      \"useRange\": false,\n      \"verticalSplit\": false\n    },\n    \"type\": \"gauge\"\n  },\n  \"aggs\": [\n    {\n      \"id\": \"1\",\n      \"enabled\": true,\n      \"type\": \"cardinality\",\n      \"schema\": \"metric\",\n      \"params\": {\n        \"field\": \"system.process.pid\",\n        \"customLabel\": \"Processes\"\n      }\n    }\n  ],\n  \"listeners\": {}\n}"
-    },
-    "_meta": {
-      "savedObjectVersion": 2
-    }
-  },
-  {
-    "_id": "3d65d450-a9c3-11e7-af20-67db8aecb295",
-    "_type": "visualization",
-    "_source": {
-      "description": "",
-      "kibanaSavedObjectMeta": {
-        "searchSourceJSON": "{}"
-      },
-      "title": "Tip [Metricbeat System]",
-      "uiStateJSON": "{}",
-      "version": 1,
-      "visState": "{\"title\":\"Tip [Metricbeat System]\",\"type\":\"markdown\",\"params\":{\"fontSize\":12,\"markdown\":\"**TIP:** To select another host, go to the [System Overview](#/dashboard/Metricbeat-system-overview) dashboard and double-click a host name.\"},\"aggs\":[]}"
-    },
-    "_meta": {
-      "savedObjectVersion": 2
-    }
-  },
-  {
-    "_id": "022a54c0-2bf5-11e7-859b-f78b612cde28",
-    "_type": "visualization",
-    "_source": {
-      "description": "",
-      "kibanaSavedObjectMeta": {
-        "searchSourceJSON": "{\"query\":{\"query_string\":{\"query\":\"*\"}},\"filter\":[]}"
-      },
-      "title": "Available pods per deployment [Metricbeat Kubernetes]",
-      "uiStateJSON": "{}",
-      "version": 1,
-      "visState": "{\"title\":\"Available pods per deployment [Metricbeat Kubernetes]\",\"type\":\"metrics\",\"params\":{\"axis_formatter\":\"number\",\"axis_position\":\"left\",\"filter\":\"metricset.module:kubernetes AND metricset.name:state_deployment\",\"id\":\"117fadf0-30df-11e7-8df8-6d3604a72912\",\"index_pattern\":\"*\",\"interval\":\"auto\",\"series\":[{\"axis_position\":\"right\",\"chart_type\":\"line\",\"color\":\"rgba(104,188,0,1)\",\"fill\":0.5,\"formatter\":\"number\",\"id\":\"64456840-30df-11e7-8df8-6d3604a72912\",\"label\":\"Available pods\",\"line_width\":1,\"metrics\":[{\"field\":\"kubernetes.deployment.replicas.available\",\"id\":\"64456841-30df-11e7-8df8-6d3604a72912\",\"type\":\"sum\"}],\"point_size\":1,\"seperate_axis\":0,\"split_filters\":[{\"color\":\"#68BC00\",\"id\":\"53d35ad0-30df-11e7-8df8-6d3604a72912\"}],\"split_mode\":\"terms\",\"stacked\":\"stacked\",\"terms_field\":\"kubernetes.deployment.name\",\"terms_size\":\"10000\"}],\"show_legend\":1,\"time_field\":\"@timestamp\",\"type\":\"timeseries\"},\"aggs\":[],\"listeners\":{}}"
-    },
-    "_meta": {
-      "savedObjectVersion": 2
-    }
-  },
-  {
-    "_id": "44f12b40-2bf4-11e7-859b-f78b612cde28",
-    "_type": "visualization",
-    "_source": {
-      "description": "",
-      "kibanaSavedObjectMeta": {
-        "searchSourceJSON": "{\"query\":{\"query_string\":{\"query\":\"*\"}},\"filter\":[]}"
-      },
-      "title": "CPU usage by node  [Metricbeat Kubernetes]",
-      "uiStateJSON": "{}",
-      "version": 1,
-      "visState": "{\"title\":\"CPU usage by node  [Metricbeat Kubernetes]\",\"type\":\"metrics\",\"params\":{\"id\":\"0d5c6b10-2bf2-11e7-859b-f78b612cde28\",\"type\":\"timeseries\",\"series\":[{\"id\":\"0d5c9220-2bf2-11e7-859b-f78b612cde28\",\"color\":\"#68BC00\",\"split_mode\":\"terms\",\"metrics\":[{\"id\":\"0d5c9221-2bf2-11e7-859b-f78b612cde28\",\"type\":\"sum\",\"field\":\"kubernetes.container.cpu.usage.nanocores\"}],\"seperate_axis\":0,\"axis_position\":\"right\",\"formatter\":\"0.0a\",\"chart_type\":\"line\",\"line_width\":1,\"point_size\":1,\"fill\":\"0.5\",\"stacked\":\"none\",\"terms_field\":\"kubernetes.node.name\",\"terms_size\":\"10000\",\"terms_order_by\":\"0d5c9221-2bf2-11e7-859b-f78b612cde28\",\"value_template\":\"{{value}} nanocores\",\"override_index_pattern\":0,\"series_interval\":\"10s\",\"series_time_field\":\"@timestamp\"},{\"id\":\"22f65d40-31a7-11e7-84cc-096d2b38e6e5\",\"color\":\"rgba(211,49,21,1)\",\"split_mode\":\"terms\",\"metrics\":[{\"id\":\"22f65d41-31a7-11e7-84cc-096d2b38e6e5\",\"type\":\"avg\",\"field\":\"kubernetes.node.cpu.capacity.cores\"},{\"script\":\"params.cores * 1000000000\",\"id\":\"4af4c390-34d6-11e7-be88-cb6a123dc1bb\",\"type\":\"calculation\",\"variables\":[{\"id\":\"4cd32080-34d6-11e7-be88-cb6a123dc1bb\",\"name\":\"cores\",\"field\":\"22f65d41-31a7-11e7-84cc-096d2b38e6e5\"}]}],\"seperate_axis\":0,\"axis_position\":\"right\",\"formatter\":\"0.0a\",\"chart_type\":\"line\",\"line_width\":1,\"point_size\":\"0\",\"fill\":\"0\",\"stacked\":\"none\",\"terms_field\":\"kubernetes.node.name\",\"terms_size\":\"10000\",\"terms_order_by\":\"22f65d41-31a7-11e7-84cc-096d2b38e6e5\",\"value_template\":\"{{value}} nanocores\",\"hide_in_legend\":1,\"label\":\"\",\"override_index_pattern\":0,\"series_interval\":\"10s\",\"series_time_field\":\"@timestamp\"}],\"time_field\":\"@timestamp\",\"index_pattern\":\"*\",\"interval\":\"auto\",\"axis_position\":\"left\",\"axis_formatter\":\"number\",\"show_legend\":1,\"background_color_rules\":[{\"id\":\"12c1f2f0-2bf2-11e7-859b-f78b612cde28\"}],\"bar_color_rules\":[{\"id\":\"1373ddd0-2bf2-11e7-859b-f78b612cde28\"}],\"gauge_color_rules\":[{\"id\":\"140e4910-2bf2-11e7-859b-f78b612cde28\"}],\"gauge_width\":10,\"gauge_inner_width\":10,\"gauge_style\":\"half\",\"filter\":\"metricset.module:kubernetes AND (metricset.name:container OR metricset.name:state_node)\"},\"aggs\":[],\"listeners\":{}}"
-    },
-    "_meta": {
-      "savedObjectVersion": 2
-    }
-  },
-  {
-    "_id": "cd059410-2bfb-11e7-859b-f78b612cde28",
-    "_type": "visualization",
-    "_source": {
-      "description": "",
-      "kibanaSavedObjectMeta": {
-        "searchSourceJSON": "{\"query\":{\"query_string\":{\"query\":\"*\"}},\"filter\":[]}"
-      },
-      "title": "Kubernetes - Deployments",
-      "uiStateJSON": "{}",
-      "version": 1,
-      "visState": "{\"title\":\"Kubernetes - Deployments\",\"type\":\"metrics\",\"params\":{\"id\":\"4c4690b0-30e0-11e7-8df8-6d3604a72912\",\"type\":\"metric\",\"series\":[{\"id\":\"4c4690b1-30e0-11e7-8df8-6d3604a72912\",\"color\":\"#68BC00\",\"split_mode\":\"everything\",\"metrics\":[{\"id\":\"4c4690b2-30e0-11e7-8df8-6d3604a72912\",\"type\":\"cardinality\",\"field\":\"kubernetes.deployment.name\"}],\"seperate_axis\":0,\"axis_position\":\"right\",\"formatter\":\"number\",\"chart_type\":\"line\",\"line_width\":1,\"point_size\":1,\"fill\":0.5,\"stacked\":\"none\",\"terms_field\":\"kubernetes.deployment.name\",\"label\":\"Deployments\"}],\"time_field\":\"@timestamp\",\"index_pattern\":\"*\",\"interval\":\"auto\",\"axis_position\":\"left\",\"axis_formatter\":\"number\",\"show_legend\":1,\"filter\":\"metricset.module:kubernetes AND metricset.name:state_deployment\",\"background_color_rules\":[{\"id\":\"67ee7da0-30e0-11e7-8df8-6d3604a72912\"}],\"bar_color_rules\":[{\"id\":\"68cdba10-30e0-11e7-8df8-6d3604a72912\"}],\"gauge_color_rules\":[{\"id\":\"69765620-30e0-11e7-8df8-6d3604a72912\"}],\"gauge_width\":10,\"gauge_inner_width\":10,\"gauge_style\":\"half\"},\"aggs\":[],\"listeners\":{}}"
-    },
-    "_meta": {
-      "savedObjectVersion": 2
-    }
-  },
-  {
-    "_id": "e1018b90-2bfb-11e7-859b-f78b612cde28",
-    "_type": "visualization",
-    "_source": {
-      "description": "",
-      "kibanaSavedObjectMeta": {
-        "searchSourceJSON": "{\"query\":{\"query_string\":{\"query\":\"*\"}},\"filter\":[]}"
-      },
-      "title": "Kubernetes - Desired pods",
-      "uiStateJSON": "{}",
-      "version": 1,
-      "visState": "{\"title\":\"Kubernetes - Desired pods\",\"type\":\"metrics\",\"params\":{\"id\":\"2fe9d3b0-30d5-11e7-8df8-6d3604a72912\",\"type\":\"metric\",\"series\":[{\"id\":\"2fe9d3b1-30d5-11e7-8df8-6d3604a72912\",\"color\":\"#68BC00\",\"split_mode\":\"everything\",\"metrics\":[{\"id\":\"54cf79a0-30d5-11e7-8df8-6d3604a72912\",\"type\":\"sum\",\"field\":\"kubernetes.deployment.replicas.desired\"}],\"seperate_axis\":0,\"axis_position\":\"right\",\"formatter\":\"number\",\"chart_type\":\"line\",\"line_width\":1,\"point_size\":1,\"fill\":0.5,\"stacked\":\"none\",\"label\":\"Desired Pods\",\"override_index_pattern\":1,\"series_time_field\":\"@timestamp\",\"series_interval\":\"10s\"}],\"time_field\":\"@timestamp\",\"index_pattern\":\"*\",\"interval\":\"auto\",\"axis_position\":\"left\",\"axis_formatter\":\"number\",\"show_legend\":1,\"background_color_rules\":[{\"id\":\"508ffb30-30d5-11e7-8df8-6d3604a72912\"}],\"gauge_color_rules\":[{\"id\":\"50f9b980-30d5-11e7-8df8-6d3604a72912\"}],\"gauge_width\":\"10\",\"gauge_inner_width\":\"10\",\"gauge_style\":\"half\",\"bar_color_rules\":[{\"id\":\"674d83b0-30d5-11e7-8df8-6d3604a72912\"}],\"gauge_max\":\"5\",\"filter\":\"metricset.module:kubernetes AND metricset.name:state_deployment\"},\"aggs\":[],\"listeners\":{}}"
-    },
-    "_meta": {
-      "savedObjectVersion": 2
-    }
-  },
-  {
-    "_id": "d6564360-2bfc-11e7-859b-f78b612cde28",
-    "_type": "visualization",
-    "_source": {
-      "description": "",
-      "kibanaSavedObjectMeta": {
-        "searchSourceJSON": "{\"query\":{\"query_string\":{\"query\":\"*\"}},\"filter\":[]}"
-      },
-      "title": "Memory usage by node  [Metricbeat Kubernetes]",
-      "uiStateJSON": "{}",
-      "version": 1,
-      "visState": "{\"title\":\"Memory usage by node  [Metricbeat Kubernetes]\",\"type\":\"metrics\",\"params\":{\"id\":\"0d5c6b10-2bf2-11e7-859b-f78b612cde28\",\"type\":\"timeseries\",\"series\":[{\"id\":\"0d5c9220-2bf2-11e7-859b-f78b612cde28\",\"color\":\"#68BC00\",\"split_mode\":\"terms\",\"metrics\":[{\"id\":\"0d5c9221-2bf2-11e7-859b-f78b612cde28\",\"type\":\"sum\",\"field\":\"kubernetes.container.memory.usage.bytes\"}],\"seperate_axis\":0,\"axis_position\":\"right\",\"formatter\":\"bytes\",\"chart_type\":\"line\",\"line_width\":1,\"point_size\":1,\"fill\":0.5,\"stacked\":\"none\",\"terms_field\":\"kubernetes.node.name\",\"terms_size\":\"10000\",\"terms_order_by\":\"0d5c9221-2bf2-11e7-859b-f78b612cde28\"},{\"id\":\"8ba3b270-31a7-11e7-84cc-096d2b38e6e5\",\"color\":\"rgba(211,49,21,1)\",\"split_mode\":\"terms\",\"metrics\":[{\"id\":\"8ba3b271-31a7-11e7-84cc-096d2b38e6e5\",\"type\":\"sum\",\"field\":\"kubernetes.node.memory.capacity.bytes\"}],\"seperate_axis\":0,\"axis_position\":\"right\",\"formatter\":\"bytes\",\"chart_type\":\"line\",\"line_width\":1,\"point_size\":\"0\",\"fill\":\"0\",\"stacked\":\"none\",\"terms_field\":\"kubernetes.node.name\",\"terms_size\":\"10000\",\"terms_order_by\":\"8ba3b271-31a7-11e7-84cc-096d2b38e6e5\",\"hide_in_legend\":1,\"label\":\"Node capacity\"}],\"time_field\":\"@timestamp\",\"index_pattern\":\"*\",\"interval\":\"auto\",\"axis_position\":\"left\",\"axis_formatter\":\"number\",\"show_legend\":1,\"background_color_rules\":[{\"id\":\"12c1f2f0-2bf2-11e7-859b-f78b612cde28\"}],\"bar_color_rules\":[{\"id\":\"1373ddd0-2bf2-11e7-859b-f78b612cde28\"}],\"gauge_color_rules\":[{\"id\":\"140e4910-2bf2-11e7-859b-f78b612cde28\"}],\"gauge_width\":10,\"gauge_inner_width\":10,\"gauge_style\":\"half\",\"filter\":\"metricset.module:kubernetes AND (metricset.name:container OR metricset.name:state_node)\"},\"aggs\":[],\"listeners\":{}}"
-    },
-    "_meta": {
-      "savedObjectVersion": 2
-    }
-  },
-  {
-    "_id": "16fa4470-2bfd-11e7-859b-f78b612cde28",
-    "_type": "visualization",
-    "_source": {
-      "description": "",
-      "kibanaSavedObjectMeta": {
-        "searchSourceJSON": "{\"query\":{\"query_string\":{\"query\":\"*\"}},\"filter\":[]}"
-      },
-      "title": "Network in by node  [Metricbeat Kubernetes]",
-      "uiStateJSON": "{}",
-      "version": 1,
-      "visState": "{\"title\":\"Network in by node  [Metricbeat Kubernetes]\",\"type\":\"metrics\",\"params\":{\"id\":\"0d5c6b10-2bf2-11e7-859b-f78b612cde28\",\"type\":\"timeseries\",\"series\":[{\"id\":\"0d5c9220-2bf2-11e7-859b-f78b612cde28\",\"color\":\"rgba(104,188,0,1)\",\"split_mode\":\"terms\",\"metrics\":[{\"id\":\"0d5c9221-2bf2-11e7-859b-f78b612cde28\",\"type\":\"sum\",\"field\":\"kubernetes.pod.network.rx.bytes\"},{\"unit\":\"\",\"id\":\"494fc310-2bf7-11e7-859b-f78b612cde28\",\"type\":\"derivative\",\"field\":\"0d5c9221-2bf2-11e7-859b-f78b612cde28\"},{\"unit\":\"\",\"id\":\"37c72a70-3598-11e7-aa4a-8313a0c92a88\",\"type\":\"positive_only\",\"field\":\"494fc310-2bf7-11e7-859b-f78b612cde28\"}],\"seperate_axis\":0,\"axis_position\":\"right\",\"formatter\":\"bytes\",\"chart_type\":\"line\",\"line_width\":1,\"point_size\":1,\"fill\":0.5,\"stacked\":\"stacked\",\"terms_field\":\"kubernetes.node.name\",\"terms_size\":\"100000\",\"terms_order_by\":\"0d5c9221-2bf2-11e7-859b-f78b612cde28\",\"label\":\"\"}],\"time_field\":\"@timestamp\",\"index_pattern\":\"*\",\"interval\":\"auto\",\"axis_position\":\"left\",\"axis_formatter\":\"number\",\"show_legend\":1,\"background_color_rules\":[{\"id\":\"12c1f2f0-2bf2-11e7-859b-f78b612cde28\"}],\"bar_color_rules\":[{\"id\":\"1373ddd0-2bf2-11e7-859b-f78b612cde28\"}],\"gauge_color_rules\":[{\"id\":\"140e4910-2bf2-11e7-859b-f78b612cde28\"}],\"gauge_width\":10,\"gauge_inner_width\":10,\"gauge_style\":\"half\",\"filter\":\"metricset.module:kubernetes AND metricset.name:pod\"},\"aggs\":[],\"listeners\":{}}"
-    },
-    "_meta": {
-      "savedObjectVersion": 2
-    }
-  },
-  {
-    "_id": "294546b0-30d6-11e7-8df8-6d3604a72912",
-    "_type": "visualization",
-    "_source": {
-      "description": "",
-      "kibanaSavedObjectMeta": {
-        "searchSourceJSON": "{\"query\":{\"query_string\":{\"query\":\"*\"}},\"filter\":[]}"
-      },
-      "title": "Network out by node  [Metricbeat Kubernetes]",
-      "uiStateJSON": "{}",
-      "version": 1,
-      "visState": "{\"title\":\"Network out by node  [Metricbeat Kubernetes]\",\"type\":\"metrics\",\"params\":{\"id\":\"0d5c6b10-2bf2-11e7-859b-f78b612cde28\",\"type\":\"timeseries\",\"series\":[{\"id\":\"0d5c9220-2bf2-11e7-859b-f78b612cde28\",\"color\":\"rgba(104,188,0,1)\",\"split_mode\":\"terms\",\"metrics\":[{\"id\":\"0d5c9221-2bf2-11e7-859b-f78b612cde28\",\"type\":\"sum\",\"field\":\"kubernetes.pod.network.tx.bytes\"},{\"unit\":\"\",\"id\":\"494fc310-2bf7-11e7-859b-f78b612cde28\",\"type\":\"derivative\",\"field\":\"0d5c9221-2bf2-11e7-859b-f78b612cde28\"},{\"unit\":\"\",\"id\":\"244c70e0-3598-11e7-aa4a-8313a0c92a88\",\"type\":\"positive_only\",\"field\":\"494fc310-2bf7-11e7-859b-f78b612cde28\"}],\"seperate_axis\":0,\"axis_position\":\"right\",\"formatter\":\"bytes\",\"chart_type\":\"line\",\"line_width\":1,\"point_size\":1,\"fill\":0.5,\"stacked\":\"stacked\",\"terms_field\":\"kubernetes.node.name\",\"terms_size\":\"10000\",\"terms_order_by\":\"0d5c9221-2bf2-11e7-859b-f78b612cde28\",\"label\":\"\"}],\"time_field\":\"@timestamp\",\"index_pattern\":\"*\",\"interval\":\"auto\",\"axis_position\":\"left\",\"axis_formatter\":\"number\",\"show_legend\":1,\"background_color_rules\":[{\"id\":\"12c1f2f0-2bf2-11e7-859b-f78b612cde28\"}],\"bar_color_rules\":[{\"id\":\"1373ddd0-2bf2-11e7-859b-f78b612cde28\"}],\"gauge_color_rules\":[{\"id\":\"140e4910-2bf2-11e7-859b-f78b612cde28\"}],\"gauge_width\":10,\"gauge_inner_width\":10,\"gauge_style\":\"half\",\"filter\":\"metricset.module:kubernetes AND metricset.name:pod\"},\"aggs\":[],\"listeners\":{}}"
-    },
-    "_meta": {
-      "savedObjectVersion": 2
-    }
-  },
-  {
-    "_id": "408fccf0-30d6-11e7-8df8-6d3604a72912",
-    "_type": "visualization",
-    "_source": {
-      "description": "",
-      "kibanaSavedObjectMeta": {
-        "searchSourceJSON": "{\"query\":{\"query_string\":{\"query\":\"*\"}},\"filter\":[]}"
-      },
-      "title": "Kubernetes - Nodes",
-      "uiStateJSON": "{}",
-      "version": 1,
-      "visState": "{\"title\":\"Kubernetes - Nodes\",\"type\":\"metrics\",\"params\":{\"id\":\"4c4690b0-30e0-11e7-8df8-6d3604a72912\",\"type\":\"metric\",\"series\":[{\"id\":\"4c4690b1-30e0-11e7-8df8-6d3604a72912\",\"color\":\"#68BC00\",\"split_mode\":\"everything\",\"metrics\":[{\"id\":\"4c4690b2-30e0-11e7-8df8-6d3604a72912\",\"type\":\"cardinality\",\"field\":\"kubernetes.node.name\"}],\"seperate_axis\":0,\"axis_position\":\"right\",\"formatter\":\"number\",\"chart_type\":\"line\",\"line_width\":1,\"point_size\":1,\"fill\":0.5,\"stacked\":\"none\",\"terms_field\":\"kubernetes.deployment.name\",\"label\":\"Nodes\"}],\"time_field\":\"@timestamp\",\"index_pattern\":\"*\",\"interval\":\"auto\",\"axis_position\":\"left\",\"axis_formatter\":\"number\",\"show_legend\":1,\"filter\":\"metricset.module:kubernetes AND metricset.name:state_node\",\"background_color_rules\":[{\"id\":\"67ee7da0-30e0-11e7-8df8-6d3604a72912\"}],\"bar_color_rules\":[{\"id\":\"68cdba10-30e0-11e7-8df8-6d3604a72912\"}],\"gauge_color_rules\":[{\"id\":\"69765620-30e0-11e7-8df8-6d3604a72912\"}],\"gauge_width\":10,\"gauge_inner_width\":10,\"gauge_style\":\"half\"},\"aggs\":[],\"listeners\":{}}"
-    },
-    "_meta": {
-      "savedObjectVersion": 2
-    }
-  },
-  {
-    "_id": "58e644f0-30d6-11e7-8df8-6d3604a72912",
-    "_type": "visualization",
-    "_source": {
-      "description": "",
-      "kibanaSavedObjectMeta": {
-        "searchSourceJSON": "{\"query\":{\"query_string\":{\"query\":\"*\"}},\"filter\":[]}"
-      },
-      "title": "Top CPU intensive pods  [Metricbeat Kubernetes]",
-      "uiStateJSON": "{}",
-      "version": 1,
-      "visState": "{\"title\":\"Top CPU intensive pods  [Metricbeat Kubernetes]\",\"type\":\"metrics\",\"params\":{\"id\":\"5d3692a0-2bfc-11e7-859b-f78b612cde28\",\"type\":\"top_n\",\"series\":[{\"id\":\"5d3692a1-2bfc-11e7-859b-f78b612cde28\",\"color\":\"#68BC00\",\"split_mode\":\"terms\",\"metrics\":[{\"id\":\"5d3692a2-2bfc-11e7-859b-f78b612cde28\",\"type\":\"sum\",\"field\":\"kubernetes.container.cpu.usage.core.ns\"},{\"unit\":\"1s\",\"id\":\"6c905240-2bfc-11e7-859b-f78b612cde28\",\"type\":\"derivative\",\"field\":\"5d3692a2-2bfc-11e7-859b-f78b612cde28\"},{\"unit\":\"\",\"id\":\"9a51f710-359d-11e7-aa4a-8313a0c92a88\",\"type\":\"positive_only\",\"field\":\"6c905240-2bfc-11e7-859b-f78b612cde28\"}],\"seperate_axis\":0,\"axis_position\":\"right\",\"formatter\":\"0.0 a\",\"chart_type\":\"line\",\"line_width\":1,\"point_size\":1,\"fill\":0.5,\"stacked\":\"none\",\"terms_field\":\"kubernetes.pod.name\",\"terms_order_by\":\"5d3692a2-2bfc-11e7-859b-f78b612cde28\",\"value_template\":\"{{value}} ns\",\"offset_time\":\"\",\"override_index_pattern\":0}],\"time_field\":\"@timestamp\",\"index_pattern\":\"*\",\"interval\":\"auto\",\"axis_position\":\"left\",\"axis_formatter\":\"number\",\"show_legend\":1,\"bar_color_rules\":[{\"id\":\"802104d0-2bfc-11e7-859b-f78b612cde28\"}],\"filter\":\"metricset.module:kubernetes AND metricset.name:container\"},\"aggs\":[],\"listeners\":{}}"
-    },
-    "_meta": {
-      "savedObjectVersion": 2
-    }
-  },
-  {
-    "_id": "a4c9d360-30df-11e7-8df8-6d3604a72912",
-    "_type": "visualization",
-    "_source": {
-      "description": "",
-      "kibanaSavedObjectMeta": {
-        "searchSourceJSON": "{\"query\":{\"query_string\":{\"query\":\"*\"}},\"filter\":[]}"
-      },
-      "title": "Top memory intensive pods  [Metricbeat Kubernetes]",
-      "uiStateJSON": "{}",
-      "version": 1,
-      "visState": "{\"title\":\"Top memory intensive pods  [Metricbeat Kubernetes]\",\"type\":\"metrics\",\"params\":{\"id\":\"5d3692a0-2bfc-11e7-859b-f78b612cde28\",\"type\":\"top_n\",\"series\":[{\"id\":\"5d3692a1-2bfc-11e7-859b-f78b612cde28\",\"color\":\"#68BC00\",\"split_mode\":\"terms\",\"metrics\":[{\"id\":\"5d3692a2-2bfc-11e7-859b-f78b612cde28\",\"type\":\"sum\",\"field\":\"kubernetes.container.memory.usage.bytes\"}],\"seperate_axis\":0,\"axis_position\":\"right\",\"formatter\":\"bytes\",\"chart_type\":\"line\",\"line_width\":1,\"point_size\":1,\"fill\":0.5,\"stacked\":\"none\",\"terms_field\":\"kubernetes.pod.name\",\"terms_order_by\":\"5d3692a2-2bfc-11e7-859b-f78b612cde28\",\"value_template\":\"\",\"offset_time\":\"\",\"override_index_pattern\":0,\"terms_size\":\"10\"}],\"time_field\":\"@timestamp\",\"index_pattern\":\"*\",\"interval\":\"auto\",\"axis_position\":\"left\",\"axis_formatter\":\"number\",\"show_legend\":1,\"bar_color_rules\":[{\"id\":\"802104d0-2bfc-11e7-859b-f78b612cde28\"}],\"filter\":\"metricset.module:kubernetes AND metricset.name:container\"},\"aggs\":[],\"listeners\":{}}"
-    },
-    "_meta": {
-      "savedObjectVersion": 2
-    }
-  },
-  {
-    "_id": "174a6ad0-30e0-11e7-8df8-6d3604a72912",
-    "_type": "visualization",
-    "_source": {
-      "description": "",
-      "kibanaSavedObjectMeta": {
-        "searchSourceJSON": "{\"query\":{\"query_string\":{\"query\":\"*\"}},\"filter\":[]}"
-      },
-      "title": "Kubernetes - Unavailable pods",
-      "uiStateJSON": "{}",
-      "version": 1,
-      "visState": "{\"title\":\"Kubernetes - Unavailable pods\",\"type\":\"metrics\",\"params\":{\"id\":\"2fe9d3b0-30d5-11e7-8df8-6d3604a72912\",\"type\":\"metric\",\"series\":[{\"id\":\"2fe9d3b1-30d5-11e7-8df8-6d3604a72912\",\"color\":\"#68BC00\",\"split_mode\":\"everything\",\"metrics\":[{\"id\":\"54cf79a0-30d5-11e7-8df8-6d3604a72912\",\"type\":\"sum\",\"field\":\"kubernetes.deployment.replicas.unavailable\"}],\"seperate_axis\":0,\"axis_position\":\"right\",\"formatter\":\"number\",\"chart_type\":\"line\",\"line_width\":1,\"point_size\":1,\"fill\":0.5,\"stacked\":\"none\",\"label\":\"Unavailable Pods\",\"override_index_pattern\":1,\"series_interval\":\"10s\"}],\"time_field\":\"@timestamp\",\"index_pattern\":\"*\",\"interval\":\"auto\",\"axis_position\":\"left\",\"axis_formatter\":\"number\",\"show_legend\":1,\"background_color_rules\":[{\"id\":\"508ffb30-30d5-11e7-8df8-6d3604a72912\"}],\"gauge_color_rules\":[{\"id\":\"50f9b980-30d5-11e7-8df8-6d3604a72912\"}],\"gauge_width\":\"10\",\"gauge_inner_width\":\"10\",\"gauge_style\":\"half\",\"bar_color_rules\":[{\"id\":\"674d83b0-30d5-11e7-8df8-6d3604a72912\"}],\"gauge_max\":\"\",\"filter\":\"metricset.module:kubernetes AND metricset.name:state_deployment\"},\"aggs\":[],\"listeners\":{}}"
-    },
-    "_meta": {
-      "savedObjectVersion": 2
-    }
-  },
-  {
-    "_id": "7aac4fd0-30e0-11e7-8df8-6d3604a72912",
-    "_type": "visualization",
-    "_source": {
-      "description": "",
-      "kibanaSavedObjectMeta": {
-        "searchSourceJSON": "{\"query\":{\"query_string\":{\"query\":\"*\"}},\"filter\":[]}"
-      },
-      "title": "Unavailable pods per deployment [Metricbeat Kubernetes]",
-      "uiStateJSON": "{}",
-      "version": 1,
-      "visState": "{\"title\":\"Unavailable pods per deployment [Metricbeat Kubernetes]\",\"type\":\"metrics\",\"params\":{\"id\":\"117fadf0-30df-11e7-8df8-6d3604a72912\",\"type\":\"timeseries\",\"series\":[{\"id\":\"64456840-30df-11e7-8df8-6d3604a72912\",\"color\":\"rgba(254,146,0,1)\",\"split_mode\":\"terms\",\"metrics\":[{\"id\":\"64456841-30df-11e7-8df8-6d3604a72912\",\"type\":\"sum\",\"field\":\"kubernetes.deployment.replicas.unavailable\"}],\"seperate_axis\":0,\"axis_position\":\"right\",\"formatter\":\"number\",\"chart_type\":\"line\",\"line_width\":1,\"point_size\":1,\"fill\":0.5,\"stacked\":\"stacked\",\"split_filters\":[{\"color\":\"#68BC00\",\"id\":\"53d35ad0-30df-11e7-8df8-6d3604a72912\"}],\"terms_field\":\"kubernetes.deployment.name\",\"label\":\"Unavailable pods\",\"terms_size\":\"10000\"}],\"time_field\":\"@timestamp\",\"index_pattern\":\"*\",\"interval\":\"auto\",\"axis_position\":\"left\",\"axis_formatter\":\"number\",\"show_legend\":1,\"filter\":\"metricset.module:kubernetes AND metricset.name:state_deployment\"},\"aggs\":[],\"listeners\":{}}"
-    },
-    "_meta": {
-      "savedObjectVersion": 2
-    }
-  },
-  {
-    "_id": "da1ff7c0-30ed-11e7-b9e5-2b5b07213ab3",
-    "_type": "visualization",
-    "_source": {
-      "description": "",
-      "kibanaSavedObjectMeta": {
-        "searchSourceJSON": "{\"query\":{\"query_string\":{\"query\":\"*\"}},\"filter\":[]}"
-      },
-      "title": "Kubernetes - Available pods",
-      "uiStateJSON": "{}",
-      "version": 1,
-      "visState": "{\"title\":\"Kubernetes - Available pods\",\"type\":\"metrics\",\"params\":{\"id\":\"2fe9d3b0-30d5-11e7-8df8-6d3604a72912\",\"type\":\"metric\",\"series\":[{\"id\":\"2fe9d3b1-30d5-11e7-8df8-6d3604a72912\",\"color\":\"#68BC00\",\"split_mode\":\"everything\",\"metrics\":[{\"id\":\"54cf79a0-30d5-11e7-8df8-6d3604a72912\",\"type\":\"sum\",\"field\":\"kubernetes.deployment.replicas.available\"}],\"seperate_axis\":0,\"axis_position\":\"right\",\"formatter\":\"number\",\"chart_type\":\"line\",\"line_width\":1,\"point_size\":1,\"fill\":0.5,\"stacked\":\"none\",\"label\":\"Available Pods\",\"override_index_pattern\":1,\"series_time_field\":\"@timestamp\",\"series_index_pattern\":\"*\",\"series_interval\":\"10s\"}],\"time_field\":\"@timestamp\",\"index_pattern\":\"*\",\"interval\":\"auto\",\"axis_position\":\"left\",\"axis_formatter\":\"number\",\"show_legend\":1,\"background_color_rules\":[{\"id\":\"508ffb30-30d5-11e7-8df8-6d3604a72912\"}],\"gauge_color_rules\":[{\"id\":\"50f9b980-30d5-11e7-8df8-6d3604a72912\"}],\"gauge_width\":\"10\",\"gauge_inner_width\":\"10\",\"gauge_style\":\"half\",\"bar_color_rules\":[{\"id\":\"674d83b0-30d5-11e7-8df8-6d3604a72912\"}],\"gauge_max\":\"5\",\"filter\":\"metricset.module:kubernetes AND metricset.name:state_deployment\"},\"aggs\":[],\"listeners\":{}}"
-    },
-    "_meta": {
-      "savedObjectVersion": 2
-    }
-  },
-  {
-    "_id": "Docker-Number-of-Containers",
-    "_type": "visualization",
-    "_source": {
-      "description": "",
-      "kibanaSavedObjectMeta": {
-        "searchSourceJSON": "{\"filter\":[]}"
-      },
-      "savedSearchId": "Metricbeat-Docker",
-      "title": "Number of Containers [Metricbeat Docker]",
-      "uiStateJSON": "{\"vis\":{\"defaultColors\":{\"0 - 100\":\"rgb(0,104,55)\"}}}",
-      "version": 1,
-      "visState": "{\"aggs\":[{\"enabled\":true,\"id\":\"2\",\"params\":{\"customLabel\":\"Running\",\"field\":\"docker.info.containers.running\"},\"schema\":\"metric\",\"type\":\"max\"},{\"enabled\":true,\"id\":\"3\",\"params\":{\"customLabel\":\"Paused\",\"field\":\"docker.info.containers.paused\"},\"schema\":\"metric\",\"type\":\"max\"},{\"enabled\":true,\"id\":\"4\",\"params\":{\"customLabel\":\"Stopped\",\"field\":\"docker.info.containers.stopped\"},\"schema\":\"metric\",\"type\":\"max\"}],\"listeners\":{},\"params\":{\"addLegend\":false,\"addTooltip\":true,\"fontSize\":\"36\",\"gauge\":{\"autoExtend\":false,\"backStyle\":\"Full\",\"colorSchema\":\"Green to Red\",\"colorsRange\":[{\"from\":0,\"to\":100}],\"gaugeColorMode\":\"None\",\"gaugeStyle\":\"Full\",\"gaugeType\":\"Metric\",\"invertColors\":false,\"labels\":{\"color\":\"black\",\"show\":true},\"orientation\":\"vertical\",\"percentageMode\":false,\"scale\":{\"color\":\"#333\",\"labels\":false,\"show\":false,\"width\":2},\"style\":{\"bgColor\":false,\"fontSize\":60,\"labelColor\":false,\"subText\":\"\"},\"type\":\"simple\",\"useRange\":false,\"verticalSplit\":false},\"handleNoResults\":true,\"type\":\"gauge\"},\"title\":\"Number of Containers [Metricbeat Docker]\",\"type\":\"metric\"}"
-    },
-    "_meta": {
-      "savedObjectVersion": 2
-    }
-  },
-  {
-    "_id": "Apache-HTTPD-CPU",
-    "_type": "visualization",
-    "_source": {
-      "description": "",
-      "kibanaSavedObjectMeta": {
-        "searchSourceJSON": "{\n  \"filter\": []\n}"
-      },
-      "savedSearchId": "Apache-HTTPD",
-      "title": "CPU usage [Metricbeat Apache]",
-      "uiStateJSON": "{}",
-      "version": 1,
-      "visState": "{\n  \"title\": \"Apache HTTPD - CPU\",\n  \"type\": \"line\",\n  \"params\": {\n    \"shareYAxis\": true,\n    \"addTooltip\": true,\n    \"addLegend\": true,\n    \"showCircles\": true,\n    \"smoothLines\": false,\n    \"interpolate\": \"linear\",\n    \"scale\": \"linear\",\n    \"drawLinesBetweenPoints\": true,\n    \"radiusRatio\": 9,\n    \"times\": [],\n    \"addTimeMarker\": false,\n    \"defaultYExtents\": false,\n    \"setYExtents\": false,\n    \"yAxis\": {}\n  },\n  \"aggs\": [\n    {\n      \"id\": \"1\",\n      \"type\": \"avg\",\n      \"schema\": \"metric\",\n      \"params\": {\n        \"field\": \"apache.status.cpu.load\",\n        \"customLabel\": \"CPU load\"\n      }\n    },\n    {\n      \"id\": \"2\",\n      \"type\": \"date_histogram\",\n      \"schema\": \"segment\",\n      \"params\": {\n        \"field\": \"@timestamp\",\n        \"interval\": \"auto\",\n        \"customInterval\": \"2h\",\n        \"min_doc_count\": 1,\n        \"extended_bounds\": {}\n      }\n    },\n    {\n      \"id\": \"3\",\n      \"type\": \"terms\",\n      \"schema\": \"split\",\n      \"params\": {\n        \"field\": \"apache.status.hostname\",\n        \"size\": 5,\n        \"order\": \"desc\",\n        \"orderBy\": \"1\",\n        \"row\": true\n      }\n    },\n    {\n      \"id\": \"4\",\n      \"type\": \"avg\",\n      \"schema\": \"metric\",\n      \"params\": {\n        \"field\": \"apache.status.cpu.user\",\n        \"customLabel\": \"CPU user\"\n      }\n    },\n    {\n      \"id\": \"5\",\n      \"type\": \"avg\",\n      \"schema\": \"metric\",\n      \"params\": {\n        \"field\": \"apache.status.cpu.system\",\n        \"customLabel\": \"CPU system\"\n      }\n    },\n    {\n      \"id\": \"6\",\n      \"type\": \"avg\",\n      \"schema\": \"metric\",\n      \"params\": {\n        \"field\": \"apache.status.cpu.children_user\",\n        \"customLabel\": \"CPU children user\"\n      }\n    },\n    {\n      \"id\": \"7\",\n      \"type\": \"avg\",\n      \"schema\": \"metric\",\n      \"params\": {\n        \"field\": \"apache.status.cpu.children_system\",\n        \"customLabel\": \"CPU children system\"\n      }\n    }\n  ],\n  \"listeners\": {}\n}"
-    },
-    "_meta": {
-      "savedObjectVersion": 2
-    }
-  },
-  {
-    "_id": "Apache-HTTPD-Hostname-list",
-    "_type": "visualization",
-    "_source": {
-      "description": "",
-      "kibanaSavedObjectMeta": {
-        "searchSourceJSON": "{\n  \"filter\": []\n}"
-      },
-      "savedSearchId": "Apache-HTTPD",
-      "title": "Hostname list [Metricbeat Apache]",
-      "uiStateJSON": "{\n  \"vis\": {\n    \"params\": {\n      \"sort\": {\n        \"columnIndex\": null,\n        \"direction\": null\n      }\n    }\n  }\n}",
-      "version": 1,
-      "visState": "{\n  \"title\": \"Apache HTTPD - Hostname list\",\n  \"type\": \"table\",\n  \"params\": {\n    \"perPage\": 10,\n    \"showPartialRows\": false,\n    \"showMeticsAtAllLevels\": false,\n    \"sort\": {\n      \"columnIndex\": null,\n      \"direction\": null\n    }\n  },\n  \"aggs\": [\n    {\n      \"id\": \"1\",\n      \"type\": \"count\",\n      \"schema\": \"metric\",\n      \"params\": {\n        \"customLabel\": \"Events count\"\n      }\n    },\n    {\n      \"id\": \"2\",\n      \"type\": \"terms\",\n      \"schema\": \"bucket\",\n      \"params\": {\n        \"field\": \"apache.status.hostname\",\n        \"size\": 5,\n        \"order\": \"desc\",\n        \"orderBy\": \"1\",\n        \"customLabel\": \"Apache HTTD Hostname\"\n      }\n    }\n  ],\n  \"listeners\": {}\n}"
-    },
-    "_meta": {
-      "savedObjectVersion": 2
-    }
-  },
-  {
-    "_id": "Apache-HTTPD-Load1-slash-5-slash-15",
-    "_type": "visualization",
-    "_source": {
-      "description": "",
-      "kibanaSavedObjectMeta": {
-        "searchSourceJSON": "{\n  \"filter\": []\n}"
-      },
-      "savedSearchId": "Apache-HTTPD",
-      "title": "Load1/5/15 [Metricbeat Apache]",
-      "uiStateJSON": "{}",
-      "version": 1,
-      "visState": "{\n  \"title\": \"Apache HTTPD - Load1/5/15\",\n  \"type\": \"line\",\n  \"params\": {\n    \"shareYAxis\": true,\n    \"addTooltip\": true,\n    \"addLegend\": true,\n    \"showCircles\": true,\n    \"smoothLines\": false,\n    \"interpolate\": \"linear\",\n    \"scale\": \"linear\",\n    \"drawLinesBetweenPoints\": true,\n    \"radiusRatio\": 9,\n    \"times\": [],\n    \"addTimeMarker\": false,\n    \"defaultYExtents\": false,\n    \"setYExtents\": false,\n    \"yAxis\": {}\n  },\n  \"aggs\": [\n    {\n      \"id\": \"1\",\n      \"type\": \"avg\",\n      \"schema\": \"metric\",\n      \"params\": {\n        \"field\": \"apache.status.load.5\",\n        \"customLabel\": \"Load 5\"\n      }\n    },\n    {\n      \"id\": \"2\",\n      \"type\": \"date_histogram\",\n      \"schema\": \"segment\",\n      \"params\": {\n        \"field\": \"@timestamp\",\n        \"interval\": \"auto\",\n        \"customInterval\": \"2h\",\n        \"min_doc_count\": 1,\n        \"extended_bounds\": {}\n      }\n    },\n    {\n      \"id\": \"3\",\n      \"type\": \"avg\",\n      \"schema\": \"metric\",\n      \"params\": {\n        \"field\": \"apache.status.load.1\",\n        \"customLabel\": \"Load 1\"\n      }\n    },\n    {\n      \"id\": \"4\",\n      \"type\": \"avg\",\n      \"schema\": \"metric\",\n      \"params\": {\n        \"field\": \"apache.status.load.15\",\n        \"customLabel\": \"Load 15\"\n      }\n    },\n    {\n      \"id\": \"5\",\n      \"type\": \"terms\",\n      \"schema\": \"split\",\n      \"params\": {\n        \"field\": \"apache.status.hostname\",\n        \"size\": 5,\n        \"order\": \"desc\",\n        \"orderBy\": \"1\",\n        \"customLabel\": \"Hostname\",\n        \"row\": true\n      }\n    }\n  ],\n  \"listeners\": {}\n}"
-    },
-    "_meta": {
-      "savedObjectVersion": 2
-    }
-  },
-  {
-    "_id": "Apache-HTTPD-Scoreboard",
-    "_type": "visualization",
-    "_source": {
-      "description": "",
-      "kibanaSavedObjectMeta": {
-        "searchSourceJSON": "{\n  \"filter\": []\n}"
-      },
-      "savedSearchId": "Apache-HTTPD",
-      "title": "Scoreboard [Metricbeat Apache]",
-      "uiStateJSON": "{}",
-      "version": 1,
-      "visState": "{\n  \"title\": \"Apache HTTPD - Scoreboard\",\n  \"type\": \"line\",\n  \"params\": {\n    \"shareYAxis\": true,\n    \"addTooltip\": true,\n    \"addLegend\": true,\n    \"showCircles\": true,\n    \"smoothLines\": false,\n    \"interpolate\": \"linear\",\n    \"scale\": \"linear\",\n    \"drawLinesBetweenPoints\": true,\n    \"radiusRatio\": 9,\n    \"times\": [],\n    \"addTimeMarker\": false,\n    \"defaultYExtents\": false,\n    \"setYExtents\": false,\n    \"yAxis\": {}\n  },\n  \"aggs\": [\n    {\n      \"id\": \"1\",\n      \"type\": \"avg\",\n      \"schema\": \"metric\",\n      \"params\": {\n        \"field\": \"apache.status.scoreboard.closing_connection\",\n        \"customLabel\": \"Closing connection\"\n      }\n    },\n    {\n      \"id\": \"2\",\n      \"type\": \"date_histogram\",\n      \"schema\": \"segment\",\n      \"params\": {\n        \"field\": \"@timestamp\",\n        \"interval\": \"auto\",\n        \"customInterval\": \"2h\",\n        \"min_doc_count\": 1,\n        \"extended_bounds\": {}\n      }\n    },\n    {\n      \"id\": \"3\",\n      \"type\": \"terms\",\n      \"schema\": \"split\",\n      \"params\": {\n        \"field\": \"apache.status.hostname\",\n        \"size\": 5,\n        \"order\": \"desc\",\n        \"orderBy\": \"1\",\n        \"customLabel\": \"Hostname\",\n        \"row\": true\n      }\n    },\n    {\n      \"id\": \"4\",\n      \"type\": \"avg\",\n      \"schema\": \"metric\",\n      \"params\": {\n        \"field\": \"apache.status.scoreboard.dns_lookup\",\n        \"customLabel\": \"DNS lookup\"\n      }\n    },\n    {\n      \"id\": \"5\",\n      \"type\": \"avg\",\n      \"schema\": \"metric\",\n      \"params\": {\n        \"field\": \"apache.status.scoreboard.gracefully_finishing\",\n        \"customLabel\": \"Gracefully finishing\"\n      }\n    },\n    {\n      \"id\": \"6\",\n      \"type\": \"avg\",\n      \"schema\": \"metric\",\n      \"params\": {\n        \"field\": \"apache.status.scoreboard.idle_cleanup\",\n        \"customLabel\": \"Idle cleanup\"\n      }\n    },\n    {\n      \"id\": \"7\",\n      \"type\": \"avg\",\n      \"schema\": \"metric\",\n      \"params\": {\n        \"field\": \"apache.status.scoreboard.keepalive\",\n        \"customLabel\": \"Keepalive\"\n      }\n    },\n    {\n      \"id\": \"8\",\n      \"type\": \"avg\",\n      \"schema\": \"metric\",\n      \"params\": {\n        \"field\": \"apache.status.scoreboard.logging\",\n        \"customLabel\": \"Logging\"\n      }\n    },\n    {\n      \"id\": \"9\",\n      \"type\": \"avg\",\n      \"schema\": \"metric\",\n      \"params\": {\n        \"field\": \"apache.status.scoreboard.open_slot\",\n        \"customLabel\": \"Open slot\"\n      }\n    },\n    {\n      \"id\": \"10\",\n      \"type\": \"avg\",\n      \"schema\": \"metric\",\n      \"params\": {\n        \"field\": \"apache.status.scoreboard.reading_request\",\n        \"customLabel\": \"Reading request\"\n      }\n    },\n    {\n      \"id\": \"11\",\n      \"type\": \"avg\",\n      \"schema\": \"metric\",\n      \"params\": {\n        \"field\": \"apache.status.scoreboard.sending_reply\",\n        \"customLabel\": \"Sending reply\"\n      }\n    },\n    {\n      \"id\": \"12\",\n      \"type\": \"avg\",\n      \"schema\": \"metric\",\n      \"params\": {\n        \"field\": \"apache.status.scoreboard.starting_up\",\n        \"customLabel\": \"Starting up\"\n      }\n    },\n    {\n      \"id\": \"13\",\n      \"type\": \"avg\",\n      \"schema\": \"metric\",\n      
\"params\": {\n        \"field\": \"apache.status.scoreboard.total\",\n        \"customLabel\": \"Total\"\n      }\n    },\n    {\n      \"id\": \"14\",\n      \"type\": \"avg\",\n      \"schema\": \"metric\",\n      \"params\": {\n        \"field\": \"apache.status.scoreboard.waiting_for_connection\",\n        \"customLabel\": \"Waiting for connection\"\n      }\n    }\n  ],\n  \"listeners\": {}\n}"
-    },
-    "_meta": {
-      "savedObjectVersion": 2
-    }
-  },
-  {
-    "_id": "Apache-HTTPD-Total-accesses-and-kbytes",
-    "_type": "visualization",
-    "_source": {
-      "description": "",
-      "kibanaSavedObjectMeta": {
-        "searchSourceJSON": "{\n  \"filter\": []\n}"
-      },
-      "savedSearchId": "Apache-HTTPD",
-      "title": "Total accesses and kbytes [Metricbeat Apache]",
-      "uiStateJSON": "{}",
-      "version": 1,
-      "visState": "{\n  \"title\": \"Apache HTTPD - Total accesses and kbytes\",\n  \"type\": \"metric\",\n  \"params\": {\n    \"handleNoResults\": true,\n    \"fontSize\": 60\n  },\n  \"aggs\": [\n    {\n      \"id\": \"1\",\n      \"type\": \"max\",\n      \"schema\": \"metric\",\n      \"params\": {\n        \"field\": \"apache.status.total_kbytes\",\n        \"customLabel\": \"Total kbytes\"\n      }\n    },\n    {\n      \"id\": \"2\",\n      \"type\": \"max\",\n      \"schema\": \"metric\",\n      \"params\": {\n        \"field\": \"apache.status.total_accesses\",\n        \"customLabel\": \"Total accesses\"\n      }\n    }\n  ],\n  \"listeners\": {}\n}"
-    },
-    "_meta": {
-      "savedObjectVersion": 2
-    }
-  },
-  {
-    "_id": "Apache-HTTPD-Uptime",
-    "_type": "visualization",
-    "_source": {
-      "description": "",
-      "kibanaSavedObjectMeta": {
-        "searchSourceJSON": "{\n  \"filter\": []\n}"
-      },
-      "savedSearchId": "Apache-HTTPD",
-      "title": "Uptime [Metricbeat Apache]",
-      "uiStateJSON": "{}",
-      "version": 1,
-      "visState": "{\n  \"title\": \"Apache HTTPD - Uptime\",\n  \"type\": \"metric\",\n  \"params\": {\n    \"handleNoResults\": true,\n    \"fontSize\": 60\n  },\n  \"aggs\": [\n    {\n      \"id\": \"1\",\n      \"type\": \"max\",\n      \"schema\": \"metric\",\n      \"params\": {\n        \"field\": \"apache.status.uptime.uptime\",\n        \"customLabel\": \"Uptime\"\n      }\n    },\n    {\n      \"id\": \"2\",\n      \"type\": \"max\",\n      \"schema\": \"metric\",\n      \"params\": {\n        \"field\": \"apache.status.uptime.server_uptime\",\n        \"customLabel\": \"Server uptime\"\n      }\n    }\n  ],\n  \"listeners\": {}\n}"
-    },
-    "_meta": {
-      "savedObjectVersion": 2
-    }
-  },
-  {
-    "_id": "Apache-HTTPD-Workers",
-    "_type": "visualization",
-    "_source": {
-      "description": "",
-      "kibanaSavedObjectMeta": {
-        "searchSourceJSON": "{\n  \"filter\": []\n}"
-      },
-      "savedSearchId": "Apache-HTTPD",
-      "title": "Workers [Metricbeat Apache]",
-      "uiStateJSON": "{}",
-      "version": 1,
-      "visState": "{\n  \"title\": \"Apache HTTPD - Workers\",\n  \"type\": \"line\",\n  \"params\": {\n    \"shareYAxis\": true,\n    \"addTooltip\": true,\n    \"addLegend\": true,\n    \"showCircles\": true,\n    \"smoothLines\": false,\n    \"interpolate\": \"linear\",\n    \"scale\": \"linear\",\n    \"drawLinesBetweenPoints\": true,\n    \"radiusRatio\": 9,\n    \"times\": [],\n    \"addTimeMarker\": false,\n    \"defaultYExtents\": false,\n    \"setYExtents\": false,\n    \"yAxis\": {}\n  },\n  \"aggs\": [\n    {\n      \"id\": \"1\",\n      \"type\": \"avg\",\n      \"schema\": \"metric\",\n      \"params\": {\n        \"field\": \"apache.status.workers.busy\",\n        \"customLabel\": \"Busy workers\"\n      }\n    },\n    {\n      \"id\": \"2\",\n      \"type\": \"date_histogram\",\n      \"schema\": \"segment\",\n      \"params\": {\n        \"field\": \"@timestamp\",\n        \"interval\": \"auto\",\n        \"customInterval\": \"2h\",\n        \"min_doc_count\": 1,\n        \"extended_bounds\": {}\n      }\n    },\n    {\n      \"id\": \"3\",\n      \"type\": \"terms\",\n      \"schema\": \"split\",\n      \"params\": {\n        \"field\": \"apache.status.hostname\",\n        \"size\": 5,\n        \"order\": \"desc\",\n        \"orderBy\": \"1\",\n        \"customLabel\": \"Hostname\",\n        \"row\": true\n      }\n    },\n    {\n      \"id\": \"4\",\n      \"type\": \"avg\",\n      \"schema\": \"metric\",\n      \"params\": {\n        \"field\": \"apache.status.workers.idle\",\n        \"customLabel\": \"Idle workers\"\n      }\n    }\n  ],\n  \"listeners\": {}\n}"
-    },
-    "_meta": {
-      "savedObjectVersion": 2
-    }
-  },
-  {
-    "_id": "58000780-f529-11e6-844d-b170e2f0a07e",
-    "_type": "visualization",
-    "_source": {
-      "description": "",
-      "kibanaSavedObjectMeta": {
-        "searchSourceJSON": "{\"query\":{\"query\":{\"query_string\":{\"query\":\"*\",\"analyze_wildcard\":true}},\"language\":\"lucene\"},\"filter\":[]}"
-      },
-      "title": "Heap Summary [Metricbeat Golang]",
-      "uiStateJSON": "{}",
-      "version": 1,
-      "visState": "{\"title\":\"Heap Summary [Metricbeat Golang]\",\"type\":\"timelion\",\"params\":{\"expression\":\".es(index=\\\"metricbeat*\\\",metric=\\\"avg:golang.heap.system.total\\\").label(\\\"System Total Memory\\\").yaxis(label=\\\"Bytes\\\"),.es(index=\\\"metricbeat*\\\",metric=\\\"min:golang.heap.allocations.allocated\\\").label(\\\"Bytes Allocated(min)\\\").movingaverage(30),.es(index=\\\"metricbeat*\\\",metric=\\\"max:golang.heap.allocations.allocated\\\").label(\\\"Bytes Allocated(max)\\\").movingaverage(30),.es(index=\\\"metricbeat*\\\",metric=\\\"avg:golang.heap.gc.next_gc_limit\\\").label(\\\"GC Limit\\\"),.es(index=\\\"metricbeat*\\\",metric=\\\"avg:golang.heap.gc.pause.count\\\").condition(lt,1, null).points().label(\\\"GC Cycles(count)\\\").yaxis(2,label=\\\"Count\\\")\",\"interval\":\"10s\"},\"aggs\":[]}"
-    },
-    "_meta": {
-      "savedObjectVersion": 2
-    }
-  },
-  {
-    "_id": "95388680-f52a-11e6-969c-518c48c913e4",
-    "_type": "visualization",
-    "_source": {
-      "description": "",
-      "kibanaSavedObjectMeta": {
-        "searchSourceJSON": "{\"query\":{\"query\":{\"query_string\":{\"query\":\"*\",\"analyze_wildcard\":true}},\"language\":\"lucene\"},\"filter\":[]}"
-      },
-      "title": "Heap  [Metricbeat Golang]",
-      "uiStateJSON": "{}",
-      "version": 1,
-      "visState": "{\"title\":\"Heap  [Metricbeat Golang]\",\"type\":\"timelion\",\"params\":{\"expression\":\".es(index=\\\"metricbeat*\\\",metric=\\\"avg:golang.heap.allocations.total\\\").label(\\\"Heap Total\\\").derivative().movingaverage(30),.es(index=\\\"metricbeat*\\\",metric=\\\"avg:golang.heap.allocations.active\\\").label(\\\"Heap Inuse\\\").movingaverage(30),.es(index=\\\"metricbeat*\\\",metric=\\\"avg:golang.heap.allocations.allocated\\\").label(\\\"Heap Allocated\\\").movingaverage(30),.es(index=\\\"metricbeat*\\\",metric=\\\"avg:golang.heap.allocations.idle\\\").label(\\\"Heap Idle\\\").movingaverage(30)\",\"interval\":\"10s\"},\"aggs\":[]}"
-    },
-    "_meta": {
-      "savedObjectVersion": 2
-    }
-  },
-  {
-    "_id": "b59a5200-f52a-11e6-969c-518c48c913e4",
-    "_type": "visualization",
-    "_source": {
-      "description": "",
-      "kibanaSavedObjectMeta": {
-        "searchSourceJSON": "{\"query\":{\"query\":{\"query_string\":{\"query\":\"*\",\"analyze_wildcard\":true}},\"language\":\"lucene\"},\"filter\":[]}"
-      },
-      "title": "Objects [Metricbeat Golang]",
-      "uiStateJSON": "{}",
-      "version": 1,
-      "visState": "{\"title\":\"Objects [Metricbeat Golang]\",\"type\":\"timelion\",\"params\":{\"expression\":\".es(index=\\\"metricbeat*\\\",metric=\\\"max:golang.heap.allocations.objects\\\").label(\\\"Object Count(avg)\\\").yaxis(1,label=\\\"Count\\\").movingaverage(30),.es(index=\\\"metricbeat*\\\",metric=\\\"max:golang.heap.allocations.total\\\").derivative().label(\\\"Allocation Rate\\\").yaxis(2,label=\\\"Rate\\\").movingaverage(30)\",\"interval\":\"10s\"},\"aggs\":[]}"
-    },
-    "_meta": {
-      "savedObjectVersion": 2
-    }
-  },
-  {
-    "_id": "9a9a8bf0-f52a-11e6-969c-518c48c913e4",
-    "_type": "visualization",
-    "_source": {
-      "description": "",
-      "kibanaSavedObjectMeta": {
-        "searchSourceJSON": "{\"query\":{\"query\":{\"query_string\":{\"query\":\"*\",\"analyze_wildcard\":true}},\"language\":\"lucene\"},\"filter\":[]}"
-      },
-      "title": "System  [Metricbeat Golang]",
-      "uiStateJSON": "{}",
-      "version": 1,
-      "visState": "{\"title\":\"System  [Metricbeat Golang]\",\"type\":\"timelion\",\"params\":{\"expression\":\".es(index=\\\"metricbeat*\\\",metric=\\\"avg:golang.heap.system.total\\\").label(\\\"System Total\\\"),.es(index=\\\"metricbeat*\\\",metric=\\\"avg:golang.heap.system.optained\\\").label(\\\"System Obtained\\\"),.es(index=\\\"metricbeat*\\\",metric=\\\"avg:golang.heap.system.stack\\\").label(\\\"System Stack\\\"),.es(index=\\\"metricbeat*\\\",metric=\\\"avg:golang.heap.system.released\\\").label(\\\"System Released\\\")\",\"interval\":\"10s\"},\"aggs\":[]}"
-    },
-    "_meta": {
-      "savedObjectVersion": 2
-    }
-  },
-  {
-    "_id": "b046cb80-f52a-11e6-969c-518c48c913e4",
-    "_type": "visualization",
-    "_source": {
-      "description": "",
-      "kibanaSavedObjectMeta": {
-        "searchSourceJSON": "{\"query\":{\"query\":{\"query_string\":{\"query\":\"*\",\"analyze_wildcard\":true}},\"language\":\"lucene\"},\"filter\":[]}"
-      },
-      "title": "GC count [Metricbeat Golang]",
-      "uiStateJSON": "{}",
-      "version": 1,
-      "visState": "{\"title\":\"GC count [Metricbeat Golang]\",\"type\":\"timelion\",\"params\":{\"expression\":\".es(index=\\\"metricbeat*\\\",metric=\\\"max:golang.heap.gc.pause.count\\\").label(\\\"GC Count\\\").bars().yaxis(label=\\\"Count\\\"),.es(index=\\\"metricbeat*\\\",metric=\\\"max:golang.heap.gc.total_count\\\").label(\\\"GC Rate\\\").derivative().movingaverage(30),.es(index=\\\"metricbeat*\\\",metric=\\\"max:golang.heap.gc.cpu_fraction\\\").label(\\\"CPU Fraction\\\").yaxis(2,label=\\\"Fraction\\\")\",\"interval\":\"10s\"},\"aggs\":[]}"
-    },
-    "_meta": {
-      "savedObjectVersion": 2
-    }
-  },
-  {
-    "_id": "ab226b50-f52a-11e6-969c-518c48c913e4",
-    "_type": "visualization",
-    "_source": {
-      "description": "",
-      "kibanaSavedObjectMeta": {
-        "searchSourceJSON": "{\"query\":{\"query\":{\"query_string\":{\"query\":\"*\",\"analyze_wildcard\":true}},\"language\":\"lucene\"},\"filter\":[]}"
-      },
-      "title": "GC durations [Metricbeat Golang]",
-      "uiStateJSON": "{}",
-      "version": 1,
-      "visState": "{\"title\":\"GC durations [Metricbeat Golang]\",\"type\":\"timelion\",\"params\":{\"expression\":\".es(index=\\\"metricbeat*\\\",metric=\\\"max:golang.heap.gc.pause.sum.ns\\\").bars().label(\\\"sum of GC Pause durations(ns)\\\").yaxis(label=\\\"Durations(ns)\\\"),.es(index=\\\"metricbeat*\\\",metric=\\\"max:golang.heap.gc.total_pause.ns\\\").derivative().movingaverage(30).label(\\\"Total GC Pause(ns) Rate\\\"),.es(index=\\\"metricbeat*\\\",metric=\\\"max:golang.heap.gc.pause.max.ns\\\").label(\\\"Max GC Pause(ns)\\\").movingaverage(30),.es(index=\\\"metricbeat*\\\",metric=\\\"avg:golang.heap.gc.pause.avg.ns\\\").label(\\\"Avg GC Pause(ns)\\\").movingaverage(30),.es(index=\\\"metricbeat*\\\",metric=\\\"max:golang.heap.gc.pause.count\\\").condition(lt,1, null).label(\\\"GC Pause count\\\").points().yaxis(2,label=\\\"Count\\\")\",\"interval\":\"10s\"},\"aggs\":[]}"
-    },
-    "_meta": {
-      "savedObjectVersion": 2
-    }
-  },
-  {
-    "_id": "MongoDB-hosts",
-    "_type": "visualization",
-    "_source": {
-      "description": "",
-      "kibanaSavedObjectMeta": {
-        "searchSourceJSON": "{\"filter\":[]}"
-      },
-      "savedSearchId": "MongoDB-search",
-      "title": "Hosts [Metricbeat MongoDB]",
-      "uiStateJSON": "{\"vis\":{\"params\":{\"sort\":{\"columnIndex\":null,\"direction\":null}}}}",
-      "version": 1,
-      "visState": "{\"title\":\"Hosts [Metricbeat MongoDB]\",\"type\":\"table\",\"params\":{\"perPage\":10,\"showPartialRows\":false,\"showMeticsAtAllLevels\":false,\"sort\":{\"columnIndex\":null,\"direction\":null},\"showTotal\":false,\"totalFunc\":\"sum\"},\"aggs\":[{\"id\":\"1\",\"enabled\":true,\"type\":\"avg\",\"schema\":\"metric\",\"params\":{\"field\":\"mongodb.status.connections.current\",\"customLabel\":\"Number of connections\"}},{\"id\":\"2\",\"enabled\":true,\"type\":\"terms\",\"schema\":\"bucket\",\"params\":{\"field\":\"metricset.host\",\"size\":5,\"order\":\"desc\",\"orderBy\":\"1\"}},{\"id\":\"3\",\"enabled\":true,\"type\":\"avg\",\"schema\":\"metric\",\"params\":{\"field\":\"mongodb.status.memory.bits\",\"customLabel\":\"Arch\"}},{\"id\":\"4\",\"enabled\":true,\"type\":\"avg\",\"schema\":\"metric\",\"params\":{\"field\":\"mongodb.status.memory.resident.mb\",\"customLabel\":\"Resident memory\"}},{\"id\":\"5\",\"enabled\":true,\"type\":\"avg\",\"schema\":\"metric\",\"params\":{\"field\":\"mongodb.status.memory.virtual.mb\",\"customLabel\":\"Virtual memory\"}}]}"
-    },
-    "_meta": {
-      "savedObjectVersion": 2
-    }
-  },
-  {
-    "_id": "MongoDB-Engine-ampersand-Version",
-    "_type": "visualization",
-    "_source": {
-      "description": "",
-      "kibanaSavedObjectMeta": {
-        "searchSourceJSON": "{\"filter\":[]}"
-      },
-      "savedSearchId": "MongoDB-search",
-      "title": "Engine & Version [Metricbeat MongoDB]",
-      "uiStateJSON": "{}",
-      "version": 1,
-      "visState": "{\"title\":\"Engine & Version [Metricbeat MongoDB]\",\"type\":\"pie\",\"params\":{\"addLegend\":true,\"addTooltip\":true,\"isDonut\":true,\"legendPosition\":\"bottom\",\"shareYAxis\":true,\"type\":\"pie\"},\"aggs\":[{\"id\":\"1\",\"enabled\":true,\"type\":\"cardinality\",\"schema\":\"metric\",\"params\":{\"field\":\"metricset.host\"}},{\"id\":\"2\",\"enabled\":true,\"type\":\"terms\",\"schema\":\"segment\",\"params\":{\"field\":\"mongodb.status.storage_engine.name\",\"size\":5,\"order\":\"desc\",\"orderBy\":\"1\",\"customLabel\":\"Engine\"}},{\"id\":\"3\",\"enabled\":true,\"type\":\"terms\",\"schema\":\"segment\",\"params\":{\"field\":\"mongodb.status.version\",\"size\":5,\"order\":\"desc\",\"orderBy\":\"1\",\"customLabel\":\"Version\"}}]}"
-    },
-    "_meta": {
-      "savedObjectVersion": 2
-    }
-  },
-  {
-    "_id": "MongoDB-operation-counters",
-    "_type": "visualization",
-    "_source": {
-      "description": "",
-      "kibanaSavedObjectMeta": {
-        "searchSourceJSON": "{\"filter\":[]}"
-      },
-      "savedSearchId": "MongoDB-search",
-      "title": "Operation counters [Metricbeat MongoDB]",
-      "uiStateJSON": "{}",
-      "version": 1,
-      "visState": "{\"title\":\"Operation counters [Metricbeat MongoDB]\",\"type\":\"area\",\"params\":{\"shareYAxis\":true,\"addTooltip\":true,\"addLegend\":true,\"legendPosition\":\"bottom\",\"smoothLines\":false,\"scale\":\"linear\",\"interpolate\":\"linear\",\"mode\":\"stacked\",\"times\":[],\"addTimeMarker\":false,\"defaultYExtents\":false,\"setYExtents\":false,\"yAxis\":{},\"type\":\"area\",\"grid\":{\"categoryLines\":false,\"style\":{\"color\":\"#eee\"}},\"categoryAxes\":[{\"id\":\"CategoryAxis-1\",\"type\":\"category\",\"position\":\"bottom\",\"show\":true,\"style\":{},\"scale\":{\"type\":\"linear\"},\"labels\":{\"show\":true,\"truncate\":100},\"title\":{}}],\"valueAxes\":[{\"id\":\"ValueAxis-1\",\"name\":\"LeftAxis-1\",\"type\":\"value\",\"position\":\"left\",\"show\":true,\"style\":{},\"scale\":{\"type\":\"linear\",\"mode\":\"normal\"},\"labels\":{\"show\":true,\"rotate\":0,\"filter\":false,\"truncate\":100},\"title\":{\"text\":\"Count\"}}],\"seriesParams\":[{\"show\":\"true\",\"type\":\"area\",\"mode\":\"stacked\",\"data\":{\"label\":\"Count\",\"id\":\"1\"},\"drawLinesBetweenPoints\":true,\"showCircles\":true,\"interpolate\":\"linear\",\"valueAxis\":\"ValueAxis-1\"}]},\"aggs\":[{\"id\":\"1\",\"enabled\":true,\"type\":\"avg\",\"schema\":\"metric\",\"params\":{\"field\":\"mongodb.status.opcounters.command\",\"customLabel\":\"command\"}},{\"id\":\"2\",\"enabled\":true,\"type\":\"date_histogram\",\"schema\":\"segment\",\"params\":{\"field\":\"@timestamp\",\"interval\":\"auto\",\"customInterval\":\"2h\",\"min_doc_count\":1,\"extended_bounds\":{}}},{\"id\":\"3\",\"enabled\":true,\"type\":\"avg\",\"schema\":\"metric\",\"params\":{\"field\":\"mongodb.status.opcounters.delete\",\"customLabel\":\"delete\"}},{\"id\":\"4\",\"enabled\":true,\"type\":\"avg\",\"schema\":\"metric\",\"params\":{\"field\":\"mongodb.status.opcounters.getmore\",\"customLabel\":\"getmore\"}},{\"id\":\"5\",\"enabled\":true,\"type\":\"avg\",\"schema\":\"metric\",\"params\":{\"field\":\"mongodb.status.opcounters.insert\",\"customLabel\":\"insert\"}},{\"id\":\"6\",\"enabled\":true,\"type\":\"avg\",\"schema\":\"metric\",\"params\":{\"field\":\"mongodb.status.opcounters.query\",\"customLabel\":\"query\"}},{\"id\":\"7\",\"enabled\":true,\"type\":\"avg\",\"schema\":\"metric\",\"params\":{\"field\":\"mongodb.status.opcounters_replicated.update\",\"customLabel\":\"update\"}}]}"
-    },
-    "_meta": {
-      "savedObjectVersion": 2
-    }
-  },
-  {
-    "_id": "MongoDB-Concurrent-transactions-Read",
-    "_type": "visualization",
-    "_source": {
-      "description": "",
-      "kibanaSavedObjectMeta": {
-        "searchSourceJSON": "{\"filter\":[]}"
-      },
-      "savedSearchId": "MongoDB-search",
-      "title": "Concurrent transactions Read [Metricbeat MongoDB]",
-      "uiStateJSON": "{\"vis\":{\"colors\":{\"Read Available\":\"#508642\",\"Read Used\":\"#BF1B00\"}}}",
-      "version": 1,
-      "visState": "{\"title\":\"Concurrent transactions Read [Metricbeat MongoDB]\",\"type\":\"area\",\"params\":{\"shareYAxis\":true,\"addTooltip\":true,\"addLegend\":true,\"legendPosition\":\"bottom\",\"smoothLines\":false,\"scale\":\"linear\",\"interpolate\":\"linear\",\"mode\":\"stacked\",\"times\":[],\"addTimeMarker\":false,\"defaultYExtents\":false,\"setYExtents\":false,\"yAxis\":{},\"type\":\"area\",\"grid\":{\"categoryLines\":false,\"style\":{\"color\":\"#eee\"}},\"categoryAxes\":[{\"id\":\"CategoryAxis-1\",\"type\":\"category\",\"position\":\"bottom\",\"show\":true,\"style\":{},\"scale\":{\"type\":\"linear\"},\"labels\":{\"show\":true,\"truncate\":100},\"title\":{}}],\"valueAxes\":[{\"id\":\"ValueAxis-1\",\"name\":\"LeftAxis-1\",\"type\":\"value\",\"position\":\"left\",\"show\":true,\"style\":{},\"scale\":{\"type\":\"linear\",\"mode\":\"normal\"},\"labels\":{\"show\":true,\"rotate\":0,\"filter\":false,\"truncate\":100},\"title\":{\"text\":\"Count\"}}],\"seriesParams\":[{\"show\":\"true\",\"type\":\"area\",\"mode\":\"stacked\",\"data\":{\"label\":\"Count\",\"id\":\"1\"},\"drawLinesBetweenPoints\":true,\"showCircles\":true,\"interpolate\":\"linear\",\"valueAxis\":\"ValueAxis-1\"}]},\"aggs\":[{\"id\":\"1\",\"enabled\":true,\"type\":\"avg\",\"schema\":\"metric\",\"params\":{\"field\":\"mongodb.status.wired_tiger.concurrent_transactions.read.available\",\"customLabel\":\"Read Available\"}},{\"id\":\"2\",\"enabled\":true,\"type\":\"date_histogram\",\"schema\":\"segment\",\"params\":{\"field\":\"@timestamp\",\"interval\":\"auto\",\"customInterval\":\"2h\",\"min_doc_count\":1,\"extended_bounds\":{}}},{\"id\":\"3\",\"enabled\":true,\"type\":\"avg\",\"schema\":\"metric\",\"params\":{\"field\":\"mongodb.status.wired_tiger.concurrent_transactions.read.out\",\"customLabel\":\"Read Used\"}}]}"
-    },
-    "_meta": {
-      "savedObjectVersion": 2
-    }
-  },
-  {
-    "_id": "MongoDB-Concurrent-transactions-Write",
-    "_type": "visualization",
-    "_source": {
-      "description": "",
-      "kibanaSavedObjectMeta": {
-        "searchSourceJSON": "{\"filter\":[]}"
-      },
-      "savedSearchId": "MongoDB-search",
-      "title": "Concurrent transactions Write [Metricbeat MongoDB]",
-      "uiStateJSON": "{\"vis\":{\"colors\":{\"Write Available\":\"#629E51\",\"Write Used\":\"#BF1B00\"}}}",
-      "version": 1,
-      "visState": "{\"title\":\"Concurrent transactions Write [Metricbeat MongoDB]\",\"type\":\"area\",\"params\":{\"addLegend\":true,\"addTimeMarker\":false,\"addTooltip\":true,\"defaultYExtents\":false,\"interpolate\":\"linear\",\"legendPosition\":\"bottom\",\"mode\":\"stacked\",\"scale\":\"linear\",\"setYExtents\":false,\"shareYAxis\":true,\"smoothLines\":false,\"times\":[],\"yAxis\":{},\"type\":\"area\",\"grid\":{\"categoryLines\":false,\"style\":{\"color\":\"#eee\"}},\"categoryAxes\":[{\"id\":\"CategoryAxis-1\",\"type\":\"category\",\"position\":\"bottom\",\"show\":true,\"style\":{},\"scale\":{\"type\":\"linear\"},\"labels\":{\"show\":true,\"truncate\":100},\"title\":{}}],\"valueAxes\":[{\"id\":\"ValueAxis-1\",\"name\":\"LeftAxis-1\",\"type\":\"value\",\"position\":\"left\",\"show\":true,\"style\":{},\"scale\":{\"type\":\"linear\",\"mode\":\"normal\"},\"labels\":{\"show\":true,\"rotate\":0,\"filter\":false,\"truncate\":100},\"title\":{\"text\":\"Count\"}}],\"seriesParams\":[{\"show\":\"true\",\"type\":\"area\",\"mode\":\"stacked\",\"data\":{\"label\":\"Count\",\"id\":\"1\"},\"drawLinesBetweenPoints\":true,\"showCircles\":true,\"interpolate\":\"linear\",\"valueAxis\":\"ValueAxis-1\"}]},\"aggs\":[{\"id\":\"1\",\"enabled\":true,\"type\":\"avg\",\"schema\":\"metric\",\"params\":{\"field\":\"mongodb.status.wired_tiger.concurrent_transactions.write.available\",\"customLabel\":\"Write Available\"}},{\"id\":\"2\",\"enabled\":true,\"type\":\"date_histogram\",\"schema\":\"segment\",\"params\":{\"field\":\"@timestamp\",\"interval\":\"auto\",\"customInterval\":\"2h\",\"min_doc_count\":1,\"extended_bounds\":{}}},{\"id\":\"3\",\"enabled\":true,\"type\":\"avg\",\"schema\":\"metric\",\"params\":{\"field\":\"mongodb.status.wired_tiger.concurrent_transactions.write.out\",\"customLabel\":\"Write Used\"}}]}"
-    },
-    "_meta": {
-      "savedObjectVersion": 2
-    }
-  },
-  {
-    "_id": "MongoDB-memory-stats",
-    "_type": "visualization",
-    "_source": {
-      "description": "",
-      "kibanaSavedObjectMeta": {
-        "searchSourceJSON": "{\"filter\":[]}"
-      },
-      "savedSearchId": "MongoDB-search",
-      "title": "Memory stats [Metricbeat MongoDB]",
-      "uiStateJSON": "{}",
-      "version": 1,
-      "visState": "{\"title\":\"Memory stats [Metricbeat MongoDB]\",\"type\":\"line\",\"params\":{\"shareYAxis\":true,\"addTooltip\":true,\"addLegend\":true,\"legendPosition\":\"bottom\",\"showCircles\":true,\"smoothLines\":false,\"interpolate\":\"linear\",\"scale\":\"log\",\"drawLinesBetweenPoints\":true,\"radiusRatio\":9,\"times\":[],\"addTimeMarker\":false,\"defaultYExtents\":false,\"setYExtents\":false,\"yAxis\":{},\"type\":\"line\",\"grid\":{\"categoryLines\":false,\"style\":{\"color\":\"#eee\"}},\"categoryAxes\":[{\"id\":\"CategoryAxis-1\",\"type\":\"category\",\"position\":\"bottom\",\"show\":true,\"style\":{},\"scale\":{\"type\":\"linear\"},\"labels\":{\"show\":true,\"truncate\":100},\"title\":{}}],\"valueAxes\":[{\"id\":\"ValueAxis-1\",\"name\":\"LeftAxis-1\",\"type\":\"value\",\"position\":\"left\",\"show\":true,\"style\":{},\"scale\":{\"type\":\"linear\",\"mode\":\"normal\"},\"labels\":{\"show\":true,\"rotate\":0,\"filter\":false,\"truncate\":100},\"title\":{\"text\":\"Count\"}}],\"seriesParams\":[{\"show\":\"true\",\"type\":\"line\",\"mode\":\"normal\",\"data\":{\"label\":\"Count\",\"id\":\"1\"},\"valueAxis\":\"ValueAxis-1\",\"drawLinesBetweenPoints\":true,\"showCircles\":true}]},\"aggs\":[{\"id\":\"1\",\"enabled\":true,\"type\":\"avg\",\"schema\":\"metric\",\"params\":{\"field\":\"mongodb.status.memory.mapped.mb\",\"customLabel\":\"Mapped\"}},{\"id\":\"2\",\"enabled\":true,\"type\":\"date_histogram\",\"schema\":\"segment\",\"params\":{\"field\":\"@timestamp\",\"interval\":\"auto\",\"customInterval\":\"2h\",\"min_doc_count\":1,\"extended_bounds\":{}}},{\"id\":\"3\",\"enabled\":true,\"type\":\"avg\",\"schema\":\"metric\",\"params\":{\"field\":\"mongodb.status.memory.mapped_with_journal.mb\",\"customLabel\":\"Mapped with journal\"}},{\"id\":\"4\",\"enabled\":true,\"type\":\"avg\",\"schema\":\"metric\",\"params\":{\"field\":\"mongodb.status.memory.resident.mb\",\"customLabel\":\"Rezident\"}},{\"id\":\"5\",\"enabled\":true,\"type\":\"avg\",\"schema\":\"metric\",\"params\":{\"field\":\"mongodb.status.memory.virtual.mb\",\"customLabel\":\"Virtual\"}}]}"
-    },
-    "_meta": {
-      "savedObjectVersion": 2
-    }
-  },
-  {
-    "_id": "MongoDB-asserts",
-    "_type": "visualization",
-    "_source": {
-      "description": "",
-      "kibanaSavedObjectMeta": {
-        "searchSourceJSON": "{\"filter\":[]}"
-      },
-      "savedSearchId": "MongoDB-search",
-      "title": "Asserts [Metricbeat MongoDB]",
-      "uiStateJSON": "{}",
-      "version": 1,
-      "visState": "{\"title\":\"Asserts [Metricbeat MongoDB]\",\"type\":\"area\",\"params\":{\"shareYAxis\":true,\"addTooltip\":true,\"addLegend\":true,\"legendPosition\":\"bottom\",\"smoothLines\":false,\"scale\":\"linear\",\"interpolate\":\"linear\",\"mode\":\"stacked\",\"times\":[],\"addTimeMarker\":false,\"defaultYExtents\":false,\"setYExtents\":false,\"yAxis\":{},\"type\":\"area\",\"grid\":{\"categoryLines\":false,\"style\":{\"color\":\"#eee\"}},\"categoryAxes\":[{\"id\":\"CategoryAxis-1\",\"type\":\"category\",\"position\":\"bottom\",\"show\":true,\"style\":{},\"scale\":{\"type\":\"linear\"},\"labels\":{\"show\":true,\"truncate\":100},\"title\":{}}],\"valueAxes\":[{\"id\":\"ValueAxis-1\",\"name\":\"LeftAxis-1\",\"type\":\"value\",\"position\":\"left\",\"show\":true,\"style\":{},\"scale\":{\"type\":\"linear\",\"mode\":\"normal\"},\"labels\":{\"show\":true,\"rotate\":0,\"filter\":false,\"truncate\":100},\"title\":{\"text\":\"Count\"}}],\"seriesParams\":[{\"show\":\"true\",\"type\":\"area\",\"mode\":\"stacked\",\"data\":{\"label\":\"Count\",\"id\":\"1\"},\"drawLinesBetweenPoints\":true,\"showCircles\":true,\"interpolate\":\"linear\",\"valueAxis\":\"ValueAxis-1\"}]},\"aggs\":[{\"id\":\"1\",\"enabled\":true,\"type\":\"max\",\"schema\":\"metric\",\"params\":{\"field\":\"mongodb.status.asserts.msg\",\"customLabel\":\"message\"}},{\"id\":\"2\",\"enabled\":true,\"type\":\"date_histogram\",\"schema\":\"segment\",\"params\":{\"field\":\"@timestamp\",\"interval\":\"auto\",\"customInterval\":\"2h\",\"min_doc_count\":1,\"extended_bounds\":{}}},{\"id\":\"3\",\"enabled\":true,\"type\":\"max\",\"schema\":\"metric\",\"params\":{\"field\":\"mongodb.status.asserts.regular\",\"customLabel\":\"regular\"}},{\"id\":\"4\",\"enabled\":true,\"type\":\"max\",\"schema\":\"metric\",\"params\":{\"field\":\"mongodb.status.asserts.rollovers\",\"customLabel\":\"rollover\"}},{\"id\":\"5\",\"enabled\":true,\"type\":\"max\",\"schema\":\"metric\",\"params\":{\"field\":\"mongodb.status.asserts.user\",\"customLabel\":\"user\"}},{\"id\":\"6\",\"enabled\":true,\"type\":\"max\",\"schema\":\"metric\",\"params\":{\"field\":\"mongodb.status.asserts.warning\",\"customLabel\":\"warning\"}}]}"
-    },
-    "_meta": {
-      "savedObjectVersion": 2
-    }
-  },
-  {
-    "_id": "MongoDB-WiredTiger-Cache",
-    "_type": "visualization",
-    "_source": {
-      "description": "",
-      "kibanaSavedObjectMeta": {
-        "searchSourceJSON": "{\"filter\":[]}"
-      },
-      "savedSearchId": "MongoDB-search",
-      "title": "WiredTiger Cache [Metricbeat MongoDB]",
-      "uiStateJSON": "{}",
-      "version": 1,
-      "visState": "{\"title\":\"WiredTiger Cache [Metricbeat MongoDB]\",\"type\":\"area\",\"params\":{\"shareYAxis\":true,\"addTooltip\":true,\"addLegend\":true,\"legendPosition\":\"bottom\",\"smoothLines\":false,\"scale\":\"linear\",\"interpolate\":\"linear\",\"mode\":\"overlap\",\"times\":[],\"addTimeMarker\":false,\"defaultYExtents\":false,\"setYExtents\":false,\"yAxis\":{},\"type\":\"area\",\"grid\":{\"categoryLines\":false,\"style\":{\"color\":\"#eee\"}},\"categoryAxes\":[{\"id\":\"CategoryAxis-1\",\"type\":\"category\",\"position\":\"bottom\",\"show\":true,\"style\":{},\"scale\":{\"type\":\"linear\"},\"labels\":{\"show\":true,\"truncate\":100},\"title\":{}}],\"valueAxes\":[{\"id\":\"ValueAxis-1\",\"name\":\"LeftAxis-1\",\"type\":\"value\",\"position\":\"left\",\"show\":true,\"style\":{},\"scale\":{\"type\":\"linear\",\"mode\":\"normal\"},\"labels\":{\"show\":true,\"rotate\":0,\"filter\":false,\"truncate\":100},\"title\":{\"text\":\"Count\"}}],\"seriesParams\":[{\"show\":\"true\",\"type\":\"area\",\"mode\":\"stacked\",\"data\":{\"label\":\"Count\",\"id\":\"1\"},\"drawLinesBetweenPoints\":true,\"showCircles\":true,\"interpolate\":\"linear\",\"valueAxis\":\"ValueAxis-1\"}]},\"aggs\":[{\"id\":\"1\",\"enabled\":true,\"type\":\"avg\",\"schema\":\"metric\",\"params\":{\"field\":\"mongodb.status.wired_tiger.cache.maximum.bytes\",\"customLabel\":\"max\"}},{\"id\":\"2\",\"enabled\":true,\"type\":\"date_histogram\",\"schema\":\"segment\",\"params\":{\"field\":\"@timestamp\",\"interval\":\"auto\",\"customInterval\":\"2h\",\"min_doc_count\":1,\"extended_bounds\":{}}},{\"id\":\"3\",\"enabled\":true,\"type\":\"avg\",\"schema\":\"metric\",\"params\":{\"field\":\"mongodb.status.wired_tiger.cache.used.bytes\",\"customLabel\":\"used\"}},{\"id\":\"4\",\"enabled\":true,\"type\":\"avg\",\"schema\":\"metric\",\"params\":{\"field\":\"mongodb.status.wired_tiger.cache.dirty.bytes\",\"customLabel\":\"dirty\"}}]}"
-    },
-    "_meta": {
-      "savedObjectVersion": 2
-    }
-  },
-  {
-    "_id": "System-Navigation",
-    "_type": "visualization",
-    "_source": {
-      "description": "",
-      "kibanaSavedObjectMeta": {
-        "searchSourceJSON": "{\"query\":{\"query\":{\"query_string\":{\"analyze_wildcard\":true,\"query\":\"*\"}},\"language\":\"lucene\"},\"filter\":[]}"
-      },
-      "title": "System Navigation [Metricbeat System]",
-      "uiStateJSON": "{}",
-      "version": 1,
-      "visState": "{\"title\":\"System Navigation [Metricbeat System]\",\"type\":\"markdown\",\"params\":{\"markdown\":\"[System Overview](#/dashboard/Metricbeat-system-overview)  | [Host Overview](#/dashboard/79ffd6e0-faa0-11e6-947f-177f697178b8) |\\n[Containers overview](#/dashboard/CPU-slash-Memory-per-container)\",\"fontSize\":12},\"aggs\":[]}"
-    },
-    "_meta": {
-      "savedObjectVersion": 2
-    }
-  },
-  {
-    "_id": "522ee670-1b92-11e7-bec4-a5e9ec5cab8b",
-    "_type": "visualization",
-    "_source": {
-      "description": "",
-      "kibanaSavedObjectMeta": {
-        "searchSourceJSON": "{\"query\":{\"query\":{\"query_string\":{\"query\":\"*\"}},\"language\":\"lucene\"},\"filter\":[]}"
-      },
-      "title": "Inbound Traffic [Metricbeat System]",
-      "uiStateJSON": "{}",
-      "version": 1,
-      "visState": "{\"title\":\"Inbound Traffic [Metricbeat System]\",\"type\":\"metrics\",\"params\":{\"axis_formatter\":\"number\",\"axis_position\":\"left\",\"background_color_rules\":[{\"id\":\"0e346760-1b92-11e7-bec4-a5e9ec5cab8b\"}],\"filter\":\"-system.network.name:l*\",\"id\":\"0c761590-1b92-11e7-bec4-a5e9ec5cab8b\",\"index_pattern\":\"*\",\"interval\":\"auto\",\"series\":[{\"axis_position\":\"right\",\"chart_type\":\"line\",\"color\":\"#68BC00\",\"fill\":0.5,\"formatter\":\"bytes\",\"id\":\"0c761591-1b92-11e7-bec4-a5e9ec5cab8b\",\"label\":\"Inbound Traffic\",\"line_width\":1,\"metrics\":[{\"field\":\"system.network.in.bytes\",\"id\":\"0c761592-1b92-11e7-bec4-a5e9ec5cab8b\",\"type\":\"max\"},{\"field\":\"0c761592-1b92-11e7-bec4-a5e9ec5cab8b\",\"id\":\"1d659060-1b92-11e7-bec4-a5e9ec5cab8b\",\"type\":\"derivative\",\"unit\":\"1s\"},{\"field\":\"1d659060-1b92-11e7-bec4-a5e9ec5cab8b\",\"id\":\"f2074f70-1b92-11e7-a416-41f5ccdba2e6\",\"type\":\"positive_only\",\"unit\":\"\"},{\"id\":\"c40e18f0-2c55-11e7-a0ad-277ce466684d\",\"type\":\"series_agg\",\"function\":\"sum\"}],\"point_size\":1,\"seperate_axis\":0,\"split_mode\":\"terms\",\"stacked\":\"none\",\"value_template\":\"{{value}}/s\",\"terms_field\":\"system.network.name\"},{\"axis_position\":\"right\",\"chart_type\":\"line\",\"color\":\"#68BC00\",\"fill\":0.5,\"formatter\":\"bytes\",\"id\":\"37f70440-1b92-11e7-bec4-a5e9ec5cab8b\",\"label\":\"Total Transferred\",\"line_width\":1,\"metrics\":[{\"field\":\"system.network.in.bytes\",\"id\":\"37f72b50-1b92-11e7-bec4-a5e9ec5cab8b\",\"type\":\"max\"},{\"field\":\"37f72b50-1b92-11e7-bec4-a5e9ec5cab8b\",\"id\":\"37f72b51-1b92-11e7-bec4-a5e9ec5cab8b\",\"type\":\"derivative\",\"unit\":\"\"},{\"unit\":\"\",\"id\":\"f9da2dd0-1b92-11e7-a416-41f5ccdba2e6\",\"type\":\"positive_only\",\"field\":\"37f72b51-1b92-11e7-bec4-a5e9ec5cab8b\"},{\"sigma\":\"\",\"field\":\"f9da2dd0-1b92-11e7-a416-41f5ccdba2e6\",\"id\":\"3e63c2f0-1b92-11e7-bec4-a5e9ec5cab8b\",\"type\":\"series_agg\",\"function\":\"overall_sum\"}],\"point_size\":1,\"seperate_axis\":0,\"split_mode\":\"terms\",\"stacked\":\"none\",\"value_template\":\"{{value}}\",\"terms_field\":\"system.network.name\"}],\"show_legend\":1,\"time_field\":\"@timestamp\",\"type\":\"metric\",\"show_grid\":1},\"aggs\":[]}"
-    },
-    "_meta": {
-      "savedObjectVersion": 2
-    }
-  },
-  {
-    "_id": "1aae9140-1b93-11e7-8ada-3df93aab833e",
-    "_type": "visualization",
-    "_source": {
-      "description": "",
-      "kibanaSavedObjectMeta": {
-        "searchSourceJSON": "{\"query\":{\"query\":{\"query_string\":{\"query\":\"*\"}},\"language\":\"lucene\"},\"filter\":[]}"
-      },
-      "title": "Outbound Traffic [Metricbeat System]",
-      "uiStateJSON": "{}",
-      "version": 1,
-      "visState": "{\"title\":\"Outbound Traffic [Metricbeat System]\",\"type\":\"metrics\",\"params\":{\"axis_formatter\":\"number\",\"axis_position\":\"left\",\"background_color_rules\":[{\"id\":\"0e346760-1b92-11e7-bec4-a5e9ec5cab8b\"}],\"filter\":\"-system.network.name:l*\",\"id\":\"0c761590-1b92-11e7-bec4-a5e9ec5cab8b\",\"index_pattern\":\"*\",\"interval\":\"auto\",\"series\":[{\"axis_position\":\"right\",\"chart_type\":\"line\",\"color\":\"#68BC00\",\"fill\":0.5,\"formatter\":\"bytes\",\"id\":\"0c761591-1b92-11e7-bec4-a5e9ec5cab8b\",\"label\":\"Outbound Traffic\",\"line_width\":1,\"metrics\":[{\"field\":\"system.network.out.bytes\",\"id\":\"0c761592-1b92-11e7-bec4-a5e9ec5cab8b\",\"type\":\"max\"},{\"field\":\"0c761592-1b92-11e7-bec4-a5e9ec5cab8b\",\"id\":\"1d659060-1b92-11e7-bec4-a5e9ec5cab8b\",\"type\":\"derivative\",\"unit\":\"1s\"},{\"field\":\"1d659060-1b92-11e7-bec4-a5e9ec5cab8b\",\"id\":\"f2074f70-1b92-11e7-a416-41f5ccdba2e6\",\"type\":\"positive_only\",\"unit\":\"\"},{\"id\":\"a1737470-2c55-11e7-a0ad-277ce466684d\",\"type\":\"series_agg\",\"function\":\"sum\"}],\"point_size\":1,\"seperate_axis\":0,\"split_mode\":\"terms\",\"stacked\":\"none\",\"value_template\":\"{{value}}/s\",\"terms_field\":\"system.network.name\"},{\"axis_position\":\"right\",\"chart_type\":\"line\",\"color\":\"#68BC00\",\"fill\":0.5,\"formatter\":\"bytes\",\"id\":\"37f70440-1b92-11e7-bec4-a5e9ec5cab8b\",\"label\":\"Total Transferred\",\"line_width\":1,\"metrics\":[{\"field\":\"system.network.out.bytes\",\"id\":\"37f72b50-1b92-11e7-bec4-a5e9ec5cab8b\",\"type\":\"max\"},{\"field\":\"37f72b50-1b92-11e7-bec4-a5e9ec5cab8b\",\"id\":\"37f72b51-1b92-11e7-bec4-a5e9ec5cab8b\",\"type\":\"derivative\",\"unit\":\"\"},{\"unit\":\"\",\"id\":\"f9da2dd0-1b92-11e7-a416-41f5ccdba2e6\",\"type\":\"positive_only\",\"field\":\"37f72b51-1b92-11e7-bec4-a5e9ec5cab8b\"},{\"sigma\":\"\",\"field\":\"f9da2dd0-1b92-11e7-a416-41f5ccdba2e6\",\"id\":\"3e63c2f0-1b92-11e7-bec4-a5e9ec5cab8b\",\"type\":\"series_agg\",\"function\":\"overall_sum\"}],\"point_size\":1,\"seperate_axis\":0,\"split_mode\":\"terms\",\"stacked\":\"none\",\"value_template\":\"{{value}}\",\"terms_field\":\"system.network.name\"}],\"show_legend\":1,\"time_field\":\"@timestamp\",\"type\":\"metric\",\"show_grid\":1},\"aggs\":[]}"
-    },
-    "_meta": {
-      "savedObjectVersion": 2
-    }
-  },
-  {
-    "_id": "825fdb80-4d1d-11e7-b5f2-2b7c1895bf32",
-    "_type": "visualization",
-    "_source": {
-      "description": "",
-      "kibanaSavedObjectMeta": {
-        "searchSourceJSON": "{\"query\":{\"query\":{\"query_string\":{\"query\":\"*\"}},\"language\":\"lucene\"},\"filter\":[]}"
-      },
-      "title": "Disk used [Metricbeat System]",
-      "uiStateJSON": "{}",
-      "version": 1,
-      "visState": "{\"title\":\"Disk used [Metricbeat System]\",\"type\":\"metrics\",\"params\":{\"id\":\"4e4dc780-4d1d-11e7-b5f2-2b7c1895bf32\",\"type\":\"gauge\",\"series\":[{\"id\":\"4e4dee90-4d1d-11e7-b5f2-2b7c1895bf32\",\"color\":\"#68BC00\",\"split_mode\":\"everything\",\"metrics\":[{\"id\":\"4e4dee91-4d1d-11e7-b5f2-2b7c1895bf32\",\"type\":\"avg\",\"field\":\"system.fsstat.total_size.used\"},{\"id\":\"57c96ee0-4d54-11e7-b5f2-2b7c1895bf32\",\"type\":\"avg\",\"field\":\"system.fsstat.total_size.total\"},{\"script\":\"params.total != null && params.total > 0 ? params.used/params.total : null\",\"id\":\"6304cca0-4d54-11e7-b5f2-2b7c1895bf32\",\"type\":\"calculation\",\"variables\":[{\"id\":\"6da10430-4d54-11e7-b5f2-2b7c1895bf32\",\"field\":\"4e4dee91-4d1d-11e7-b5f2-2b7c1895bf32\",\"name\":\"used\"},{\"id\":\"73b8c510-4d54-11e7-b5f2-2b7c1895bf32\",\"name\":\"total\",\"field\":\"57c96ee0-4d54-11e7-b5f2-2b7c1895bf32\"}]}],\"seperate_axis\":0,\"axis_position\":\"right\",\"formatter\":\"percent\",\"chart_type\":\"line\",\"line_width\":1,\"point_size\":1,\"fill\":0.5,\"stacked\":\"none\",\"label\":\"Disk used\"}],\"time_field\":\"@timestamp\",\"index_pattern\":\"*\",\"interval\":\"auto\",\"axis_position\":\"left\",\"axis_formatter\":\"number\",\"show_legend\":1,\"gauge_color_rules\":[{\"value\":0,\"id\":\"51921d10-4d1d-11e7-b5f2-2b7c1895bf32\",\"gauge\":\"rgba(104,188,0,1)\",\"opperator\":\"gte\"},{\"value\":0.7,\"id\":\"f26de750-4d54-11e7-b5f2-2b7c1895bf32\",\"gauge\":\"rgba(251,158,0,1)\",\"opperator\":\"gte\"},{\"value\":0.85,\"id\":\"fa31d190-4d54-11e7-b5f2-2b7c1895bf32\",\"gauge\":\"rgba(211,49,21,1)\",\"opperator\":\"gte\"}],\"gauge_width\":10,\"gauge_inner_width\":10,\"gauge_style\":\"half\",\"gauge_max\":\"1\",\"filter\":\"\",\"show_grid\":1},\"aggs\":[]}"
-    },
-    "_meta": {
-      "savedObjectVersion": 2
-    }
-  },
-  {
-    "_id": "d3166e80-1b91-11e7-bec4-a5e9ec5cab8b",
-    "_type": "visualization",
-    "_source": {
-      "description": "",
-      "kibanaSavedObjectMeta": {
-        "searchSourceJSON": "{\"query\":{\"query\":{\"query_string\":{\"query\":\"*\"}},\"language\":\"lucene\"},\"filter\":[]}"
-      },
-      "title": "Memory Usage Gauge [Metricbeat System]",
-      "uiStateJSON": "{}",
-      "version": 1,
-      "visState": "{\"title\":\"Memory Usage Gauge [Metricbeat System]\",\"type\":\"metrics\",\"params\":{\"id\":\"9f51b730-1b91-11e7-bec4-a5e9ec5cab8b\",\"type\":\"gauge\",\"series\":[{\"id\":\"9f51b731-1b91-11e7-bec4-a5e9ec5cab8b\",\"color\":\"#68BC00\",\"split_mode\":\"everything\",\"metrics\":[{\"id\":\"9f51b732-1b91-11e7-bec4-a5e9ec5cab8b\",\"type\":\"avg\",\"field\":\"system.memory.actual.used.pct\"}],\"seperate_axis\":0,\"axis_position\":\"right\",\"formatter\":\"percent\",\"chart_type\":\"line\",\"line_width\":1,\"point_size\":1,\"fill\":0.5,\"stacked\":\"none\",\"label\":\"Memory Usage\"}],\"time_field\":\"@timestamp\",\"index_pattern\":\"*\",\"interval\":\"auto\",\"axis_position\":\"left\",\"axis_formatter\":\"number\",\"show_legend\":1,\"gauge_color_rules\":[{\"value\":0,\"id\":\"a0d522e0-1b91-11e7-bec4-a5e9ec5cab8b\",\"gauge\":\"rgba(104,188,0,1)\",\"opperator\":\"gte\"},{\"value\":0.7,\"id\":\"b45ad8f0-1b91-11e7-bec4-a5e9ec5cab8b\",\"gauge\":\"rgba(254,146,0,1)\",\"opperator\":\"gte\"},{\"value\":0.85,\"id\":\"c06e9550-1b91-11e7-bec4-a5e9ec5cab8b\",\"gauge\":\"rgba(211,49,21,1)\",\"opperator\":\"gte\"}],\"gauge_width\":10,\"gauge_inner_width\":10,\"gauge_style\":\"half\",\"gauge_max\":\"1\",\"filter\":\"\",\"show_grid\":1},\"aggs\":[]}"
-    },
-    "_meta": {
-      "savedObjectVersion": 2
-    }
-  },
-  {
-    "_id": "83e12df0-1b91-11e7-bec4-a5e9ec5cab8b",
-    "_type": "visualization",
-    "_source": {
-      "description": "",
-      "kibanaSavedObjectMeta": {
-        "searchSourceJSON": "{\"query\":{\"query\":{\"query_string\":{\"query\":\"*\"}},\"language\":\"lucene\"},\"filter\":[]}"
-      },
-      "title": "CPU Usage Gauge [Metricbeat System]",
-      "uiStateJSON": "{}",
-      "version": 1,
-      "visState": "{\"title\":\"CPU Usage Gauge [Metricbeat System]\",\"type\":\"metrics\",\"params\":{\"axis_formatter\":\"number\",\"axis_position\":\"left\",\"filter\":\"\",\"gauge_color_rules\":[{\"gauge\":\"rgba(104,188,0,1)\",\"id\":\"4ef2c3b0-1b91-11e7-bec4-a5e9ec5cab8b\",\"opperator\":\"gte\",\"value\":0},{\"gauge\":\"rgba(254,146,0,1)\",\"id\":\"e6561ae0-1b91-11e7-bec4-a5e9ec5cab8b\",\"opperator\":\"gte\",\"value\":0.7},{\"gauge\":\"rgba(211,49,21,1)\",\"id\":\"ec655040-1b91-11e7-bec4-a5e9ec5cab8b\",\"opperator\":\"gte\",\"value\":0.85}],\"gauge_inner_width\":10,\"gauge_max\":\"1\",\"gauge_style\":\"half\",\"gauge_width\":10,\"id\":\"4c9e2550-1b91-11e7-bec4-a5e9ec5cab8b\",\"index_pattern\":\"*\",\"interval\":\"auto\",\"series\":[{\"axis_position\":\"right\",\"chart_type\":\"line\",\"color\":\"#68BC00\",\"fill\":0.5,\"formatter\":\"percent\",\"id\":\"4c9e2551-1b91-11e7-bec4-a5e9ec5cab8b\",\"label\":\"CPU Usage\",\"line_width\":1,\"metrics\":[{\"field\":\"system.cpu.user.pct\",\"id\":\"4c9e2552-1b91-11e7-bec4-a5e9ec5cab8b\",\"type\":\"avg\"},{\"field\":\"system.cpu.system.pct\",\"id\":\"225c2140-5fd7-11e7-a63a-a937b7c1a7e1\",\"type\":\"avg\"},{\"field\":\"system.cpu.cores\",\"id\":\"837a30c0-5fd7-11e7-a63a-a937b7c1a7e1\",\"type\":\"avg\"},{\"script\":\"params.n > 0 ? (params.user+params.system)/params.n : null\",\"id\":\"587aa510-1b91-11e7-bec4-a5e9ec5cab8b\",\"type\":\"calculation\",\"variables\":[{\"field\":\"4c9e2552-1b91-11e7-bec4-a5e9ec5cab8b\",\"id\":\"5a19af10-1b91-11e7-bec4-a5e9ec5cab8b\",\"name\":\"user\"},{\"field\":\"225c2140-5fd7-11e7-a63a-a937b7c1a7e1\",\"id\":\"32b54f80-5fd7-11e7-a63a-a937b7c1a7e1\",\"name\":\"system\"},{\"field\":\"837a30c0-5fd7-11e7-a63a-a937b7c1a7e1\",\"id\":\"8ba6eef0-5fd7-11e7-a63a-a937b7c1a7e1\",\"name\":\"n\"}]}],\"point_size\":1,\"seperate_axis\":0,\"split_mode\":\"everything\",\"stacked\":\"none\"}],\"show_legend\":1,\"time_field\":\"@timestamp\",\"type\":\"gauge\",\"show_grid\":1},\"aggs\":[]}"
-    },
-    "_meta": {
-      "savedObjectVersion": 2
-    }
-  },
-  {
-    "_id": "Redis-Clients-Metrics",
-    "_type": "visualization",
-    "_source": {
-      "description": "",
-      "kibanaSavedObjectMeta": {
-        "searchSourceJSON": "{\"filter\":[]}"
-      },
-      "savedSearchId": "Metricbeat-Redis",
-      "title": "Clients [Metricbeat Redis]",
-      "uiStateJSON": "{\"vis\":{\"defaultColors\":{\"0 - 100\":\"rgb(0,104,55)\"}}}",
-      "version": 1,
-      "visState": "{\"aggs\":[{\"enabled\":true,\"id\":\"1\",\"params\":{\"customLabel\":\"Connected clients\",\"field\":\"redis.info.clients.connected\"},\"schema\":\"metric\",\"type\":\"max\"}],\"listeners\":{},\"params\":{\"addLegend\":false,\"addTooltip\":true,\"fontSize\":60,\"gauge\":{\"autoExtend\":false,\"backStyle\":\"Full\",\"colorSchema\":\"Green to Red\",\"colorsRange\":[{\"from\":0,\"to\":100}],\"gaugeColorMode\":\"None\",\"gaugeStyle\":\"Full\",\"gaugeType\":\"Metric\",\"invertColors\":false,\"labels\":{\"color\":\"black\",\"show\":true},\"orientation\":\"vertical\",\"percentageMode\":false,\"scale\":{\"color\":\"#333\",\"labels\":false,\"show\":false,\"width\":2},\"style\":{\"bgColor\":false,\"fontSize\":60,\"labelColor\":false,\"subText\":\"\"},\"type\":\"simple\",\"useRange\":false,\"verticalSplit\":false},\"handleNoResults\":true,\"type\":\"gauge\"},\"title\":\"Clients [Metricbeat Redis]\",\"type\":\"metric\"}"
-    },
-    "_meta": {
-      "savedObjectVersion": 2
-    }
-  },
-  {
-    "_id": "Redis-Connected-clients",
-    "_type": "visualization",
-    "_source": {
-      "description": "",
-      "kibanaSavedObjectMeta": {
-        "searchSourceJSON": "{\"filter\":[]}"
-      },
-      "savedSearchId": "Metricbeat-Redis",
-      "title": "Connected clients [Metricbeat Redis]",
-      "uiStateJSON": "{\"vis\":{\"colors\":{\"Blocked\":\"#C15C17\"}}}",
-      "version": 1,
-      "visState": "{\"aggs\":[{\"enabled\":true,\"id\":\"1\",\"params\":{\"customLabel\":\"Connected\",\"field\":\"redis.info.clients.connected\"},\"schema\":\"metric\",\"type\":\"max\"},{\"enabled\":true,\"id\":\"2\",\"params\":{\"customInterval\":\"2h\",\"extended_bounds\":{},\"field\":\"@timestamp\",\"interval\":\"auto\",\"min_doc_count\":1},\"schema\":\"segment\",\"type\":\"date_histogram\"},{\"enabled\":true,\"id\":\"3\",\"params\":{\"customLabel\":\"Blocked\",\"field\":\"redis.info.clients.blocked\"},\"schema\":\"metric\",\"type\":\"max\"}],\"listeners\":{},\"params\":{\"addLegend\":true,\"addTimeMarker\":false,\"addTooltip\":true,\"categoryAxes\":[{\"id\":\"CategoryAxis-1\",\"labels\":{\"show\":true,\"truncate\":100},\"position\":\"bottom\",\"scale\":{\"type\":\"linear\"},\"show\":true,\"style\":{},\"title\":{},\"type\":\"category\"}],\"defaultYExtents\":false,\"grid\":{\"categoryLines\":false,\"style\":{\"color\":\"#eee\"}},\"legendPosition\":\"right\",\"mode\":\"grouped\",\"scale\":\"linear\",\"seriesParams\":[{\"data\":{\"id\":\"1\",\"label\":\"Count\"},\"drawLinesBetweenPoints\":true,\"mode\":\"stacked\",\"show\":\"true\",\"showCircles\":true,\"type\":\"histogram\",\"valueAxis\":\"ValueAxis-1\"}],\"setYExtents\":false,\"shareYAxis\":true,\"times\":[],\"valueAxes\":[{\"id\":\"ValueAxis-1\",\"labels\":{\"filter\":false,\"rotate\":0,\"show\":true,\"truncate\":100},\"name\":\"LeftAxis-1\",\"position\":\"left\",\"scale\":{\"mode\":\"normal\",\"type\":\"linear\"},\"show\":true,\"style\":{},\"title\":{\"text\":\"Count\"},\"type\":\"value\"}],\"yAxis\":{}},\"title\":\"Connected clients [Metricbeat Redis]\",\"type\":\"histogram\"}"
-    },
-    "_meta": {
-      "savedObjectVersion": 2
-    }
-  },
-  {
-    "_id": "Redis-hosts",
-    "_type": "visualization",
-    "_source": {
-      "description": "",
-      "kibanaSavedObjectMeta": {
-        "searchSourceJSON": "{\"filter\":[]}"
-      },
-      "savedSearchId": "Metricbeat-Redis",
-      "title": "Hosts [Metricbeat Redis]",
-      "uiStateJSON": "{\"vis\":{\"params\":{\"sort\":{\"columnIndex\":null,\"direction\":null}}}}",
-      "version": 1,
-      "visState": "{\"title\":\"Hosts [Metricbeat Redis]\",\"type\":\"table\",\"params\":{\"perPage\":10,\"showPartialRows\":false,\"showMeticsAtAllLevels\":false,\"sort\":{\"columnIndex\":null,\"direction\":null},\"showTotal\":false,\"totalFunc\":\"sum\"},\"aggs\":[{\"id\":\"2\",\"enabled\":true,\"type\":\"terms\",\"schema\":\"bucket\",\"params\":{\"field\":\"metricset.host\",\"size\":5,\"order\":\"desc\",\"orderBy\":\"1\"}},{\"id\":\"3\",\"enabled\":true,\"type\":\"max\",\"schema\":\"metric\",\"params\":{\"field\":\"redis.info.server.uptime\",\"customLabel\":\"Uptime (s)\"}},{\"id\":\"6\",\"enabled\":true,\"type\":\"max\",\"schema\":\"metric\",\"params\":{\"field\":\"redis.info.server.process_id\",\"customLabel\":\"PID\"}},{\"id\":\"1\",\"enabled\":true,\"type\":\"max\",\"schema\":\"metric\",\"params\":{\"field\":\"redis.info.memory.used.peak\",\"customLabel\":\"Memory\"}},{\"id\":\"4\",\"enabled\":true,\"type\":\"max\",\"schema\":\"metric\",\"params\":{\"field\":\"redis.info.cpu.used.user\",\"customLabel\":\"CPU used (user)\"}},{\"id\":\"5\",\"enabled\":true,\"type\":\"max\",\"schema\":\"metric\",\"params\":{\"field\":\"redis.info.cpu.used.sys\",\"customLabel\":\"CPU used (system)\"}}],\"listeners\":{}}"
-    },
-    "_meta": {
-      "savedObjectVersion": 2
-    }
-  },
-  {
-    "_id": "Redis-Server-Versions",
-    "_type": "visualization",
-    "_source": {
-      "description": "",
-      "kibanaSavedObjectMeta": {
-        "searchSourceJSON": "{\"filter\":[]}"
-      },
-      "savedSearchId": "Metricbeat-Redis",
-      "title": "Server Versions [Metricbeat Redis]",
-      "uiStateJSON": "{}",
-      "version": 1,
-      "visState": "{\"title\":\"Server Versions [Metricbeat Redis]\",\"type\":\"pie\",\"params\":{\"shareYAxis\":true,\"addTooltip\":true,\"addLegend\":true,\"legendPosition\":\"right\",\"isDonut\":false},\"aggs\":[{\"id\":\"1\",\"enabled\":true,\"type\":\"cardinality\",\"schema\":\"metric\",\"params\":{\"field\":\"metricset.host\",\"customLabel\":\"Hosts\"}},{\"id\":\"2\",\"enabled\":true,\"type\":\"terms\",\"schema\":\"segment\",\"params\":{\"field\":\"redis.info.server.version\",\"size\":5,\"order\":\"desc\",\"orderBy\":\"1\",\"customLabel\":\"Multiplexing API\"}}],\"listeners\":{}}"
-    },
-    "_meta": {
-      "savedObjectVersion": 2
-    }
-  },
-  {
-    "_id": "Redis-server-mode",
-    "_type": "visualization",
-    "_source": {
-      "description": "",
-      "kibanaSavedObjectMeta": {
-        "searchSourceJSON": "{\"filter\":[]}"
-      },
-      "savedSearchId": "Metricbeat-Redis",
-      "title": "Server mode [Metricbeat Redis]",
-      "uiStateJSON": "{}",
-      "version": 1,
-      "visState": "{\"title\":\"Server mode [Metricbeat Redis]\",\"type\":\"pie\",\"params\":{\"shareYAxis\":true,\"addTooltip\":true,\"addLegend\":true,\"legendPosition\":\"right\",\"isDonut\":false},\"aggs\":[{\"id\":\"1\",\"enabled\":true,\"type\":\"cardinality\",\"schema\":\"metric\",\"params\":{\"field\":\"metricset.host\",\"customLabel\":\"Hosts\"}},{\"id\":\"2\",\"enabled\":true,\"type\":\"terms\",\"schema\":\"segment\",\"params\":{\"field\":\"redis.info.server.mode\",\"size\":5,\"order\":\"desc\",\"orderBy\":\"1\",\"customLabel\":\"Server mode\"}}],\"listeners\":{}}"
-    },
-    "_meta": {
-      "savedObjectVersion": 2
-    }
-  },
-  {
-    "_id": "Redis-multiplexing-API",
-    "_type": "visualization",
-    "_source": {
-      "description": "",
-      "kibanaSavedObjectMeta": {
-        "searchSourceJSON": "{\"filter\":[]}"
-      },
-      "savedSearchId": "Metricbeat-Redis",
-      "title": "Multiplexing API [Metricbeat Redis]",
-      "uiStateJSON": "{}",
-      "version": 1,
-      "visState": "{\"title\":\"Multiplexing API [Metricbeat Redis]\",\"type\":\"pie\",\"params\":{\"shareYAxis\":true,\"addTooltip\":true,\"addLegend\":true,\"legendPosition\":\"right\",\"isDonut\":false},\"aggs\":[{\"id\":\"1\",\"enabled\":true,\"type\":\"cardinality\",\"schema\":\"metric\",\"params\":{\"field\":\"metricset.host\",\"customLabel\":\"Hosts\"}},{\"id\":\"2\",\"enabled\":true,\"type\":\"terms\",\"schema\":\"segment\",\"params\":{\"field\":\"redis.info.server.multiplexing_api\",\"size\":5,\"order\":\"desc\",\"orderBy\":\"1\",\"customLabel\":\"Multiplexing API\"}}],\"listeners\":{}}"
-    },
-    "_meta": {
-      "savedObjectVersion": 2
-    }
-  },
-  {
-    "_id": "Redis-Keyspaces",
-    "_type": "visualization",
-    "_source": {
-      "description": "",
-      "kibanaSavedObjectMeta": {
-        "searchSourceJSON": "{\"filter\":[]}"
-      },
-      "savedSearchId": "Metricbeat-Redis",
-      "title": "Keyspaces [Metricbeat Redis]",
-      "uiStateJSON": "{}",
-      "version": 1,
-      "visState": "{\"aggs\":[{\"enabled\":true,\"id\":\"1\",\"params\":{\"customLabel\":\"Number of keys\",\"field\":\"redis.keyspace.keys\"},\"schema\":\"metric\",\"type\":\"avg\"},{\"enabled\":true,\"id\":\"2\",\"params\":{\"customInterval\":\"2h\",\"extended_bounds\":{},\"field\":\"@timestamp\",\"interval\":\"auto\",\"min_doc_count\":1},\"schema\":\"segment\",\"type\":\"date_histogram\"},{\"enabled\":true,\"id\":\"3\",\"params\":{\"customLabel\":\"Keyspaces\",\"field\":\"redis.keyspace.id\",\"order\":\"desc\",\"orderBy\":\"1\",\"size\":5},\"schema\":\"group\",\"type\":\"terms\"}],\"listeners\":{},\"params\":{\"addLegend\":true,\"addTimeMarker\":false,\"addTooltip\":true,\"categoryAxes\":[{\"id\":\"CategoryAxis-1\",\"labels\":{\"show\":true,\"truncate\":100},\"position\":\"bottom\",\"scale\":{\"type\":\"linear\"},\"show\":true,\"style\":{},\"title\":{},\"type\":\"category\"}],\"defaultYExtents\":false,\"grid\":{\"categoryLines\":false,\"style\":{\"color\":\"#eee\"}},\"interpolate\":\"linear\",\"legendPosition\":\"right\",\"mode\":\"stacked\",\"scale\":\"linear\",\"seriesParams\":[{\"data\":{\"id\":\"1\",\"label\":\"Count\"},\"drawLinesBetweenPoints\":true,\"interpolate\":\"linear\",\"mode\":\"stacked\",\"show\":\"true\",\"showCircles\":true,\"type\":\"area\",\"valueAxis\":\"ValueAxis-1\"}],\"setYExtents\":false,\"shareYAxis\":true,\"smoothLines\":false,\"times\":[],\"valueAxes\":[{\"id\":\"ValueAxis-1\",\"labels\":{\"filter\":false,\"rotate\":0,\"show\":true,\"truncate\":100},\"name\":\"LeftAxis-1\",\"position\":\"left\",\"scale\":{\"mode\":\"normal\",\"type\":\"linear\"},\"show\":true,\"style\":{},\"title\":{\"text\":\"Count\"},\"type\":\"value\"}],\"yAxis\":{}},\"title\":\"Keyspaces [Metricbeat Redis]\",\"type\":\"area\"}"
-    },
-    "_meta": {
-      "savedObjectVersion": 2
-    }
-  },
-  {
-    "_id": "eb8277d0-c98c-11e7-9835-2f31fe08873b",
-    "_type": "visualization",
-    "_source": {
-      "description": "",
-      "kibanaSavedObjectMeta": {
-        "searchSourceJSON": "{\"index\":\"metricbeat-*\",\"filter\":[],\"query\":{\"query\":\"\",\"language\":\"lucene\"}}"
-      },
-      "title": "Service States [Metricbeat Windows]",
-      "uiStateJSON": "{\"vis\":{\"params\":{\"sort\":{\"columnIndex\":null,\"direction\":null}}}}",
-      "version": 1,
-      "visState": "{\"aggs\":[{\"enabled\":true,\"id\":\"1\",\"params\":{\"aggregate\":\"concat\",\"customLabel\":\"Latest Report\",\"field\":\"@timestamp\",\"size\":1,\"sortField\":\"@timestamp\",\"sortOrder\":\"desc\"},\"schema\":\"metric\",\"type\":\"top_hits\"},{\"enabled\":true,\"id\":\"2\",\"params\":{\"customLabel\":\"Service\",\"field\":\"windows.service.display_name\",\"order\":\"asc\",\"orderBy\":\"_term\",\"size\":100},\"schema\":\"bucket\",\"type\":\"terms\"},{\"enabled\":true,\"id\":\"5\",\"params\":{\"customLabel\":\"Host\",\"field\":\"beat.name\",\"order\":\"desc\",\"orderBy\":\"_term\",\"size\":5},\"schema\":\"bucket\",\"type\":\"terms\"},{\"enabled\":true,\"id\":\"3\",\"params\":{\"customLabel\":\"State\",\"field\":\"windows.service.state\",\"order\":\"desc\",\"orderAgg\":{\"enabled\":true,\"id\":\"3-orderAgg\",\"params\":{\"field\":\"@timestamp\"},\"schema\":{\"aggFilter\":[\"!top_hits\",\"!percentiles\",\"!median\",\"!std_dev\",\"!derivative\",\"!moving_avg\",\"!serial_diff\",\"!cumulative_sum\",\"!avg_bucket\",\"!max_bucket\",\"!min_bucket\",\"!sum_bucket\"],\"deprecate\":false,\"editor\":false,\"group\":\"none\",\"hideCustomLabel\":true,\"max\":null,\"min\":0,\"name\":\"orderAgg\",\"params\":[],\"title\":\"Order Agg\"},\"type\":\"max\"},\"orderBy\":\"custom\",\"size\":1},\"schema\":\"bucket\",\"type\":\"terms\"},{\"enabled\":true,\"id\":\"4\",\"params\":{\"customLabel\":\"Startup Type\",\"field\":\"windows.service.start_type\",\"order\":\"desc\",\"orderAgg\":{\"enabled\":true,\"id\":\"4-orderAgg\",\"params\":{\"field\":\"@timestamp\"},\"schema\":{\"aggFilter\":[\"!top_hits\",\"!percentiles\",\"!median\",\"!std_dev\",\"!derivative\",\"!moving_avg\",\"!serial_diff\",\"!cumulative_sum\",\"!avg_bucket\",\"!max_bucket\",\"!min_bucket\",\"!sum_bucket\"],\"deprecate\":false,\"editor\":false,\"group\":\"none\",\"hideCustomLabel\":true,\"max\":null,\"min\":0,\"name\":\"orderAgg\",\"params\":[],\"title\":\"Order Agg\"},\"type\":\"max\"},\"orderBy\":\"custom\",\"size\":1},\"schema\":\"bucket\",\"type\":\"terms\"}],\"params\":{\"perPage\":10,\"showMeticsAtAllLevels\":false,\"showPartialRows\":false,\"showTotal\":false,\"sort\":{\"columnIndex\":null,\"direction\":null},\"totalFunc\":\"sum\"},\"title\":\"Service States [Metricbeat Windows]\",\"type\":\"table\"}"
-    },
-    "_meta": {
-      "savedObjectVersion": 2
-    }
-  },
-  {
-    "_id": "23a5fff0-c98e-11e7-9835-2f31fe08873b",
-    "_type": "visualization",
-    "_source": {
-      "description": "",
-      "kibanaSavedObjectMeta": {
-        "searchSourceJSON": "{\"filter\":[],\"query\":{\"query\":\"\",\"language\":\"lucene\"}}"
-      },
-      "savedSearchId": "b6b7ccc0-c98d-11e7-9835-2f31fe08873b",
-      "title": "Hosts [Metricbeat Windows]",
-      "uiStateJSON": "{\"vis\":{\"params\":{\"sort\":{\"columnIndex\":null,\"direction\":null}}}}",
-      "version": 1,
-      "visState": "{\"title\":\"Hosts [Metricbeat Windows]\",\"type\":\"table\",\"params\":{\"perPage\":10,\"showPartialRows\":false,\"showMeticsAtAllLevels\":false,\"sort\":{\"columnIndex\":null,\"direction\":null},\"showTotal\":false,\"totalFunc\":\"sum\"},\"aggs\":[{\"id\":\"1\",\"enabled\":true,\"type\":\"cardinality\",\"schema\":\"metric\",\"params\":{\"field\":\"windows.service.id\",\"customLabel\":\"Total Services\"}},{\"id\":\"2\",\"enabled\":true,\"type\":\"terms\",\"schema\":\"bucket\",\"params\":{\"field\":\"beat.name\",\"size\":100,\"order\":\"desc\",\"orderBy\":\"1\",\"customLabel\":\"Host\"}}]}"
-    },
-    "_meta": {
-      "savedObjectVersion": 2
-    }
-  },
-  {
-    "_id": "830c45f0-c991-11e7-9835-2f31fe08873b",
-    "_type": "visualization",
-    "_source": {
-      "description": "",
-      "kibanaSavedObjectMeta": {
-        "searchSourceJSON": "{\"filter\":[],\"query\":{\"query\":\"\",\"language\":\"lucene\"}}"
-      },
-      "savedSearchId": "b6b7ccc0-c98d-11e7-9835-2f31fe08873b",
-      "title": "Startup States [Metricbeat Windows]",
-      "uiStateJSON": "{}",
-      "version": 1,
-      "visState": "{\"title\":\"Startup States [Metricbeat Windows]\",\"type\":\"pie\",\"params\":{\"type\":\"pie\",\"addTooltip\":true,\"addLegend\":true,\"legendPosition\":\"right\",\"isDonut\":true},\"aggs\":[{\"id\":\"1\",\"enabled\":true,\"type\":\"cardinality\",\"schema\":\"metric\",\"params\":{\"field\":\"windows.service.id\",\"customLabel\":\"Service Count\"}},{\"id\":\"4\",\"enabled\":true,\"type\":\"terms\",\"schema\":\"segment\",\"params\":{\"field\":\"windows.service.start_type\",\"size\":5,\"order\":\"desc\",\"orderBy\":\"1\",\"customLabel\":\"Startup Type\"}},{\"id\":\"3\",\"enabled\":true,\"type\":\"terms\",\"schema\":\"segment\",\"params\":{\"field\":\"windows.service.state\",\"size\":5,\"order\":\"desc\",\"orderBy\":\"1\",\"customLabel\":\"State\"}}]}"
-    },
-    "_meta": {
-      "savedObjectVersion": 2
-    }
-  },
-  {
-    "_id": "35f5ad60-c996-11e7-9835-2f31fe08873b",
-    "_type": "visualization",
-    "_source": {
-      "description": "",
-      "kibanaSavedObjectMeta": {
-        "searchSourceJSON": "{\"filter\":[],\"query\":{\"query\":\"\",\"language\":\"lucene\"}}"
-      },
-      "savedSearchId": "b6b7ccc0-c98d-11e7-9835-2f31fe08873b",
-      "title": "Unique Services [Metricbeat Windows]",
-      "uiStateJSON": "{\"vis\":{\"defaultColors\":{\"0 - 100\":\"rgb(0,104,55)\"}}}",
-      "version": 1,
-      "visState": "{\"title\":\"Unique Services [Metricbeat Windows]\",\"type\":\"metric\",\"params\":{\"addTooltip\":true,\"addLegend\":false,\"type\":\"gauge\",\"gauge\":{\"verticalSplit\":false,\"autoExtend\":false,\"percentageMode\":false,\"gaugeType\":\"Metric\",\"gaugeStyle\":\"Full\",\"backStyle\":\"Full\",\"orientation\":\"vertical\",\"colorSchema\":\"Green to Red\",\"gaugeColorMode\":\"None\",\"useRange\":false,\"colorsRange\":[{\"from\":0,\"to\":100}],\"invertColors\":false,\"labels\":{\"show\":false,\"color\":\"black\"},\"scale\":{\"show\":false,\"labels\":false,\"color\":\"#333\",\"width\":2},\"type\":\"simple\",\"style\":{\"fontSize\":60,\"bgColor\":false,\"labelColor\":false,\"subText\":\"\"}}},\"aggs\":[{\"id\":\"1\",\"enabled\":true,\"type\":\"cardinality\",\"schema\":\"metric\",\"params\":{\"field\":\"windows.service.id\",\"customLabel\":\"Services\"}}]}"
-    },
-    "_meta": {
-      "savedObjectVersion": 2
-    }
-  },
-  {
-    "_id": "c36b2ba0-ca29-11e7-9835-2f31fe08873b",
-    "_type": "visualization",
-    "_source": {
-      "description": "",
-      "kibanaSavedObjectMeta": {
-        "searchSourceJSON": "{\"filter\":[{\"meta\":{\"index\":\"metricbeat-*\",\"negate\":false,\"disabled\":false,\"alias\":null,\"type\":\"exists\",\"key\":\"windows.service.exit_code\",\"value\":\"exists\"},\"exists\":{\"field\":\"windows.service.exit_code\"},\"$state\":{\"store\":\"appState\"}},{\"meta\":{\"index\":\"metricbeat-*\",\"negate\":true,\"disabled\":false,\"alias\":null,\"type\":\"phrase\",\"key\":\"windows.service.exit_code\",\"value\":\"0\",\"params\":{\"query\":\"0\",\"type\":\"phrase\"}},\"query\":{\"match\":{\"windows.service.exit_code\":{\"query\":\"0\",\"type\":\"phrase\"}}},\"$state\":{\"store\":\"appState\"}},{\"meta\":{\"index\":\"metricbeat-*\",\"negate\":true,\"disabled\":false,\"alias\":null,\"type\":\"phrase\",\"key\":\"windows.service.exit_code\",\"value\":\"ERROR_SERVICE_NEVER_STARTED\",\"params\":{\"query\":\"ERROR_SERVICE_NEVER_STARTED\",\"type\":\"phrase\"}},\"query\":{\"match\":{\"windows.service.exit_code\":{\"query\":\"ERROR_SERVICE_NEVER_STARTED\",\"type\":\"phrase\"}}},\"$state\":{\"store\":\"appState\"}}],\"query\":{\"query\":\"\",\"language\":\"lucene\"}}"
-      },
-      "savedSearchId": "b6b7ccc0-c98d-11e7-9835-2f31fe08873b",
-      "title": "Non-zero Service Exit Codes [Metricbeat Windows]",
-      "uiStateJSON": "{\"vis\":{\"defaultColors\":{\"0 - 100\":\"rgb(0,104,55)\"}}}",
-      "version": 1,
-      "visState": "{\"title\":\"Non-zero Service Exit Codes [Metricbeat Windows]\",\"type\":\"metric\",\"params\":{\"addTooltip\":true,\"addLegend\":false,\"type\":\"gauge\",\"gauge\":{\"verticalSplit\":false,\"autoExtend\":false,\"percentageMode\":false,\"gaugeType\":\"Metric\",\"gaugeStyle\":\"Full\",\"backStyle\":\"Full\",\"orientation\":\"vertical\",\"colorSchema\":\"Green to Red\",\"gaugeColorMode\":\"None\",\"useRange\":false,\"colorsRange\":[{\"from\":0,\"to\":100}],\"invertColors\":false,\"labels\":{\"show\":false,\"color\":\"black\"},\"scale\":{\"show\":false,\"labels\":false,\"color\":\"#333\",\"width\":2},\"type\":\"simple\",\"style\":{\"fontSize\":60,\"bgColor\":false,\"labelColor\":false,\"subText\":\"\"}}},\"aggs\":[{\"id\":\"1\",\"enabled\":true,\"type\":\"cardinality\",\"schema\":\"metric\",\"params\":{\"field\":\"windows.service.id\",\"customLabel\":\"Non-zero Exit Codes\"}}]}"
-    },
-    "_meta": {
-      "savedObjectVersion": 2
-    }
-  },
-  {
-    "_id": "e784dc50-0005-11e7-bf7f-c9acc3d3e306",
-    "_type": "visualization",
-    "_source": {
-      "description": "",
-      "kibanaSavedObjectMeta": {
-        "searchSourceJSON": "{\"filter\":[]}"
-      },
-      "savedSearchId": "67e88e60-0005-11e7-aaf1-b342e4b94bb0",
-      "title": "Active connections [Metricbeat MySQL]",
-      "uiStateJSON": "{}",
-      "version": 1,
-      "visState": "{\"title\":\"Active connections [Metricbeat MySQL]\",\"type\":\"line\",\"params\":{\"addTooltip\":true,\"addLegend\":true,\"legendPosition\":\"bottom\",\"showCircles\":true,\"interpolate\":\"linear\",\"scale\":\"linear\",\"drawLinesBetweenPoints\":true,\"radiusRatio\":9,\"times\":[],\"addTimeMarker\":false,\"defaultYExtents\":false,\"setYExtents\":false,\"type\":\"line\",\"grid\":{\"categoryLines\":false,\"style\":{\"color\":\"#eee\"}},\"categoryAxes\":[{\"id\":\"CategoryAxis-1\",\"type\":\"category\",\"position\":\"bottom\",\"show\":true,\"style\":{},\"scale\":{\"type\":\"linear\"},\"labels\":{\"show\":true,\"truncate\":100},\"title\":{}}],\"valueAxes\":[{\"id\":\"ValueAxis-1\",\"name\":\"LeftAxis-1\",\"type\":\"value\",\"position\":\"left\",\"show\":true,\"style\":{},\"scale\":{\"type\":\"linear\",\"mode\":\"normal\"},\"labels\":{\"show\":true,\"rotate\":0,\"filter\":false,\"truncate\":100},\"title\":{\"text\":\"Count\"}}],\"seriesParams\":[{\"show\":\"true\",\"type\":\"line\",\"mode\":\"normal\",\"data\":{\"label\":\"Count\",\"id\":\"1\"},\"valueAxis\":\"ValueAxis-1\",\"drawLinesBetweenPoints\":true,\"showCircles\":true}]},\"aggs\":[{\"id\":\"1\",\"enabled\":true,\"type\":\"avg\",\"schema\":\"metric\",\"params\":{\"field\":\"mysql.status.connections\"}},{\"id\":\"2\",\"enabled\":true,\"type\":\"date_histogram\",\"schema\":\"segment\",\"params\":{\"field\":\"@timestamp\",\"interval\":\"auto\",\"customInterval\":\"2h\",\"min_doc_count\":1,\"extended_bounds\":{}}}]}"
-    },
-    "_meta": {
-      "savedObjectVersion": 2
-    }
-  },
-  {
-    "_id": "0f506420-0006-11e7-bf7f-c9acc3d3e306",
-    "_type": "visualization",
-    "_source": {
-      "description": "",
-      "kibanaSavedObjectMeta": {
-        "searchSourceJSON": "{\"filter\":[]}"
-      },
-      "savedSearchId": "67e88e60-0005-11e7-aaf1-b342e4b94bb0",
-      "title": "Files opened [Metricbeat MySQL]",
-      "uiStateJSON": "{}",
-      "version": 1,
-      "visState": "{\"title\":\"Files opened [Metricbeat MySQL]\",\"type\":\"line\",\"params\":{\"addTooltip\":true,\"addLegend\":true,\"legendPosition\":\"bottom\",\"showCircles\":true,\"interpolate\":\"linear\",\"scale\":\"linear\",\"drawLinesBetweenPoints\":true,\"radiusRatio\":9,\"times\":[],\"addTimeMarker\":false,\"defaultYExtents\":false,\"setYExtents\":false,\"type\":\"line\",\"grid\":{\"categoryLines\":false,\"style\":{\"color\":\"#eee\"}},\"categoryAxes\":[{\"id\":\"CategoryAxis-1\",\"type\":\"category\",\"position\":\"bottom\",\"show\":true,\"style\":{},\"scale\":{\"type\":\"linear\"},\"labels\":{\"show\":true,\"truncate\":100},\"title\":{}}],\"valueAxes\":[{\"id\":\"ValueAxis-1\",\"name\":\"LeftAxis-1\",\"type\":\"value\",\"position\":\"left\",\"show\":true,\"style\":{},\"scale\":{\"type\":\"linear\",\"mode\":\"normal\"},\"labels\":{\"show\":true,\"rotate\":0,\"filter\":false,\"truncate\":100},\"title\":{\"text\":\"Count\"}}],\"seriesParams\":[{\"show\":\"true\",\"type\":\"line\",\"mode\":\"normal\",\"data\":{\"label\":\"Count\",\"id\":\"1\"},\"valueAxis\":\"ValueAxis-1\",\"drawLinesBetweenPoints\":true,\"showCircles\":true}]},\"aggs\":[{\"id\":\"1\",\"enabled\":true,\"type\":\"avg\",\"schema\":\"metric\",\"params\":{\"field\":\"mysql.status.open.files\"}},{\"id\":\"2\",\"enabled\":true,\"type\":\"date_histogram\",\"schema\":\"segment\",\"params\":{\"field\":\"@timestamp\",\"interval\":\"auto\",\"customInterval\":\"2h\",\"min_doc_count\":1,\"extended_bounds\":{}}}]}"
-    },
-    "_meta": {
-      "savedObjectVersion": 2
-    }
-  },
-  {
-    "_id": "45a00c10-0006-11e7-bf7f-c9acc3d3e306",
-    "_type": "visualization",
-    "_source": {
-      "description": "",
-      "kibanaSavedObjectMeta": {
-        "searchSourceJSON": "{\"filter\":[]}"
-      },
-      "savedSearchId": "67e88e60-0005-11e7-aaf1-b342e4b94bb0",
-      "title": "Running threads [Metricbeat MySQL]",
-      "uiStateJSON": "{}",
-      "version": 1,
-      "visState": "{\"title\":\"Running threads [Metricbeat MySQL]\",\"type\":\"line\",\"params\":{\"addTooltip\":true,\"addLegend\":true,\"legendPosition\":\"bottom\",\"showCircles\":true,\"interpolate\":\"linear\",\"scale\":\"linear\",\"drawLinesBetweenPoints\":true,\"radiusRatio\":9,\"times\":[],\"addTimeMarker\":false,\"defaultYExtents\":false,\"setYExtents\":false,\"type\":\"line\",\"grid\":{\"categoryLines\":false,\"style\":{\"color\":\"#eee\"}},\"categoryAxes\":[{\"id\":\"CategoryAxis-1\",\"type\":\"category\",\"position\":\"bottom\",\"show\":true,\"style\":{},\"scale\":{\"type\":\"linear\"},\"labels\":{\"show\":true,\"truncate\":100},\"title\":{}}],\"valueAxes\":[{\"id\":\"ValueAxis-1\",\"name\":\"LeftAxis-1\",\"type\":\"value\",\"position\":\"left\",\"show\":true,\"style\":{},\"scale\":{\"type\":\"linear\",\"mode\":\"normal\"},\"labels\":{\"show\":true,\"rotate\":0,\"filter\":false,\"truncate\":100},\"title\":{\"text\":\"Count\"}}],\"seriesParams\":[{\"show\":\"true\",\"type\":\"line\",\"mode\":\"normal\",\"data\":{\"label\":\"Count\",\"id\":\"1\"},\"valueAxis\":\"ValueAxis-1\",\"drawLinesBetweenPoints\":true,\"showCircles\":true}]},\"aggs\":[{\"id\":\"1\",\"enabled\":true,\"type\":\"avg\",\"schema\":\"metric\",\"params\":{\"field\":\"mysql.status.threads.running\"}},{\"id\":\"2\",\"enabled\":true,\"type\":\"date_histogram\",\"schema\":\"segment\",\"params\":{\"field\":\"@timestamp\",\"interval\":\"auto\",\"customInterval\":\"2h\",\"min_doc_count\":1,\"extended_bounds\":{}}}]}"
-    },
-    "_meta": {
-      "savedObjectVersion": 2
-    }
-  },
-  {
-    "_id": "1eda2280-0008-11e7-82f3-2f380154876c",
-    "_type": "visualization",
-    "_source": {
-      "description": "",
-      "kibanaSavedObjectMeta": {
-        "searchSourceJSON": "{\"query\":{\"query\":{\"query_string\":{\"analyze_wildcard\":true,\"query\":\"*\"}},\"language\":\"lucene\"},\"filter\":[]}"
-      },
-      "title": "MySQL commands [Metricbeat MySQL]",
-      "uiStateJSON": "{}",
-      "version": 1,
-      "visState": "{\"title\":\"MySQL commands [Metricbeat MySQL]\",\"type\":\"timelion\",\"params\":{\"expression\":\".es(*, metric='avg:mysql.status.command.select').derivative().label(\\\"SELECT\\\"),.es(*, metric='avg:mysql.status.command.insert').derivative().label(\\\"INSERT\\\"),.es(*, metric='avg:mysql.status.command.update').derivative().label(\\\"UPDATE\\\"),.es(*, metric='avg:mysql.status.command.delete').derivative().label(\\\"DELETE\\\")\",\"interval\":\"1m\"},\"aggs\":[]}"
-    },
-    "_meta": {
-      "savedObjectVersion": 2
-    }
-  },
-  {
-    "_id": "1ede99e0-0009-11e7-8cd4-73b67e9e3f3c",
-    "_type": "visualization",
-    "_source": {
-      "description": "",
-      "kibanaSavedObjectMeta": {
-        "searchSourceJSON": "{\"query\":{\"query\":{\"query_string\":{\"analyze_wildcard\":true,\"query\":\"*\"}},\"language\":\"lucene\"},\"filter\":[]}"
-      },
-      "title": "Threads created [Metricbeat MySQL]",
-      "uiStateJSON": "{}",
-      "version": 1,
-      "visState": "{\"title\":\"Threads created [Metricbeat MySQL]\",\"type\":\"timelion\",\"params\":{\"expression\":\".es(*, metric='avg:mysql.status.threads.created').derivative().label(\\\"Threads created\\\")\",\"interval\":\"1m\"},\"aggs\":[]}"
-    },
-    "_meta": {
-      "savedObjectVersion": 2
-    }
-  },
-  {
-    "_id": "4c36c420-000a-11e7-8cd4-73b67e9e3f3c",
-    "_type": "visualization",
-    "_source": {
-      "description": "",
-      "kibanaSavedObjectMeta": {
-        "searchSourceJSON": "{\"query\":{\"query\":{\"query_string\":{\"analyze_wildcard\":true,\"query\":\"*\"}},\"language\":\"lucene\"},\"filter\":[]}"
-      },
-      "title": "Tables opened [Metricbeat MySQL]",
-      "uiStateJSON": "{}",
-      "version": 1,
-      "visState": "{\"title\":\"Tables opened [Metricbeat MySQL]\",\"type\":\"timelion\",\"params\":{\"expression\":\".es(*, metric='avg:mysql.status.opened_tables').derivative().label(\\\"Opened tables\\\")\",\"interval\":\"1m\"},\"aggs\":[]}"
-    },
-    "_meta": {
-      "savedObjectVersion": 2
-    }
-  },
-  {
-    "_id": "a2175300-000a-11e7-b001-85aac4878445",
-    "_type": "visualization",
-    "_source": {
-      "description": "",
-      "kibanaSavedObjectMeta": {
-        "searchSourceJSON": "{\"query\":{\"query\":{\"query_string\":{\"analyze_wildcard\":true,\"query\":\"*\"}},\"language\":\"lucene\"},\"filter\":[]}"
-      },
-      "title": "Sent and received bytes [Metricbeat MySQL]",
-      "uiStateJSON": "{}",
-      "version": 1,
-      "visState": "{\"title\":\"Sent and received bytes [Metricbeat MySQL]\",\"type\":\"timelion\",\"params\":{\"expression\":\".es(*,metric=\\\"avg:mysql.status.bytes.sent\\\").derivative().divide(1000).label(\\\"Sent bytes (KB)\\\"),.es(*,metric=\\\"avg:mysql.status.bytes.received\\\").derivative().multiply(-1).divide(1000).label(\\\"Received bytes (KB)\\\")\",\"interval\":\"1m\"},\"aggs\":[]}"
-    },
-    "_meta": {
-      "savedObjectVersion": 2
-    }
-  },
-  {
-    "_id": "RabbitMQ-Memory-Usage",
-    "_type": "visualization",
-    "_source": {
-      "description": "",
-      "kibanaSavedObjectMeta": {
-        "searchSourceJSON": "{\n  \"filter\": []\n}"
-      },
-      "savedSearchId": "Metricbeat-Rabbitmq",
-      "title": "Memory Usage [Metricbeat RabbitMQ]",
-      "uiStateJSON": "{}",
-      "version": 1,
-      "visState": "{\n  \"title\": \"RabbitMQ Memory Usage\",\n  \"type\": \"line\",\n  \"params\": {\n    \"shareYAxis\": true,\n    \"addTooltip\": true,\n    \"addLegend\": true,\n    \"legendPosition\": \"top\",\n    \"showCircles\": false,\n    \"smoothLines\": true,\n    \"interpolate\": \"linear\",\n    \"scale\": \"linear\",\n    \"drawLinesBetweenPoints\": true,\n    \"radiusRatio\": 9,\n    \"times\": [],\n    \"addTimeMarker\": false,\n    \"defaultYExtents\": true,\n    \"setYExtents\": false,\n    \"yAxis\": {}\n  },\n  \"aggs\": [\n    {\n      \"id\": \"1\",\n      \"enabled\": true,\n      \"type\": \"max\",\n      \"schema\": \"metric\",\n      \"params\": {\n        \"field\": \"rabbitmq.node.mem.used.bytes\",\n        \"json\": \"\",\n        \"customLabel\": \"Used memory\"\n      }\n    },\n    {\n      \"id\": \"3\",\n      \"enabled\": true,\n      \"type\": \"date_histogram\",\n      \"schema\": \"segment\",\n      \"params\": {\n        \"field\": \"@timestamp\",\n        \"interval\": \"custom\",\n        \"customInterval\": \"30s\",\n        \"min_doc_count\": 1,\n        \"extended_bounds\": {}\n      }\n    },\n    {\n      \"id\": \"4\",\n      \"enabled\": true,\n      \"type\": \"terms\",\n      \"schema\": \"group\",\n      \"params\": {\n        \"field\": \"rabbitmq.node.name\",\n        \"size\": 5,\n        \"order\": \"desc\",\n        \"orderBy\": \"1\",\n        \"customLabel\": \"Node name\"\n      }\n    }\n  ],\n  \"listeners\": {}\n}"
-    },
-    "_meta": {
-      "savedObjectVersion": 2
-    }
-  },
-  {
-    "_id": "Rabbitmq-Number-of-Nodes",
-    "_type": "visualization",
-    "_source": {
-      "description": "",
-      "kibanaSavedObjectMeta": {
-        "searchSourceJSON": "{\n  \"filter\": []\n}"
-      },
-      "savedSearchId": "Metricbeat-Rabbitmq",
-      "title": "Number of Nodes [Metricbeat RabbitMQ]",
-      "uiStateJSON": "{}",
-      "version": 1,
-      "visState": "{\n  \"title\": \"Rabbitmq-Number-of-Nodes\",\n  \"type\": \"metric\",\n  \"params\": {\n    \"handleNoResults\": true,\n    \"fontSize\": 60\n  },\n  \"aggs\": [\n    {\n      \"id\": \"1\",\n      \"enabled\": true,\n      \"type\": \"cardinality\",\n      \"schema\": \"metric\",\n      \"params\": {\n        \"field\": \"rabbitmq.node.name\",\n        \"customLabel\": \"RabbitMQ Nodes\"\n      }\n    }\n  ],\n  \"listeners\": {}\n}"
-    },
-    "_meta": {
-      "savedObjectVersion": 2
-    }
-  },
-  {
-    "_id": "RabbitMQ-Erlang-Process-Usage",
-    "_type": "visualization",
-    "_source": {
-      "description": "",
-      "kibanaSavedObjectMeta": {
-        "searchSourceJSON": "{\n  \"filter\": []\n}"
-      },
-      "savedSearchId": "Metricbeat-Rabbitmq",
-      "title": "Erlang Process Usage [Metricbeat RabbitMQ]",
-      "uiStateJSON": "{}",
-      "version": 1,
-      "visState": "{\n  \"title\": \"RabbitMQ Erlang Process Usage\",\n  \"type\": \"line\",\n  \"params\": {\n    \"addLegend\": true,\n    \"addTimeMarker\": false,\n    \"addTooltip\": true,\n    \"defaultYExtents\": false,\n    \"drawLinesBetweenPoints\": true,\n    \"interpolate\": \"linear\",\n    \"legendPosition\": \"top\",\n    \"radiusRatio\": 9,\n    \"scale\": \"linear\",\n    \"setYExtents\": false,\n    \"shareYAxis\": true,\n    \"showCircles\": false,\n    \"smoothLines\": true,\n    \"times\": [],\n    \"yAxis\": {}\n  },\n  \"aggs\": [\n    {\n      \"id\": \"1\",\n      \"enabled\": true,\n      \"type\": \"max\",\n      \"schema\": \"metric\",\n      \"params\": {\n        \"field\": \"rabbitmq.node.proc.used\",\n        \"customLabel\": \"Used Process\"\n      }\n    },\n    {\n      \"id\": \"3\",\n      \"enabled\": true,\n      \"type\": \"date_histogram\",\n      \"schema\": \"segment\",\n      \"params\": {\n        \"field\": \"@timestamp\",\n        \"interval\": \"custom\",\n        \"customInterval\": \"30s\",\n        \"min_doc_count\": 1,\n        \"extended_bounds\": {}\n      }\n    },\n    {\n      \"id\": \"4\",\n      \"enabled\": true,\n      \"type\": \"terms\",\n      \"schema\": \"group\",\n      \"params\": {\n        \"field\": \"rabbitmq.node.name\",\n        \"size\": 5,\n        \"order\": \"desc\",\n        \"orderBy\": \"1\",\n        \"customLabel\": \"Node name\"\n      }\n    }\n  ],\n  \"listeners\": {}\n}"
-    },
-    "_meta": {
-      "savedObjectVersion": 2
-    }
-  },
-  {
-    "_id": "RabbitMQ-Queue-Index-Operations",
-    "_type": "visualization",
-    "_source": {
-      "description": "",
-      "kibanaSavedObjectMeta": {
-        "searchSourceJSON": "{\"filter\":[]}"
-      },
-      "savedSearchId": "Metricbeat-Rabbitmq",
-      "title": "Queue Index Operations [Metricbeat RabbitMQ]",
-      "uiStateJSON": "{}",
-      "version": 1,
-      "visState": "{\"title\":\"Queue Index Operations [Metricbeat RabbitMQ]\",\"type\":\"line\",\"params\":{\"shareYAxis\":true,\"addTooltip\":true,\"addLegend\":true,\"legendPosition\":\"top\",\"showCircles\":false,\"smoothLines\":false,\"interpolate\":\"linear\",\"scale\":\"linear\",\"drawLinesBetweenPoints\":true,\"radiusRatio\":9,\"times\":[],\"addTimeMarker\":false,\"defaultYExtents\":false,\"setYExtents\":false,\"yAxis\":{},\"type\":\"line\",\"grid\":{\"categoryLines\":false,\"style\":{\"color\":\"#eee\"}},\"categoryAxes\":[{\"id\":\"CategoryAxis-1\",\"type\":\"category\",\"position\":\"bottom\",\"show\":true,\"style\":{},\"scale\":{\"type\":\"linear\"},\"labels\":{\"show\":true,\"truncate\":100},\"title\":{}}],\"valueAxes\":[{\"id\":\"ValueAxis-1\",\"name\":\"LeftAxis-1\",\"type\":\"value\",\"position\":\"left\",\"show\":true,\"style\":{},\"scale\":{\"type\":\"linear\",\"mode\":\"normal\"},\"labels\":{\"show\":true,\"rotate\":0,\"filter\":false,\"truncate\":100},\"title\":{\"text\":\"Count\"}}],\"seriesParams\":[{\"show\":\"true\",\"type\":\"line\",\"mode\":\"normal\",\"data\":{\"label\":\"Count\",\"id\":\"1\"},\"valueAxis\":\"ValueAxis-1\",\"drawLinesBetweenPoints\":true,\"showCircles\":true}]},\"aggs\":[{\"id\":\"1\",\"enabled\":true,\"type\":\"max\",\"schema\":\"metric\",\"params\":{\"field\":\"rabbitmq.node.queue.index.read.count\",\"customLabel\":\"Queue Index Read\"}},{\"id\":\"2\",\"enabled\":true,\"type\":\"date_histogram\",\"schema\":\"segment\",\"params\":{\"field\":\"@timestamp\",\"interval\":\"custom\",\"customInterval\":\"30s\",\"min_doc_count\":1,\"extended_bounds\":{}}},{\"id\":\"3\",\"enabled\":true,\"type\":\"max\",\"schema\":\"metric\",\"params\":{\"field\":\"rabbitmq.node.queue.index.journal_write.count\",\"customLabel\":\"Queue Index Jornal Write\"}},{\"id\":\"4\",\"enabled\":true,\"type\":\"max\",\"schema\":\"metric\",\"params\":{\"field\":\"rabbitmq.node.queue.index.write.count\",\"customLabel\":\"Queue Index Write\"}}]}"
-    },
-    "_meta": {
-      "savedObjectVersion": 2
-    }
-  },
-  {
-    "_id": "855899e0-1b1c-11e7-b09e-037021c4f8df",
-    "_type": "visualization",
-    "_source": {
-      "title": "Top Hosts By CPU (Realtime) [Metricbeat System]",
-      "visState": "{\"title\":\"Top Hosts By CPU (Realtime) [Metricbeat System]\",\"type\":\"metrics\",\"params\":{\"id\":\"31e5afa0-1b1c-11e7-b09e-037021c4f8df\",\"type\":\"top_n\",\"series\":[{\"id\":\"31e5afa1-1b1c-11e7-b09e-037021c4f8df\",\"color\":\"#68BC00\",\"split_mode\":\"terms\",\"metrics\":[{\"id\":\"31e5afa2-1b1c-11e7-b09e-037021c4f8df\",\"type\":\"avg\",\"field\":\"system.cpu.user.pct\"}],\"seperate_axis\":0,\"axis_position\":\"right\",\"formatter\":\"percent\",\"chart_type\":\"line\",\"line_width\":1,\"point_size\":1,\"fill\":0.5,\"stacked\":\"none\",\"terms_field\":\"beat.hostname.keyword\",\"terms_order_by\":\"31e5afa2-1b1c-11e7-b09e-037021c4f8df\",\"terms_size\":\"10\"}],\"time_field\":\"@timestamp\",\"index_pattern\":\"*\",\"interval\":\"auto\",\"axis_position\":\"left\",\"axis_formatter\":\"number\",\"show_legend\":1,\"bar_color_rules\":[{\"value\":0,\"id\":\"33349dd0-1b1c-11e7-b09e-037021c4f8df\",\"bar_color\":\"rgba(104,188,0,1)\",\"opperator\":\"gte\"},{\"value\":0.6,\"id\":\"997dc440-1b1c-11e7-b09e-037021c4f8df\",\"bar_color\":\"rgba(254,146,0,1)\",\"opperator\":\"gte\"},{\"value\":0.85,\"id\":\"a10d7f20-1b1c-11e7-b09e-037021c4f8df\",\"bar_color\":\"rgba(211,49,21,1)\",\"opperator\":\"gte\"}],\"drilldown_url\":\"../app/kibana#/dashboard/79ffd6e0-faa0-11e6-947f-177f697178b8?_a=(query:(query_string:(analyze_wildcard:!t,query:'beat.name:\\\"{{key}}\\\"')))\",\"filter\":\"\",\"show_grid\":1},\"aggs\":[]}",
-      "uiStateJSON": "{}",
-      "description": "",
-      "version": 1,
-      "kibanaSavedObjectMeta": {
-        "searchSourceJSON": "{\"query\":{\"query\":{\"query_string\":{\"query\":\"*\"}},\"language\":\"lucene\"},\"filter\":[]}"
-      }
-    },
-    "_meta": {
-      "savedObjectVersion": 2
-    }
-  },
-  {
-    "_id": "c6f2ffd0-4d17-11e7-a196-69b9a7a020a9",
-    "_type": "visualization",
-    "_source": {
-      "title": "Number of hosts [Metricbeat System]",
-      "visState": "{\"title\":\"Number of hosts [Metricbeat System]\",\"type\":\"metric\",\"params\":{\"addTooltip\":true,\"addLegend\":false,\"type\":\"metric\",\"metric\":{\"percentageMode\":false,\"colorSchema\":\"Green to Red\",\"useRange\":false,\"colorsRange\":[{\"from\":0,\"to\":100}],\"invertColors\":false,\"labels\":{\"show\":false,\"color\":\"black\"},\"style\":{\"fontSize\":\"63\",\"bgFill\":\"#000\",\"bgColor\":false,\"labelColor\":false,\"subText\":\"\"},\"metricColorMode\":\"None\"}},\"aggs\":[{\"id\":\"1\",\"enabled\":true,\"type\":\"cardinality\",\"schema\":\"metric\",\"params\":{\"field\":\"beat.hostname.keyword\",\"customLabel\":\"Number of hosts\"}}]}",
-      "uiStateJSON": "{\"vis\":{\"defaultColors\":{\"0 - 100\":\"rgb(0,104,55)\"}}}",
-      "description": "",
-      "version": 1,
-      "kibanaSavedObjectMeta": {
-        "searchSourceJSON": "{\"index\":\"metricbeat-*\",\"query\":{\"query\":{\"query_string\":{\"query\":\"*\",\"analyze_wildcard\":true,\"default_field\":\"*\"}},\"language\":\"lucene\"},\"filter\":[]}"
-      }
-    },
-    "_meta": {
-      "savedObjectVersion": 2
-    }
-  },
-  {
-    "_id": "Docker-CPU-usage",
-    "_type": "visualization",
+    "_id": "572efb40-c7cd-11e8-af80-b5004896a534",
+    "_type": "dashboard",
     "_source": {
-      "title": "CPU usage [Metricbeat Docker]",
-      "visState": "{\"title\":\"CPU usage [Metricbeat Docker]\",\"type\":\"area\",\"params\":{\"addLegend\":true,\"addTimeMarker\":false,\"addTooltip\":true,\"categoryAxes\":[{\"id\":\"CategoryAxis-1\",\"labels\":{\"show\":true,\"truncate\":100},\"position\":\"bottom\",\"scale\":{\"type\":\"linear\"},\"show\":true,\"style\":{},\"title\":{},\"type\":\"category\"}],\"defaultYExtents\":false,\"grid\":{\"categoryLines\":false,\"style\":{\"color\":\"#eee\"}},\"interpolate\":\"linear\",\"legendPosition\":\"top\",\"mode\":\"stacked\",\"scale\":\"linear\",\"seriesParams\":[{\"data\":{\"id\":\"1\",\"label\":\"Total CPU time\"},\"drawLinesBetweenPoints\":true,\"interpolate\":\"linear\",\"mode\":\"stacked\",\"show\":\"true\",\"showCircles\":true,\"type\":\"area\",\"valueAxis\":\"ValueAxis-1\"}],\"setYExtents\":false,\"shareYAxis\":true,\"smoothLines\":true,\"times\":[],\"valueAxes\":[{\"id\":\"ValueAxis-1\",\"labels\":{\"filter\":false,\"rotate\":0,\"show\":true,\"truncate\":100},\"name\":\"LeftAxis-1\",\"position\":\"left\",\"scale\":{\"mode\":\"normal\",\"type\":\"linear\"},\"show\":true,\"style\":{},\"title\":{\"text\":\"Total CPU time\"},\"type\":\"value\"}],\"yAxis\":{},\"type\":\"area\"},\"aggs\":[{\"id\":\"1\",\"enabled\":true,\"type\":\"percentiles\",\"schema\":\"metric\",\"params\":{\"field\":\"docker.cpu.total.pct\",\"percents\":[75],\"customLabel\":\"Total CPU time\"}},{\"id\":\"2\",\"enabled\":true,\"type\":\"date_histogram\",\"schema\":\"segment\",\"params\":{\"field\":\"@timestamp\",\"interval\":\"auto\",\"customInterval\":\"2h\",\"min_doc_count\":1,\"extended_bounds\":{}}},{\"id\":\"3\",\"enabled\":true,\"type\":\"terms\",\"schema\":\"group\",\"params\":{\"field\":\"docker.container.name.keyword\",\"otherBucket\":false,\"otherBucketLabel\":\"Other\",\"missingBucket\":false,\"missingBucketLabel\":\"Missing\",\"size\":15,\"order\":\"desc\",\"orderBy\":\"_term\",\"customLabel\":\"Container name\"}}]}",
-      "uiStateJSON": "{}",
+      "title": "Processing statistics [nomad]",
+      "hits": 0,
       "description": "",
+      "panelsJSON": "[{\"panelIndex\":\"1\",\"gridData\":{\"x\":0,\"y\":7,\"w\":24,\"h\":15,\"i\":\"1\"},\"version\":\"6.3.2\",\"type\":\"visualization\",\"id\":\"8568ba90-c61b-11e8-af80-b5004896a534\",\"embeddableConfig\":{}},{\"panelIndex\":\"3\",\"gridData\":{\"x\":24,\"y\":7,\"w\":24,\"h\":15,\"i\":\"3\"},\"version\":\"6.3.2\",\"type\":\"visualization\",\"id\":\"7a71d990-c7c0-11e8-af80-b5004896a534\",\"embeddableConfig\":{}},{\"panelIndex\":\"4\",\"gridData\":{\"x\":19,\"y\":22,\"w\":14,\"h\":15,\"i\":\"4\"},\"version\":\"6.3.2\",\"type\":\"visualization\",\"id\":\"a2e3f4e0-c7b0-11e8-af80-b5004896a534\",\"embeddableConfig\":{}},{\"panelIndex\":\"5\",\"gridData\":{\"x\":0,\"y\":22,\"w\":19,\"h\":15,\"i\":\"5\"},\"version\":\"6.3.2\",\"type\":\"visualization\",\"id\":\"50c32d20-c7b0-11e8-af80-b5004896a534\",\"embeddableConfig\":{}},{\"panelIndex\":\"6\",\"gridData\":{\"x\":0,\"y\":0,\"w\":10,\"h\":7,\"i\":\"6\"},\"version\":\"6.3.2\",\"type\":\"visualization\",\"id\":\"b06a49b0-c2f2-11e8-b9e8-d74f7e7408ed\",\"embeddableConfig\":{}},{\"panelIndex\":\"7\",\"gridData\":{\"x\":10,\"y\":0,\"w\":10,\"h\":7,\"i\":\"7\"},\"version\":\"6.3.2\",\"type\":\"visualization\",\"id\":\"1103dc60-c2f2-11e8-b9e8-d74f7e7408ed\",\"embeddableConfig\":{}},{\"panelIndex\":\"8\",\"gridData\":{\"x\":20,\"y\":0,\"w\":9,\"h\":7,\"i\":\"8\"},\"version\":\"6.3.2\",\"type\":\"visualization\",\"id\":\"fd77bd00-c67b-11e8-af80-b5004896a534\",\"embeddableConfig\":{}},{\"panelIndex\":\"9\",\"gridData\":{\"x\":33,\"y\":22,\"w\":15,\"h\":15,\"i\":\"9\"},\"version\":\"6.3.2\",\"type\":\"visualization\",\"id\":\"d99f6c10-c7b2-11e8-af80-b5004896a534\",\"embeddableConfig\":{}}]",
+      "optionsJSON": "{\"darkTheme\":false,\"useMargins\":true,\"hidePanelTitles\":false}",
       "version": 1,
+      "timeRestore": false,
       "kibanaSavedObjectMeta": {
-        "searchSourceJSON": "{\"filter\":[],\"index\":\"metricbeat-*\",\"highlight\":{\"pre_tags\":[\"@kibana-highlighted-field@\"],\"post_tags\":[\"@/kibana-highlighted-field@\"],\"fields\":{\"*\":{}},\"require_field_match\":false,\"fragment_size\":2147483647},\"query\":{\"query\":{\"query_string\":{\"query\":\"metricset.module:docker AND metricset.name:cpu\",\"analyze_wildcard\":true,\"default_field\":\"*\"}},\"language\":\"lucene\"}}"
+        "searchSourceJSON": "{\"query\":{\"query\":\"\",\"language\":\"lucene\"},\"filter\":[],\"highlightAll\":true,\"version\":true}"
       }
     },
     "_meta": {
@@ -1930,16 +19,18 @@
     }
   },
   {
-    "_id": "Docker-memory-usage",
-    "_type": "visualization",
+    "_id": "ce75d080-c2ef-11e8-b9e8-d74f7e7408ed",
+    "_type": "dashboard",
     "_source": {
-      "title": "Memory usage [Metricbeat Docker]",
-      "visState": "{\"title\":\"Memory usage [Metricbeat Docker]\",\"type\":\"area\",\"params\":{\"addLegend\":true,\"addTimeMarker\":false,\"addTooltip\":true,\"categoryAxes\":[{\"id\":\"CategoryAxis-1\",\"labels\":{\"show\":true,\"truncate\":100},\"position\":\"bottom\",\"scale\":{\"type\":\"linear\"},\"show\":true,\"style\":{},\"title\":{},\"type\":\"category\"}],\"defaultYExtents\":false,\"grid\":{\"categoryLines\":false,\"style\":{\"color\":\"#eee\"}},\"interpolate\":\"linear\",\"legendPosition\":\"top\",\"mode\":\"stacked\",\"scale\":\"linear\",\"seriesParams\":[{\"data\":{\"id\":\"1\",\"label\":\"Memory\"},\"drawLinesBetweenPoints\":true,\"interpolate\":\"linear\",\"mode\":\"stacked\",\"show\":\"true\",\"showCircles\":true,\"type\":\"area\",\"valueAxis\":\"ValueAxis-1\"}],\"setYExtents\":false,\"shareYAxis\":true,\"smoothLines\":false,\"times\":[],\"valueAxes\":[{\"id\":\"ValueAxis-1\",\"labels\":{\"filter\":false,\"rotate\":0,\"show\":true,\"truncate\":100},\"name\":\"LeftAxis-1\",\"position\":\"left\",\"scale\":{\"mode\":\"normal\",\"type\":\"linear\"},\"show\":true,\"style\":{},\"title\":{\"text\":\"Memory\"},\"type\":\"value\"}],\"yAxis\":{},\"type\":\"area\"},\"aggs\":[{\"id\":\"1\",\"enabled\":true,\"type\":\"max\",\"schema\":\"metric\",\"params\":{\"field\":\"docker.memory.usage.total\",\"customLabel\":\"Memory\"}},{\"id\":\"2\",\"enabled\":true,\"type\":\"date_histogram\",\"schema\":\"segment\",\"params\":{\"field\":\"@timestamp\",\"interval\":\"auto\",\"customInterval\":\"2h\",\"min_doc_count\":1,\"extended_bounds\":{}}},{\"id\":\"3\",\"enabled\":true,\"type\":\"terms\",\"schema\":\"group\",\"params\":{\"field\":\"docker.container.name.keyword\",\"otherBucket\":false,\"otherBucketLabel\":\"Other\",\"missingBucket\":false,\"missingBucketLabel\":\"Missing\",\"size\":15,\"order\":\"desc\",\"orderBy\":\"1\",\"customLabel\":\"Container name\"}}]}",
-      "uiStateJSON": "{}",
+      "title": "Processing errors [nomad]",
+      "hits": 0,
       "description": "",
+      "panelsJSON": "[{\"panelIndex\":\"1\",\"gridData\":{\"x\":0,\"y\":0,\"w\":24,\"h\":14,\"i\":\"1\"},\"embeddableConfig\":{\"vis\":{\"legendOpen\":false}},\"id\":\"b16c30b0-c2ef-11e8-b9e8-d74f7e7408ed\",\"type\":\"visualization\",\"version\":\"6.3.2\"},{\"panelIndex\":\"3\",\"gridData\":{\"x\":37,\"y\":14,\"w\":11,\"h\":7,\"i\":\"3\"},\"embeddableConfig\":{\"spy\":null},\"id\":\"1103dc60-c2f2-11e8-b9e8-d74f7e7408ed\",\"type\":\"visualization\",\"version\":\"6.3.2\"},{\"panelIndex\":\"7\",\"gridData\":{\"x\":37,\"y\":21,\"w\":11,\"h\":7,\"i\":\"7\"},\"embeddableConfig\":{},\"id\":\"fd77bd00-c67b-11e8-af80-b5004896a534\",\"type\":\"visualization\",\"version\":\"6.3.2\"},{\"panelIndex\":\"12\",\"gridData\":{\"x\":0,\"y\":14,\"w\":37,\"h\":21,\"i\":\"12\"},\"embeddableConfig\":{},\"id\":\"18785d30-c7cb-11e8-af80-b5004896a534\",\"type\":\"visualization\",\"version\":\"6.3.2\"},{\"panelIndex\":\"13\",\"gridData\":{\"x\":37,\"y\":28,\"w\":11,\"h\":7,\"i\":\"13\"},\"embeddableConfig\":{},\"id\":\"b06a49b0-c2f2-11e8-b9e8-d74f7e7408ed\",\"type\":\"visualization\",\"version\":\"6.3.2\"},{\"panelIndex\":\"14\",\"gridData\":{\"x\":24,\"y\":0,\"w\":24,\"h\":14,\"i\":\"14\"},\"embeddableConfig\":{\"vis\":{\"legendOpen\":false}},\"id\":\"8bcaf100-c7d3-11e8-af80-b5004896a534\",\"type\":\"visualization\",\"version\":\"6.3.2\"},{\"panelIndex\":\"15\",\"gridData\":{\"x\":0,\"y\":35,\"w\":24,\"h\":15,\"i\":\"15\"},\"version\":\"6.3.2\",\"type\":\"visualization\",\"id\":\"b73d0810-c7d2-11e8-af80-b5004896a534\",\"embeddableConfig\":{}}]",
+      "optionsJSON": "{\"darkTheme\":false,\"hidePanelTitles\":false,\"useMargins\":true}",
       "version": 1,
+      "timeRestore": false,
       "kibanaSavedObjectMeta": {
-        "searchSourceJSON": "{\"filter\":[],\"index\":\"metricbeat-*\",\"highlight\":{\"pre_tags\":[\"@kibana-highlighted-field@\"],\"post_tags\":[\"@/kibana-highlighted-field@\"],\"fields\":{\"*\":{}},\"require_field_match\":false,\"fragment_size\":2147483647},\"query\":{\"query\":{\"query_string\":{\"query\":\"metricset.module:docker AND metricset.name:memory\",\"analyze_wildcard\":true,\"default_field\":\"*\"}},\"language\":\"lucene\"}}"
+        "searchSourceJSON": "{\"query\":{\"language\":\"lucene\",\"query\":\"\"},\"filter\":[],\"highlightAll\":true,\"version\":true}"
       }
     },
     "_meta": {
@@ -1947,16 +38,26 @@
     }
   },
   {
-    "_id": "7cdb1330-4d1a-11e7-a196-69b9a7a020a9",
-    "_type": "visualization",
+    "_id": "f12712d0-db70-11e8-86df-271c4963dd2a",
+    "_type": "search",
     "_source": {
-      "title": "Hosts histogram by CPU usage [Metricbeat System]",
-      "visState": "{\"title\":\"Hosts histogram by CPU usage [Metricbeat System]\",\"type\":\"heatmap\",\"params\":{\"addTooltip\":true,\"addLegend\":true,\"enableHover\":false,\"legendPosition\":\"right\",\"times\":[],\"colorsNumber\":4,\"colorSchema\":\"Greens\",\"setColorRange\":false,\"colorsRange\":[],\"invertColors\":false,\"percentageMode\":false,\"valueAxes\":[{\"show\":false,\"id\":\"ValueAxis-1\",\"type\":\"value\",\"scale\":{\"type\":\"linear\",\"defaultYExtents\":false},\"labels\":{\"show\":false,\"rotate\":0,\"color\":\"#555\",\"overwriteColor\":false}}],\"type\":\"heatmap\"},\"aggs\":[{\"id\":\"1\",\"enabled\":true,\"type\":\"avg\",\"schema\":\"metric\",\"params\":{\"field\":\"system.cpu.user.pct\",\"customLabel\":\"CPU usage\"}},{\"id\":\"2\",\"enabled\":true,\"type\":\"date_histogram\",\"schema\":\"segment\",\"params\":{\"field\":\"@timestamp\",\"interval\":\"auto\",\"customInterval\":\"2h\",\"min_doc_count\":1,\"extended_bounds\":{}}},{\"id\":\"3\",\"enabled\":true,\"type\":\"terms\",\"schema\":\"group\",\"params\":{\"field\":\"beat.hostname.keyword\",\"otherBucket\":false,\"otherBucketLabel\":\"Other\",\"missingBucket\":false,\"missingBucketLabel\":\"Missing\",\"size\":20,\"order\":\"desc\",\"orderBy\":\"1\",\"customLabel\":\"Hosts\"}}]}",
-      "uiStateJSON": "{\"vis\":{\"defaultColors\":{\"0% - 400%\":\"rgb(247,252,245)\",\"400% - 700%\":\"rgb(199,233,192)\",\"700% - 1,000%\":\"rgb(116,196,118)\",\"1,000% - 1,300%\":\"rgb(35,139,69)\"}}}",
+      "title": "nomad example search",
       "description": "",
+      "hits": 0,
+      "columns": [
+        "nomad.service",
+        "event",
+        "level",
+        "logger_name",
+        "nomad.archive_id"
+      ],
+      "sort": [
+        "@timestamp",
+        "desc"
+      ],
       "version": 1,
       "kibanaSavedObjectMeta": {
-        "searchSourceJSON": "{\"index\":\"metricbeat-*\",\"query\":{\"query\":{\"query_string\":{\"query\":\"*\",\"analyze_wildcard\":true,\"default_field\":\"*\"}},\"language\":\"lucene\"},\"filter\":[]}"
+        "searchSourceJSON": "{\"index\":\"f02ffa10-02ab-11e9-8c88-c573528f3b8d\",\"highlightAll\":true,\"version\":true,\"query\":{\"query\":\"\",\"language\":\"lucene\"},\"filter\":[{\"meta\":{\"index\":\"e8ef6780-c03a-11e8-b9e8-d74f7e7408ed\",\"type\":\"phrases\",\"key\":\"nomad.service.keyword\",\"value\":\"nomad_worker, nomad_api\",\"params\":[\"nomad_worker\",\"nomad_api\"],\"negate\":false,\"disabled\":false,\"alias\":null},\"query\":{\"bool\":{\"should\":[{\"match_phrase\":{\"nomad.service.keyword\":\"nomad_worker\"}},{\"match_phrase\":{\"nomad.service.keyword\":\"nomad_api\"}}],\"minimum_should_match\":1}},\"$state\":{\"store\":\"appState\"}}]}"
       }
     },
     "_meta": {
@@ -1964,34 +65,16 @@
     }
   },
   {
-    "_id": "fe064790-1b1f-11e7-bec4-a5e9ec5cab8b",
+    "_id": "7a71d990-c7c0-11e8-af80-b5004896a534",
     "_type": "visualization",
     "_source": {
-      "title": "Top Hosts By Memory (Realtime) [Metricbeat System]",
-      "visState": "{\"title\":\"Top Hosts By Memory (Realtime) [Metricbeat System]\",\"type\":\"metrics\",\"params\":{\"id\":\"31e5afa0-1b1c-11e7-b09e-037021c4f8df\",\"type\":\"top_n\",\"series\":[{\"id\":\"31e5afa1-1b1c-11e7-b09e-037021c4f8df\",\"color\":\"#68BC00\",\"split_mode\":\"terms\",\"metrics\":[{\"id\":\"31e5afa2-1b1c-11e7-b09e-037021c4f8df\",\"type\":\"avg\",\"field\":\"system.memory.actual.used.pct\"}],\"seperate_axis\":0,\"axis_position\":\"right\",\"formatter\":\"percent\",\"chart_type\":\"line\",\"line_width\":1,\"point_size\":1,\"fill\":0.5,\"stacked\":\"none\",\"terms_field\":\"beat.hostname.keyword\",\"terms_order_by\":\"31e5afa2-1b1c-11e7-b09e-037021c4f8df\",\"terms_size\":\"10\"}],\"time_field\":\"@timestamp\",\"index_pattern\":\"*\",\"interval\":\"auto\",\"axis_position\":\"left\",\"axis_formatter\":\"number\",\"show_legend\":1,\"bar_color_rules\":[{\"value\":0,\"id\":\"33349dd0-1b1c-11e7-b09e-037021c4f8df\",\"bar_color\":\"rgba(104,188,0,1)\",\"opperator\":\"gte\"},{\"value\":0.6,\"id\":\"997dc440-1b1c-11e7-b09e-037021c4f8df\",\"bar_color\":\"rgba(254,146,0,1)\",\"opperator\":\"gte\"},{\"value\":0.85,\"id\":\"a10d7f20-1b1c-11e7-b09e-037021c4f8df\",\"bar_color\":\"rgba(211,49,21,1)\",\"opperator\":\"gte\"}],\"drilldown_url\":\"../app/kibana#/dashboard/79ffd6e0-faa0-11e6-947f-177f697178b8?_a=(query:(query_string:(analyze_wildcard:!t,query:'beat.name:\\\"{{key}}\\\"')))\",\"filter\":\"\",\"show_grid\":1},\"aggs\":[]}",
+      "title": "Max exec times [nomad]",
+      "visState": "{\"title\":\"Max exec times [nomad]\",\"type\":\"vega\",\"params\":{\"spec\":\"{\\n  \\\"$schema\\\": \\\"https://vega.github.io/schema/vega/v3.json\\\",\\n  \\\"data\\\": [{\\n    \\\"name\\\": \\\"results\\\",\\n      \\\"url\\\": {\\n        \\\"index\\\": \\\"logstash-*\\\",\\n        \\\"body\\\": {\\n          \\\"size\\\": 0,\\n          \\\"query\\\": {\\n            terms : {\\n              \\\"nomad.processing.current_task\\\": [\\\"parsing\\\", \\\"normalizing\\\"]\\n            }\\n          }\\n        \\t\\\"aggs\\\": {\\n        \\t\\t\\\"exec_time_per_step\\\": {\\n        \\t\\t\\t\\\"terms\\\": {\\n        \\t\\t\\t\\t\\\"field\\\": \\\"nomad.processing.step.keyword\\\"\\n        \\t\\t\\t},\\n        \\t\\t\\t\\\"aggs\\\": {\\n        \\t\\t\\t\\t\\\"quantile\\\": {\\n        \\t\\t\\t\\t\\t\\\"percentiles\\\": {\\n        \\t\\t\\t\\t\\t\\t\\\"field\\\": \\\"nomad.processing.exec_time\\\",\\n        \\t\\t\\t\\t\\t\\t\\\"percents\\\": [\\n        \\t\\t\\t\\t\\t\\t\\t25,\\n        \\t\\t\\t\\t\\t\\t\\t50,\\n        \\t\\t\\t\\t\\t\\t\\t75\\n        \\t\\t\\t\\t\\t\\t]\\n        \\t\\t\\t\\t\\t}\\n        \\t\\t\\t\\t},\\n        \\t\\t\\t\\t\\\"min_value\\\":{\\n        \\t\\t\\t\\t\\t\\\"min\\\": {\\n        \\t\\t\\t\\t\\t\\t\\\"field\\\": \\\"nomad.processing.exec_time\\\"\\n        \\t\\t\\t\\t\\t}\\n        \\t\\t\\t\\t},\\n        \\t\\t\\t\\t\\\"max_value\\\":{\\n        \\t\\t\\t\\t\\t\\\"max\\\": {\\n        \\t\\t\\t\\t\\t\\t\\\"field\\\": \\\"nomad.processing.exec_time\\\"\\n        \\t\\t\\t\\t\\t}\\n        \\t\\t\\t\\t},\\n        \\t\\t\\t\\t\\\"max_value_sort\\\": {\\n          \\t\\t\\t\\t\\\"bucket_sort\\\": {\\n              \\t\\t\\t\\\"sort\\\": [\\n                      {\\\"max_value\\\": {\\\"order\\\": \\\"desc\\\"}}\\n                    ],\\n                    \\\"size\\\": 10\\n                  }\\n                }\\n        \\t\\t\\t}\\n        \\t\\t}\\n        \\t}\\n        }\\n      },\\n      \\\"format\\\": {\\n        \\\"property\\\": \\\"aggregations.exec_time_per_step.buckets\\\"\\n      },\\n      \\\"transform\\\":[\\n        {\\n          \\\"type\\\": \\\"formula\\\",\\n          \\\"expr\\\": \\\"datum.quantile.values['25.0']\\\"\\n          \\\"as\\\": \\\"q1\\\"\\n        },\\n        {\\n          \\\"type\\\": \\\"formula\\\",\\n          \\\"expr\\\": \\\"datum.quantile.values['50.0']\\\"\\n          \\\"as\\\": \\\"median\\\"\\n        },\\n        {\\n          \\\"type\\\": \\\"formula\\\",\\n          \\\"expr\\\": \\\"datum.quantile.values['75.0']\\\"\\n          \\\"as\\\": \\\"q3\\\"\\n        },\\n        {\\n          \\\"type\\\": \\\"formula\\\",\\n          \\\"expr\\\": \\\"datum.min_value.value\\\"\\n          \\\"as\\\": \\\"min_value\\\"\\n        },\\n        {\\n          \\\"type\\\": \\\"formula\\\",\\n          \\\"expr\\\": \\\"datum.max_value.value\\\"\\n          \\\"as\\\": \\\"max_value\\\"\\n        },\\n        {\\n          \\\"type\\\": \\\"formula\\\",\\n          \\\"expr\\\": \\\"datum.key\\\",\\n          \\\"as\\\": \\\"step\\\"\\n        },\\n        {\\n        \\\"type\\\": \\\"fold\\\",\\n        \\\"fields\\\": [\\\"min_value\\\", \\\"q1\\\", \\\"median\\\", \\\"q3\\\", \\\"max_value\\\"],\\n         \\\"as\\\": [\\\"metric\\\", \\\"metricValue\\\"]\\n         }\\n      ]\\n    }],\\n  \\\"scales\\\": [\\n    {\\n      \\\"name\\\": \\\"layout\\\",\\n      \\\"type\\\": \\\"band\\\",\\n      \\\"range\\\": \\\"height\\\",\\n      \\\"domain\\\": 
{\\\"data\\\": \\\"results\\\", \\\"field\\\": \\\"key\\\"}\\n    },\\n    {\\n      \\\"name\\\": \\\"xscale\\\",\\n      \\\"type\\\": \\\"log\\\",\\n      \\\"range\\\": \\\"width\\\", \\\"round\\\": true,\\n      \\\"domain\\\": {\\\"data\\\": \\\"results\\\", \\\"field\\\": \\\"metricValue\\\"},\\n      \\\"zero\\\": false, \\\"nice\\\": true\\n    },\\n    {\\n      \\\"name\\\": \\\"color\\\",\\n      \\\"type\\\": \\\"ordinal\\\",\\n      \\\"range\\\": \\\"category\\\"\\n    }\\n  ],\\n  \\\"axes\\\": [\\n    {\\\"orient\\\": \\\"bottom\\\", \\\"scale\\\": \\\"xscale\\\", \\\"zindex\\\": 1, \\\"format\\\": \\\"s\\\", \\\"title\\\": \\\"Exec time in s\\\"},\\n    {\\\"orient\\\": \\\"left\\\", \\\"scale\\\": \\\"layout\\\", \\\"tickCount\\\": 20, \\\"zindex\\\": 1}\\n  ],\\n  \\\"marks\\\": [\\n    {\\n      \\\"type\\\": \\\"group\\\",\\n      \\\"from\\\": {\\n        \\\"facet\\\": {\\n          \\\"data\\\": \\\"results\\\",\\n          \\\"name\\\": \\\"meters\\\",\\n          \\\"groupby\\\": \\\"key\\\"\\n        }\\n      },\\n\\n      \\\"encode\\\": {\\n        \\\"enter\\\": {\\n          \\\"yc\\\": {\\\"scale\\\": \\\"layout\\\", \\\"field\\\": \\\"key\\\", \\\"band\\\": 0.5},\\n          // \\\"height\\\": {\\\"signal\\\": \\\"plotWidth\\\"},\\n          // \\\"width\\\": {\\\"signal\\\": \\\"width\\\"}\\n        }\\n      },\\n\\n      \\\"data\\\": [\\n        {\\n          \\\"name\\\": \\\"summary\\\",\\n          \\\"source\\\": \\\"meters\\\",\\n          \\\"transform\\\": [\\n            {\\n              \\\"type\\\": \\\"aggregate\\\",\\n              \\\"fields\\\": [\\\"metricValue\\\", \\\"metricValue\\\", \\\"metricValue\\\", \\\"metricValue\\\", \\\"metricValue\\\"],\\n              \\\"ops\\\": [\\\"min\\\", \\\"q1\\\", \\\"median\\\", \\\"q3\\\", \\\"max\\\"],\\n              \\\"as\\\": [\\\"min\\\", \\\"q1\\\", \\\"median\\\", \\\"q3\\\", \\\"max\\\"]\\n            }\\n          ]\\n        }\\n      ],\\n\\n      \\\"marks\\\": [\\n        {\\n          \\\"type\\\": \\\"rect\\\",\\n          \\\"from\\\": {\\\"data\\\": \\\"summary\\\"},\\n          \\\"encode\\\": {\\n            \\\"enter\\\": {\\n              \\\"fill\\\": {\\\"value\\\": \\\"black\\\"},\\n              \\\"height\\\": {\\\"value\\\": 1}\\n            },\\n            \\\"update\\\": {\\n              \\\"x\\\": {\\\"scale\\\": \\\"xscale\\\", \\\"field\\\": \\\"min\\\"},\\n              \\\"x2\\\": {\\\"scale\\\": \\\"xscale\\\", \\\"field\\\": \\\"max\\\"},\\n              \\\"zindex\\\": 1\\n            }\\n          }\\n        },\\n        {\\n          \\\"type\\\": \\\"rect\\\",\\n          \\\"from\\\": {\\\"data\\\": \\\"summary\\\"},\\n          \\\"encode\\\": {\\n            \\\"enter\\\": {\\n              \\\"stroke\\\": {\\\"value\\\": \\\"black\\\"}\\n              \\\"fill\\\": {\\\"value\\\": \\\"white\\\"}\\n            },\\n            \\\"update\\\": {\\n              \\\"height\\\": {\\\"value\\\": 20},\\n              \\\"yc\\\": {\\\"offset\\\": -0.5}\\n              \\\"x\\\": {\\\"scale\\\": \\\"xscale\\\", \\\"field\\\": \\\"q1\\\"},\\n              \\\"x2\\\": {\\\"scale\\\": \\\"xscale\\\", \\\"field\\\": \\\"q3\\\"}\\n              \\n            }\\n          }\\n        },\\n        {\\n          \\\"type\\\": \\\"rect\\\",\\n          \\\"from\\\": {\\\"data\\\": \\\"summary\\\"},\\n          \\\"encode\\\": {\\n            \\\"enter\\\": {\\n              \\\"fill\\\": {\\\"value\\\": \\\"black\\\"},\\n              \\\"width\\\": 
{\\\"value\\\": 1}\\n            },\\n            \\\"update\\\": {\\n              \\\"height\\\": {\\\"value\\\": 20},\\n              \\\"yc\\\": {\\\"offset\\\": -0.5},\\n              \\\"x\\\": {\\\"scale\\\": \\\"xscale\\\", \\\"field\\\": \\\"median\\\"}\\n            }\\n          }\\n        }\\n      ]\\n    }\\n  ]\\n}\"},\"aggs\":[]}",
       "uiStateJSON": "{}",
       "description": "",
       "version": 1,
       "kibanaSavedObjectMeta": {
-        "searchSourceJSON": "{\"query\":{\"query\":{\"query_string\":{\"query\":\"*\"}},\"language\":\"lucene\"},\"filter\":[]}"
-      }
-    },
-    "_meta": {
-      "savedObjectVersion": 2
-    }
-  },
-  {
-    "_id": "Docker-containers",
-    "_type": "visualization",
-    "_source": {
-      "title": "Docker containers [Metricbeat Docker]",
-      "visState": "{\"title\":\"Docker containers [Metricbeat Docker]\",\"type\":\"table\",\"params\":{\"perPage\":8,\"showMeticsAtAllLevels\":false,\"showPartialRows\":false,\"showTotal\":true,\"sort\":{\"columnIndex\":1,\"direction\":\"asc\"},\"totalFunc\":\"sum\"},\"aggs\":[{\"id\":\"2\",\"enabled\":true,\"type\":\"terms\",\"schema\":\"bucket\",\"params\":{\"field\":\"docker.container.name.keyword\",\"otherBucket\":false,\"otherBucketLabel\":\"Other\",\"missingBucket\":false,\"missingBucketLabel\":\"Missing\",\"size\":15,\"order\":\"desc\",\"orderBy\":\"1\",\"customLabel\":\"Name\"}},{\"id\":\"3\",\"enabled\":true,\"type\":\"max\",\"schema\":\"metric\",\"params\":{\"field\":\"docker.cpu.total.pct\",\"customLabel\":\"CPU usage (%)\"}},{\"id\":\"4\",\"enabled\":true,\"type\":\"max\",\"schema\":\"metric\",\"params\":{\"field\":\"docker.diskio.total\",\"customLabel\":\"DiskIO\"}},{\"id\":\"5\",\"enabled\":true,\"type\":\"max\",\"schema\":\"metric\",\"params\":{\"field\":\"docker.memory.usage.pct\",\"customLabel\":\"Mem (%)\"}},{\"id\":\"6\",\"enabled\":true,\"type\":\"max\",\"schema\":\"metric\",\"params\":{\"field\":\"docker.memory.rss.total\",\"customLabel\":\"Mem RSS\"}},{\"id\":\"1\",\"enabled\":true,\"type\":\"cardinality\",\"schema\":\"metric\",\"params\":{\"field\":\"docker.container.name.keyword\",\"customLabel\":\"Number of Containers\"}}]}",
-      "uiStateJSON": "{\"vis\":{\"params\":{\"sort\":{\"columnIndex\":1,\"direction\":\"asc\"}}}}",
-      "description": "",
-      "savedSearchId": "Metricbeat-Docker",
-      "version": 1,
-      "kibanaSavedObjectMeta": {
-        "searchSourceJSON": "{\"filter\":[],\"query\":{\"query\":\"\",\"language\":\"lucene\"}}"
+        "searchSourceJSON": "{}"
       }
     },
     "_meta": {
@@ -1999,17 +82,16 @@
     }
   },
   {
-    "_id": "Docker-containers-per-host",
+    "_id": "d99f6c10-c7b2-11e8-af80-b5004896a534",
     "_type": "visualization",
     "_source": {
-      "title": "Docker containers per host [Metricbeat Docker]",
-      "visState": "{\"title\":\"Docker containers per host [Metricbeat Docker]\",\"type\":\"pie\",\"params\":{\"shareYAxis\":true,\"addTooltip\":true,\"addLegend\":true,\"legendPosition\":\"bottom\",\"isDonut\":true,\"type\":\"pie\",\"labels\":{\"show\":false,\"values\":true,\"last_level\":true,\"truncate\":100}},\"aggs\":[{\"id\":\"1\",\"enabled\":true,\"type\":\"cardinality\",\"schema\":\"metric\",\"params\":{\"field\":\"docker.container.name.keyword\",\"customLabel\":\"Number of containers\"}},{\"id\":\"2\",\"enabled\":true,\"type\":\"terms\",\"schema\":\"segment\",\"params\":{\"field\":\"docker.container.name.keyword\",\"otherBucket\":false,\"otherBucketLabel\":\"Other\",\"missingBucket\":false,\"missingBucketLabel\":\"Missing\",\"size\":15,\"order\":\"desc\",\"orderBy\":\"1\",\"customLabel\":\"Hosts\"}}]}",
+      "title": "Archive exec times over archive size [nomad]",
+      "visState": "{\"title\":\"Archive exec times over archive size [nomad]\",\"type\":\"vega\",\"params\":{\"spec\":\"{\\n  $schema: https://vega.github.io/schema/vega-lite/v2.json\\n  data: {\\n    url: {\\n      index: logstash-*\\n      body: {\\n        size: 10000\\n        query: {\\n          bool: {\\n            must: [\\n              \\\"%dashboard_context-must_clause%\\\"\\n              {\\n                range: {\\n                  @timestamp: {\\n                    \\\"%timefilter%\\\": true\\n                  }\\n                }\\n              }\\n              {\\n                match: {\\n                  event.keyword: archived\\n                }\\n              }\\n            ]\\n            must_not: [\\n              \\\"%dashboard_context-must_not_clause%\\\"\\n            ]\\n          }\\n        }\\n        _source: [\\n          \\\"nomad.processing.input_size\\\", \\n          \\\"nomad.processing.exec_time\\\",\\n          \\\"nomad.processing.archive_size\\\"]\\n      }\\n    }\\n    format: {property: \\\"hits.hits\\\"}\\n  }\\n  transform: [\\n    {\\n      calculate: \\\"datum._source['nomad.processing.input_size']\\\"\\n      as: \\\"input_size\\\"\\n    },\\n    {\\n      calculate: \\\"datum._source['nomad.processing.exec_time']\\\"\\n      as: \\\"exec_time\\\"\\n    },\\n    {\\n      calculate: \\\"datum._source['nomad.processing.archive_size']\\\"\\n      as: \\\"archive_size\\\"\\n    }\\n  ]\\n  mark: circle\\n  encoding: {\\n    x: {\\n      field: archive_size\\n      type: quantitative\\n      scale: {\\n        type: log\\n      }\\n      axis: { \\n        title: Archive size in bytes \\n        format: s\\n      }\\n    }\\n    y: {\\n      field: exec_time\\n      type: quantitative\\n      scale: {\\n        type: log\\n      }\\n      axis: { \\n        title: Exec time in s\\n        format: s\\n      }\\n    }\\n    size: {\\n      field: \\\"input_size\\\", \\n      type: \\\"quantitative\\\", \\n      legend: {\\n        title: Input size in bytes\\n        format: s\\n      },\\n      scale: {\\n        // type: log\\n      }\\n    }\\n  }\\n}\\n\"},\"aggs\":[]}",
       "uiStateJSON": "{}",
       "description": "",
-      "savedSearchId": "Metricbeat-Docker",
       "version": 1,
       "kibanaSavedObjectMeta": {
-        "searchSourceJSON": "{\"filter\":[],\"query\":{\"query\":\"\",\"language\":\"lucene\"}}"
+        "searchSourceJSON": "{}"
       }
     },
     "_meta": {
@@ -2017,17 +99,16 @@
     }
   },
   {
-    "_id": "Docker-images-and-names",
+    "_id": "a2e3f4e0-c7b0-11e8-af80-b5004896a534",
     "_type": "visualization",
     "_source": {
-      "title": "Docker images and names [Metricbeat Docker]",
-      "visState": "{\"title\":\"Docker images and names [Metricbeat Docker]\",\"type\":\"pie\",\"params\":{\"shareYAxis\":true,\"addTooltip\":true,\"addLegend\":true,\"legendPosition\":\"bottom\",\"isDonut\":true,\"type\":\"pie\",\"labels\":{\"show\":false,\"values\":true,\"last_level\":true,\"truncate\":100}},\"aggs\":[{\"id\":\"1\",\"enabled\":true,\"type\":\"count\",\"schema\":\"metric\",\"params\":{}},{\"id\":\"2\",\"enabled\":true,\"type\":\"terms\",\"schema\":\"segment\",\"params\":{\"field\":\"docker.container.image.keyword\",\"otherBucket\":false,\"otherBucketLabel\":\"Other\",\"missingBucket\":false,\"missingBucketLabel\":\"Missing\",\"size\":5,\"order\":\"desc\",\"orderBy\":\"1\"}},{\"id\":\"3\",\"enabled\":true,\"type\":\"terms\",\"schema\":\"segment\",\"params\":{\"field\":\"docker.container.name.keyword\",\"otherBucket\":false,\"otherBucketLabel\":\"Other\",\"missingBucket\":false,\"missingBucketLabel\":\"Missing\",\"size\":5,\"order\":\"desc\",\"orderBy\":\"1\"}}]}",
+      "title": "Normalizer exec times over input size [nomad]",
+      "visState": "{\"title\":\"Normalizer exec times over input size [nomad]\",\"type\":\"vega\",\"params\":{\"spec\":\"{\\n  $schema: https://vega.github.io/schema/vega-lite/v2.json\\n  data: {\\n    url: {\\n      index: logstash-*\\n      body: {\\n        size: 10000\\n        query: {\\n          bool: {\\n            must: [\\n              \\\"%dashboard_context-must_clause%\\\"\\n              {\\n                range: {\\n                  @timestamp: {\\n                    \\\"%timefilter%\\\": true\\n                  }\\n                }\\n              }\\n              {\\n                match: {\\n                  event.keyword: normalizer executed\\n                }\\n              }\\n            ]\\n            must_not: [\\n              \\\"%dashboard_context-must_not_clause%\\\"\\n            ]\\n          }\\n        }\\n        _source: [\\n          \\\"nomad.processing.input_size\\\", \\n          \\\"nomad.processing.exec_time\\\",\\n          \\\"nomad.processing.normalizer\\\"]\\n      }\\n    }\\n    format: {property: \\\"hits.hits\\\"}\\n  }\\n  transform: [\\n    {\\n      calculate: \\\"datum._source['nomad.processing.input_size']\\\"\\n      as: \\\"input_size\\\"\\n    },\\n    {\\n      calculate: \\\"datum._source['nomad.processing.exec_time']\\\"\\n      as: \\\"exec_time\\\"\\n    },\\n    {\\n      calculate: \\\"substring(datum._source['nomad.processing.normalizer'], 0)\\\"\\n      as: \\\"normalizer\\\"\\n    }\\n  ]\\n  mark: point\\n  encoding: {\\n    x: {\\n      field: input_size\\n      type: quantitative\\n      scale: {\\n        type: log\\n      }\\n      axis: { \\n        title: Input size in bytes \\n        format: s\\n      }\\n    }\\n    y: {\\n      field: exec_time\\n      type: quantitative\\n      scale: {\\n        type: log\\n      }\\n      axis: { \\n        title: \\\"Exec time in s\\\" \\n        format: s\\n      }\\n    }\\n    color: {\\n      field: normalizer\\n      type: nominal\\n      legend: { title: 'Normalizer' }\\n    }\\n    shape: {\\n      field: normalizer\\n      type: nominal\\n    }\\n  }\\n}\\n\"},\"aggs\":[]}",
       "uiStateJSON": "{}",
       "description": "",
-      "savedSearchId": "Metricbeat-Docker",
       "version": 1,
       "kibanaSavedObjectMeta": {
-        "searchSourceJSON": "{\"filter\":[],\"query\":{\"query\":\"\",\"language\":\"lucene\"}}"
+        "searchSourceJSON": "{}"
       }
     },
     "_meta": {
@@ -2035,67 +116,16 @@
     }
   },
   {
-    "_id": "Docker-Network-IO",
+    "_id": "50c32d20-c7b0-11e8-af80-b5004896a534",
     "_type": "visualization",
     "_source": {
-      "title": "Network IO [Metricbeat Docker]",
-      "visState": "{\"title\":\"Network IO [Metricbeat Docker]\",\"type\":\"area\",\"params\":{\"addLegend\":true,\"addTimeMarker\":false,\"addTooltip\":true,\"categoryAxes\":[{\"id\":\"CategoryAxis-1\",\"labels\":{\"show\":true,\"truncate\":100},\"position\":\"bottom\",\"scale\":{\"type\":\"linear\"},\"show\":true,\"style\":{},\"title\":{},\"type\":\"category\"}],\"defaultYExtents\":false,\"grid\":{\"categoryLines\":false,\"style\":{\"color\":\"#eee\"}},\"interpolate\":\"linear\",\"legendPosition\":\"top\",\"mode\":\"stacked\",\"scale\":\"linear\",\"seriesParams\":[{\"data\":{\"id\":\"1\",\"label\":\"IN bytes\"},\"drawLinesBetweenPoints\":true,\"interpolate\":\"linear\",\"mode\":\"stacked\",\"show\":\"true\",\"showCircles\":true,\"type\":\"area\",\"valueAxis\":\"ValueAxis-1\"},{\"show\":true,\"mode\":\"stacked\",\"type\":\"area\",\"drawLinesBetweenPoints\":true,\"showCircles\":true,\"interpolate\":\"linear\",\"data\":{\"id\":\"4\",\"label\":\"OUT bytes\"},\"valueAxis\":\"ValueAxis-1\"}],\"setYExtents\":false,\"shareYAxis\":true,\"smoothLines\":true,\"times\":[],\"valueAxes\":[{\"id\":\"ValueAxis-1\",\"labels\":{\"filter\":false,\"rotate\":0,\"show\":true,\"truncate\":100},\"name\":\"LeftAxis-1\",\"position\":\"left\",\"scale\":{\"mode\":\"normal\",\"type\":\"linear\"},\"show\":true,\"style\":{},\"title\":{\"text\":\"Count\"},\"type\":\"value\"}],\"yAxis\":{},\"type\":\"area\"},\"aggs\":[{\"id\":\"1\",\"enabled\":true,\"type\":\"max\",\"schema\":\"metric\",\"params\":{\"field\":\"docker.network.in.bytes\",\"customLabel\":\"IN bytes\"}},{\"id\":\"2\",\"enabled\":true,\"type\":\"date_histogram\",\"schema\":\"segment\",\"params\":{\"field\":\"@timestamp\",\"interval\":\"auto\",\"customInterval\":\"2h\",\"min_doc_count\":1,\"extended_bounds\":{}}},{\"id\":\"3\",\"enabled\":true,\"type\":\"terms\",\"schema\":\"group\",\"params\":{\"field\":\"docker.container.name.keyword\",\"otherBucket\":false,\"otherBucketLabel\":\"Other\",\"missingBucket\":false,\"missingBucketLabel\":\"Missing\",\"size\":15,\"order\":\"desc\",\"orderBy\":\"1\",\"customLabel\":\"Container name\"}},{\"id\":\"4\",\"enabled\":true,\"type\":\"max\",\"schema\":\"metric\",\"params\":{\"field\":\"docker.network.out.bytes\",\"customLabel\":\"OUT bytes\"}}]}",
+      "title": "Parser exec times over input size [nomad]",
+      "visState": "{\"title\":\"Parser exec times over input size [nomad]\",\"type\":\"vega\",\"params\":{\"spec\":\"{\\n  $schema: https://vega.github.io/schema/vega-lite/v2.json\\n  data: {\\n    url: {\\n      index: logstash-*\\n      body: {\\n        size: 10000\\n        query: {\\n          bool: {\\n            must: [\\n              \\\"%dashboard_context-must_clause%\\\"\\n              {\\n                range: {\\n                  @timestamp: {\\n                    \\\"%timefilter%\\\": true\\n                  }\\n                }\\n              }\\n              {\\n                match: {\\n                  event.keyword: parser executed\\n                }\\n              }\\n            ]\\n            must_not: [\\n              \\\"%dashboard_context-must_not_clause%\\\"\\n            ]\\n          }\\n        }\\n        _source: [\\n          \\\"nomad.processing.input_size\\\", \\n          \\\"nomad.processing.exec_time\\\",\\n          \\\"nomad.processing.parser\\\"]\\n      }\\n    }\\n    format: {property: \\\"hits.hits\\\"}\\n  }\\n  transform: [\\n    {\\n      calculate: \\\"datum._source['nomad.processing.input_size']\\\"\\n      as: \\\"input_size\\\"\\n    },\\n    {\\n      calculate: \\\"datum._source['nomad.processing.exec_time']\\\"\\n      as: \\\"exec_time\\\"\\n    },\\n    {\\n      calculate: \\\"substring(datum._source['nomad.processing.parser'], 8)\\\"\\n      as: \\\"parser\\\"\\n    }\\n  ]\\n  mark: point\\n  encoding: {\\n    x: {\\n      field: input_size\\n      type: quantitative\\n      scale: {\\n        type: log\\n      }\\n      axis: { \\n        title: Input size in bytes \\n        format: s\\n      }\\n    }\\n    y: {\\n      field: exec_time\\n      type: quantitative\\n      scale: {\\n        type: log\\n      }\\n      axis: { \\n        title: \\\"Exec time in s\\\" \\n        format: s\\n      }\\n    }\\n    color: {\\n      field: parser\\n      type: nominal\\n      legend: { title: 'Parser' }\\n    }\\n    shape: {\\n      field: parser\\n      type: nominal\\n    }\\n  }\\n}\\n\"},\"aggs\":[]}",
       "uiStateJSON": "{}",
       "description": "",
       "version": 1,
       "kibanaSavedObjectMeta": {
-        "searchSourceJSON": "{\"filter\":[],\"index\":\"metricbeat-*\",\"highlight\":{\"pre_tags\":[\"@kibana-highlighted-field@\"],\"post_tags\":[\"@/kibana-highlighted-field@\"],\"fields\":{\"*\":{}},\"require_field_match\":false,\"fragment_size\":2147483647},\"query\":{\"query\":{\"query_string\":{\"query\":\"metricset.module:docker AND metricset.name:network\",\"analyze_wildcard\":true,\"default_field\":\"*\"}},\"language\":\"lucene\"}}"
-      }
-    },
-    "_meta": {
-      "savedObjectVersion": 2
-    }
-  },
-  {
-    "_id": "Container-Memory-stats",
-    "_type": "visualization",
-    "_source": {
-      "title": "Container Memory stats [Metricbeat System]",
-      "visState": "{\"title\":\"Container Memory stats [Metricbeat System]\",\"type\":\"table\",\"params\":{\"perPage\":10,\"showMeticsAtAllLevels\":false,\"showPartialRows\":false,\"showTotal\":false,\"sort\":{\"columnIndex\":null,\"direction\":null},\"totalFunc\":\"sum\"},\"aggs\":[{\"id\":\"13\",\"enabled\":true,\"type\":\"avg\",\"schema\":\"metric\",\"params\":{\"field\":\"system.process.cgroup.memory.mem.usage.bytes\",\"customLabel\":\"Usage\"}},{\"id\":\"14\",\"enabled\":true,\"type\":\"avg\",\"schema\":\"metric\",\"params\":{\"field\":\"system.process.cgroup.memory.mem.usage.max.bytes\",\"customLabel\":\"Max usage\"}},{\"id\":\"1\",\"enabled\":true,\"type\":\"avg\",\"schema\":\"metric\",\"params\":{\"field\":\"system.process.cgroup.memory.stats.page_faults\",\"customLabel\":\"Page faults\"}},{\"id\":\"2\",\"enabled\":true,\"type\":\"avg\",\"schema\":\"metric\",\"params\":{\"field\":\"system.process.cgroup.memory.stats.pages_in\",\"customLabel\":\"Pages in memory\"}},{\"id\":\"3\",\"enabled\":true,\"type\":\"avg\",\"schema\":\"metric\",\"params\":{\"field\":\"system.process.cgroup.memory.stats.pages_out\",\"customLabel\":\"Pages out of memory\"}},{\"id\":\"4\",\"enabled\":true,\"type\":\"terms\",\"schema\":\"bucket\",\"params\":{\"field\":\"system.process.pid\",\"otherBucket\":false,\"otherBucketLabel\":\"Other\",\"missingBucket\":false,\"missingBucketLabel\":\"Missing\",\"size\":50,\"order\":\"desc\",\"orderBy\":\"1\",\"customLabel\":\"Container ID\"}},{\"id\":\"5\",\"enabled\":true,\"type\":\"avg\",\"schema\":\"metric\",\"params\":{\"field\":\"system.process.cgroup.memory.stats.inactive_file.bytes\",\"customLabel\":\"Inactive files\"}},{\"id\":\"6\",\"enabled\":true,\"type\":\"avg\",\"schema\":\"metric\",\"params\":{\"field\":\"system.process.cgroup.memory.stats.major_page_faults\",\"customLabel\":\"# Major page faults\"}},{\"id\":\"8\",\"enabled\":true,\"type\":\"terms\",\"schema\":\"bucket\",\"params\":{\"field\":\"system.process.name.keyword\",\"otherBucket\":false,\"otherBucketLabel\":\"Other\",\"missingBucket\":false,\"missingBucketLabel\":\"Missing\",\"size\":5,\"order\":\"desc\",\"orderBy\":\"1\",\"customLabel\":\"Process name\"}},{\"id\":\"12\",\"enabled\":true,\"type\":\"avg\",\"schema\":\"metric\",\"params\":{\"field\":\"system.process.cgroup.memory.mem.failures\",\"customLabel\":\"Failures\"}},{\"id\":\"10\",\"enabled\":true,\"type\":\"avg\",\"schema\":\"metric\",\"params\":{\"field\":\"system.process.cgroup.memory.kmem_tcp.usage.bytes\",\"customLabel\":\"TCP buffers\"}},{\"id\":\"11\",\"enabled\":true,\"type\":\"avg\",\"schema\":\"metric\",\"params\":{\"field\":\"system.process.cgroup.memory.stats.rss_huge.bytes\",\"customLabel\":\"Huge pages\"}},{\"id\":\"7\",\"enabled\":true,\"type\":\"avg\",\"schema\":\"metric\",\"params\":{\"field\":\"system.process.cgroup.memory.stats.rss.bytes\",\"customLabel\":\"Swap caches\"}},{\"id\":\"15\",\"enabled\":true,\"type\":\"avg\",\"schema\":\"metric\",\"params\":{\"field\":\"system.process.cgroup.memory.stats.swap.bytes\",\"customLabel\":\"Swap usage\"}},{\"id\":\"16\",\"enabled\":true,\"type\":\"avg\",\"schema\":\"metric\",\"params\":{\"field\":\"system.process.cgroup.blkio.total.ios\",\"customLabel\":\"Block I/O\"}}]}",
-      "uiStateJSON": "{\"vis\":{\"params\":{\"sort\":{\"columnIndex\":null,\"direction\":null}}}}",
-      "description": "",
-      "version": 1,
-      "kibanaSavedObjectMeta": {
-        "searchSourceJSON": "{\"index\":\"metricbeat-*\",\"query\":{\"query\":{\"query_string\":{\"query\":\"*\",\"analyze_wildcard\":true,\"default_field\":\"*\"}},\"language\":\"lucene\"},\"filter\":[]}"
-      }
-    },
-    "_meta": {
-      "savedObjectVersion": 2
-    }
-  },
-  {
-    "_id": "Container-CPU-usage",
-    "_type": "visualization",
-    "_source": {
-      "title": "Container CPU usage [Metricbeat System]",
-      "visState": "{\"title\":\"Container CPU usage [Metricbeat System]\",\"type\":\"table\",\"params\":{\"perPage\":10,\"showPartialRows\":false,\"showMeticsAtAllLevels\":false,\"sort\":{\"columnIndex\":null,\"direction\":null},\"showTotal\":false,\"totalFunc\":\"sum\"},\"aggs\":[{\"id\":\"1\",\"enabled\":true,\"type\":\"max\",\"schema\":\"metric\",\"params\":{\"field\":\"system.process.cgroup.cpuacct.stats.user.ns\",\"customLabel\":\"CPU user\"}},{\"id\":\"2\",\"enabled\":true,\"type\":\"max\",\"schema\":\"metric\",\"params\":{\"field\":\"system.process.cgroup.cpu.cfs.quota.us\",\"customLabel\":\"CPU quota\"}},{\"id\":\"3\",\"enabled\":true,\"type\":\"terms\",\"schema\":\"bucket\",\"params\":{\"field\":\"system.process.pid\",\"otherBucket\":false,\"otherBucketLabel\":\"Other\",\"missingBucket\":false,\"missingBucketLabel\":\"Missing\",\"size\":5,\"order\":\"desc\",\"orderBy\":\"1\",\"customLabel\":\"Container ID\"}},{\"id\":\"4\",\"enabled\":true,\"type\":\"max\",\"schema\":\"metric\",\"params\":{\"field\":\"system.process.cgroup.cpu.stats.throttled.ns\",\"customLabel\":\"CPU throttling\"}},{\"id\":\"5\",\"enabled\":true,\"type\":\"max\",\"schema\":\"metric\",\"params\":{\"field\":\"system.process.cgroup.cpuacct.stats.system.ns\",\"customLabel\":\"CPU kernel\"}},{\"id\":\"6\",\"enabled\":true,\"type\":\"terms\",\"schema\":\"bucket\",\"params\":{\"field\":\"system.process.name.keyword\",\"otherBucket\":false,\"otherBucketLabel\":\"Other\",\"missingBucket\":false,\"missingBucketLabel\":\"Missing\",\"size\":5,\"order\":\"desc\",\"orderBy\":\"1\",\"customLabel\":\"Process name\"}}]}",
-      "uiStateJSON": "{\"vis\":{\"params\":{\"sort\":{\"columnIndex\":null,\"direction\":null}}}}",
-      "description": "",
-      "version": 1,
-      "kibanaSavedObjectMeta": {
-        "searchSourceJSON": "{\"index\":\"metricbeat-*\",\"query\":{\"query\":{\"query_string\":{\"query\":\"*\",\"analyze_wildcard\":true,\"default_field\":\"*\"}},\"language\":\"lucene\"},\"filter\":[]}"
-      }
-    },
-    "_meta": {
-      "savedObjectVersion": 2
-    }
-  },
-  {
-    "_id": "Container-Block-IO",
-    "_type": "visualization",
-    "_source": {
-      "title": "Container Block IO [Metricbeat System]",
-      "visState": "{\"title\":\"Container Block IO [Metricbeat System]\",\"type\":\"table\",\"params\":{\"perPage\":10,\"showMeticsAtAllLevels\":false,\"showPartialRows\":false,\"showTotal\":false,\"sort\":{\"columnIndex\":null,\"direction\":null},\"totalFunc\":\"sum\"},\"aggs\":[{\"id\":\"1\",\"enabled\":true,\"type\":\"avg\",\"schema\":\"metric\",\"params\":{\"field\":\"system.process.cgroup.blkio.total.bytes\",\"customLabel\":\"Total\"}},{\"id\":\"2\",\"enabled\":true,\"type\":\"avg\",\"schema\":\"metric\",\"params\":{\"field\":\"system.process.cgroup.blkio.total.ios\",\"customLabel\":\"I/O\"}},{\"id\":\"3\",\"enabled\":true,\"type\":\"terms\",\"schema\":\"bucket\",\"params\":{\"field\":\"system.process.pid\",\"otherBucket\":false,\"otherBucketLabel\":\"Other\",\"missingBucket\":false,\"missingBucketLabel\":\"Missing\",\"size\":5,\"order\":\"desc\",\"orderBy\":\"1\",\"customLabel\":\"Container ID\"}},{\"id\":\"4\",\"enabled\":true,\"type\":\"terms\",\"schema\":\"bucket\",\"params\":{\"field\":\"system.process.name.keyword\",\"otherBucket\":false,\"otherBucketLabel\":\"Other\",\"missingBucket\":false,\"missingBucketLabel\":\"Missing\",\"size\":5,\"order\":\"desc\",\"orderBy\":\"1\",\"customLabel\":\"Process name\"}}]}",
-      "uiStateJSON": "{\"vis\":{\"params\":{\"sort\":{\"columnIndex\":null,\"direction\":null}}}}",
-      "description": "",
-      "version": 1,
-      "kibanaSavedObjectMeta": {
-        "searchSourceJSON": "{\"index\":\"metricbeat-*\",\"query\":{\"query\":{\"query_string\":{\"analyze_wildcard\":true,\"query\":\"*\",\"default_field\":\"*\"}},\"language\":\"lucene\"},\"filter\":[]}"
+        "searchSourceJSON": "{}"
       }
     },
     "_meta": {
@@ -2112,7 +142,7 @@
       "description": "",
       "version": 1,
       "kibanaSavedObjectMeta": {
-        "searchSourceJSON": "{\"index\":\"e8ef6780-c03a-11e8-b9e8-d74f7e7408ed\",\"filter\":[{\"meta\":{\"index\":\"e8ef6780-c03a-11e8-b9e8-d74f7e7408ed\",\"type\":\"phrases\",\"key\":\"level.keyword\",\"value\":\"ERROR, CRITICAL\",\"params\":[\"ERROR\",\"CRITICAL\"],\"negate\":false,\"disabled\":false,\"alias\":null},\"query\":{\"bool\":{\"should\":[{\"match_phrase\":{\"level.keyword\":\"ERROR\"}},{\"match_phrase\":{\"level.keyword\":\"CRITICAL\"}}],\"minimum_should_match\":1}},\"$state\":{\"store\":\"appState\"}}],\"query\":{\"query\":\"\",\"language\":\"lucene\"}}"
+        "searchSourceJSON": "{\"index\":\"f02ffa10-02ab-11e9-8c88-c573528f3b8d\",\"filter\":[{\"meta\":{\"index\":\"e8ef6780-c03a-11e8-b9e8-d74f7e7408ed\",\"type\":\"phrases\",\"key\":\"level.keyword\",\"value\":\"ERROR, CRITICAL\",\"params\":[\"ERROR\",\"CRITICAL\"],\"negate\":false,\"disabled\":false,\"alias\":null},\"query\":{\"bool\":{\"should\":[{\"match_phrase\":{\"level.keyword\":\"ERROR\"}},{\"match_phrase\":{\"level.keyword\":\"CRITICAL\"}}],\"minimum_should_match\":1}},\"$state\":{\"store\":\"appState\"}}],\"query\":{\"query\":\"\",\"language\":\"lucene\"}}"
       }
     },
     "_meta": {
@@ -2129,7 +159,7 @@
       "description": "",
       "version": 1,
       "kibanaSavedObjectMeta": {
-        "searchSourceJSON": "{\"index\":\"e8ef6780-c03a-11e8-b9e8-d74f7e7408ed\",\"filter\":[],\"query\":{\"language\":\"lucene\",\"query\":\"\"}}"
+        "searchSourceJSON": "{\"index\":\"f02ffa10-02ab-11e9-8c88-c573528f3b8d\",\"filter\":[],\"query\":{\"language\":\"lucene\",\"query\":\"\"}}"
       }
     },
     "_meta": {
@@ -2146,7 +176,7 @@
       "description": "",
       "version": 1,
       "kibanaSavedObjectMeta": {
-        "searchSourceJSON": "{\"index\":\"e8ef6780-c03a-11e8-b9e8-d74f7e7408ed\",\"filter\":[{\"meta\":{\"index\":\"e8ef6780-c03a-11e8-b9e8-d74f7e7408ed\",\"type\":\"phrases\",\"key\":\"level.keyword\",\"value\":\"CRITICAL, ERROR\",\"params\":[\"CRITICAL\",\"ERROR\"],\"negate\":false,\"disabled\":false,\"alias\":null},\"query\":{\"bool\":{\"should\":[{\"match_phrase\":{\"level.keyword\":\"CRITICAL\"}},{\"match_phrase\":{\"level.keyword\":\"ERROR\"}}],\"minimum_should_match\":1}},\"$state\":{\"store\":\"appState\"}}],\"query\":{\"query\":\"\",\"language\":\"lucene\"}}"
+        "searchSourceJSON": "{\"index\":\"f02ffa10-02ab-11e9-8c88-c573528f3b8d\",\"filter\":[{\"meta\":{\"index\":\"e8ef6780-c03a-11e8-b9e8-d74f7e7408ed\",\"type\":\"phrases\",\"key\":\"level.keyword\",\"value\":\"CRITICAL, ERROR\",\"params\":[\"CRITICAL\",\"ERROR\"],\"negate\":false,\"disabled\":false,\"alias\":null},\"query\":{\"bool\":{\"should\":[{\"match_phrase\":{\"level.keyword\":\"CRITICAL\"}},{\"match_phrase\":{\"level.keyword\":\"ERROR\"}}],\"minimum_should_match\":1}},\"$state\":{\"store\":\"appState\"}}],\"query\":{\"query\":\"\",\"language\":\"lucene\"}}"
       }
     },
     "_meta": {
@@ -2163,7 +193,7 @@
       "description": "",
       "version": 1,
       "kibanaSavedObjectMeta": {
-        "searchSourceJSON": "{\"index\":\"e8ef6780-c03a-11e8-b9e8-d74f7e7408ed\",\"filter\":[],\"query\":{\"query\":\"\",\"language\":\"lucene\"}}"
+        "searchSourceJSON": "{\"index\":\"f02ffa10-02ab-11e9-8c88-c573528f3b8d\",\"filter\":[],\"query\":{\"query\":\"\",\"language\":\"lucene\"}}"
       }
     },
     "_meta": {
@@ -2180,24 +210,7 @@
       "description": "",
       "version": 1,
       "kibanaSavedObjectMeta": {
-        "searchSourceJSON": "{\"index\":\"e8ef6780-c03a-11e8-b9e8-d74f7e7408ed\",\"filter\":[],\"query\":{\"query\":\"\",\"language\":\"lucene\"}}"
-      }
-    },
-    "_meta": {
-      "savedObjectVersion": 2
-    }
-  },
-  {
-    "_id": "7a71d990-c7c0-11e8-af80-b5004896a534",
-    "_type": "visualization",
-    "_source": {
-      "title": "Max exec times [nomad]",
-      "visState": "{\"title\":\"Max exec times [nomad]\",\"type\":\"vega\",\"params\":{\"spec\":\"{\\n  \\\"$schema\\\": \\\"https://vega.github.io/schema/vega/v3.json\\\",\\n  \\\"data\\\": [{\\n    \\\"name\\\": \\\"results\\\",\\n      \\\"url\\\": {\\n        \\\"index\\\": \\\"logstash-*\\\",\\n        \\\"body\\\": {\\n          \\\"size\\\": 0,\\n          \\\"query\\\": {\\n            terms : {\\n              \\\"nomad.processing.current_task\\\": [\\\"parsing\\\", \\\"normalizing\\\"]\\n            }\\n          }\\n        \\t\\\"aggs\\\": {\\n        \\t\\t\\\"exec_time_per_step\\\": {\\n        \\t\\t\\t\\\"terms\\\": {\\n        \\t\\t\\t\\t\\\"field\\\": \\\"nomad.processing.step.keyword\\\"\\n        \\t\\t\\t},\\n        \\t\\t\\t\\\"aggs\\\": {\\n        \\t\\t\\t\\t\\\"quantile\\\": {\\n        \\t\\t\\t\\t\\t\\\"percentiles\\\": {\\n        \\t\\t\\t\\t\\t\\t\\\"field\\\": \\\"nomad.processing.exec_time\\\",\\n        \\t\\t\\t\\t\\t\\t\\\"percents\\\": [\\n        \\t\\t\\t\\t\\t\\t\\t25,\\n        \\t\\t\\t\\t\\t\\t\\t50,\\n        \\t\\t\\t\\t\\t\\t\\t75\\n        \\t\\t\\t\\t\\t\\t]\\n        \\t\\t\\t\\t\\t}\\n        \\t\\t\\t\\t},\\n        \\t\\t\\t\\t\\\"min_value\\\":{\\n        \\t\\t\\t\\t\\t\\\"min\\\": {\\n        \\t\\t\\t\\t\\t\\t\\\"field\\\": \\\"nomad.processing.exec_time\\\"\\n        \\t\\t\\t\\t\\t}\\n        \\t\\t\\t\\t},\\n        \\t\\t\\t\\t\\\"max_value\\\":{\\n        \\t\\t\\t\\t\\t\\\"max\\\": {\\n        \\t\\t\\t\\t\\t\\t\\\"field\\\": \\\"nomad.processing.exec_time\\\"\\n        \\t\\t\\t\\t\\t}\\n        \\t\\t\\t\\t},\\n        \\t\\t\\t\\t\\\"max_value_sort\\\": {\\n          \\t\\t\\t\\t\\\"bucket_sort\\\": {\\n              \\t\\t\\t\\\"sort\\\": [\\n                      {\\\"max_value\\\": {\\\"order\\\": \\\"desc\\\"}}\\n                    ],\\n                    \\\"size\\\": 10\\n                  }\\n                }\\n        \\t\\t\\t}\\n        \\t\\t}\\n        \\t}\\n        }\\n      },\\n      \\\"format\\\": {\\n        \\\"property\\\": \\\"aggregations.exec_time_per_step.buckets\\\"\\n      },\\n      \\\"transform\\\":[\\n        {\\n          \\\"type\\\": \\\"formula\\\",\\n          \\\"expr\\\": \\\"datum.quantile.values['25.0']\\\"\\n          \\\"as\\\": \\\"q1\\\"\\n        },\\n        {\\n          \\\"type\\\": \\\"formula\\\",\\n          \\\"expr\\\": \\\"datum.quantile.values['50.0']\\\"\\n          \\\"as\\\": \\\"median\\\"\\n        },\\n        {\\n          \\\"type\\\": \\\"formula\\\",\\n          \\\"expr\\\": \\\"datum.quantile.values['75.0']\\\"\\n          \\\"as\\\": \\\"q3\\\"\\n        },\\n        {\\n          \\\"type\\\": \\\"formula\\\",\\n          \\\"expr\\\": \\\"datum.min_value.value\\\"\\n          \\\"as\\\": \\\"min_value\\\"\\n        },\\n        {\\n          \\\"type\\\": \\\"formula\\\",\\n          \\\"expr\\\": \\\"datum.max_value.value\\\"\\n          \\\"as\\\": \\\"max_value\\\"\\n        },\\n        {\\n          \\\"type\\\": \\\"formula\\\",\\n          \\\"expr\\\": \\\"datum.key\\\",\\n          \\\"as\\\": \\\"step\\\"\\n        },\\n        {\\n        \\\"type\\\": \\\"fold\\\",\\n        \\\"fields\\\": [\\\"min_value\\\", \\\"q1\\\", \\\"median\\\", \\\"q3\\\", \\\"max_value\\\"],\\n         \\\"as\\\": [\\\"metric\\\", \\\"metricValue\\\"]\\n         }\\n      ]\\n    }],\\n  \\\"scales\\\": [\\n    {\\n      \\\"name\\\": \\\"layout\\\",\\n      \\\"type\\\": \\\"band\\\",\\n      \\\"range\\\": \\\"height\\\",\\n      \\\"domain\\\": 
{\\\"data\\\": \\\"results\\\", \\\"field\\\": \\\"key\\\"}\\n    },\\n    {\\n      \\\"name\\\": \\\"xscale\\\",\\n      \\\"type\\\": \\\"log\\\",\\n      \\\"range\\\": \\\"width\\\", \\\"round\\\": true,\\n      \\\"domain\\\": {\\\"data\\\": \\\"results\\\", \\\"field\\\": \\\"metricValue\\\"},\\n      \\\"zero\\\": false, \\\"nice\\\": true\\n    },\\n    {\\n      \\\"name\\\": \\\"color\\\",\\n      \\\"type\\\": \\\"ordinal\\\",\\n      \\\"range\\\": \\\"category\\\"\\n    }\\n  ],\\n  \\\"axes\\\": [\\n    {\\\"orient\\\": \\\"bottom\\\", \\\"scale\\\": \\\"xscale\\\", \\\"zindex\\\": 1, \\\"format\\\": \\\"s\\\", \\\"title\\\": \\\"Exec time in s\\\"},\\n    {\\\"orient\\\": \\\"left\\\", \\\"scale\\\": \\\"layout\\\", \\\"tickCount\\\": 20, \\\"zindex\\\": 1}\\n  ],\\n  \\\"marks\\\": [\\n    {\\n      \\\"type\\\": \\\"group\\\",\\n      \\\"from\\\": {\\n        \\\"facet\\\": {\\n          \\\"data\\\": \\\"results\\\",\\n          \\\"name\\\": \\\"meters\\\",\\n          \\\"groupby\\\": \\\"key\\\"\\n        }\\n      },\\n\\n      \\\"encode\\\": {\\n        \\\"enter\\\": {\\n          \\\"yc\\\": {\\\"scale\\\": \\\"layout\\\", \\\"field\\\": \\\"key\\\", \\\"band\\\": 0.5},\\n          // \\\"height\\\": {\\\"signal\\\": \\\"plotWidth\\\"},\\n          // \\\"width\\\": {\\\"signal\\\": \\\"width\\\"}\\n        }\\n      },\\n\\n      \\\"data\\\": [\\n        {\\n          \\\"name\\\": \\\"summary\\\",\\n          \\\"source\\\": \\\"meters\\\",\\n          \\\"transform\\\": [\\n            {\\n              \\\"type\\\": \\\"aggregate\\\",\\n              \\\"fields\\\": [\\\"metricValue\\\", \\\"metricValue\\\", \\\"metricValue\\\", \\\"metricValue\\\", \\\"metricValue\\\"],\\n              \\\"ops\\\": [\\\"min\\\", \\\"q1\\\", \\\"median\\\", \\\"q3\\\", \\\"max\\\"],\\n              \\\"as\\\": [\\\"min\\\", \\\"q1\\\", \\\"median\\\", \\\"q3\\\", \\\"max\\\"]\\n            }\\n          ]\\n        }\\n      ],\\n\\n      \\\"marks\\\": [\\n        {\\n          \\\"type\\\": \\\"rect\\\",\\n          \\\"from\\\": {\\\"data\\\": \\\"summary\\\"},\\n          \\\"encode\\\": {\\n            \\\"enter\\\": {\\n              \\\"fill\\\": {\\\"value\\\": \\\"black\\\"},\\n              \\\"height\\\": {\\\"value\\\": 1}\\n            },\\n            \\\"update\\\": {\\n              \\\"x\\\": {\\\"scale\\\": \\\"xscale\\\", \\\"field\\\": \\\"min\\\"},\\n              \\\"x2\\\": {\\\"scale\\\": \\\"xscale\\\", \\\"field\\\": \\\"max\\\"},\\n              \\\"zindex\\\": 1\\n            }\\n          }\\n        },\\n        {\\n          \\\"type\\\": \\\"rect\\\",\\n          \\\"from\\\": {\\\"data\\\": \\\"summary\\\"},\\n          \\\"encode\\\": {\\n            \\\"enter\\\": {\\n              \\\"stroke\\\": {\\\"value\\\": \\\"black\\\"}\\n              \\\"fill\\\": {\\\"value\\\": \\\"white\\\"}\\n            },\\n            \\\"update\\\": {\\n              \\\"height\\\": {\\\"value\\\": 20},\\n              \\\"yc\\\": {\\\"offset\\\": -0.5}\\n              \\\"x\\\": {\\\"scale\\\": \\\"xscale\\\", \\\"field\\\": \\\"q1\\\"},\\n              \\\"x2\\\": {\\\"scale\\\": \\\"xscale\\\", \\\"field\\\": \\\"q3\\\"}\\n              \\n            }\\n          }\\n        },\\n        {\\n          \\\"type\\\": \\\"rect\\\",\\n          \\\"from\\\": {\\\"data\\\": \\\"summary\\\"},\\n          \\\"encode\\\": {\\n            \\\"enter\\\": {\\n              \\\"fill\\\": {\\\"value\\\": \\\"black\\\"},\\n              \\\"width\\\": 
{\\\"value\\\": 1}\\n            },\\n            \\\"update\\\": {\\n              \\\"height\\\": {\\\"value\\\": 20},\\n              \\\"yc\\\": {\\\"offset\\\": -0.5},\\n              \\\"x\\\": {\\\"scale\\\": \\\"xscale\\\", \\\"field\\\": \\\"median\\\"}\\n            }\\n          }\\n        }\\n      ]\\n    }\\n  ]\\n}\"},\"aggs\":[]}",
-      "uiStateJSON": "{}",
-      "description": "",
-      "version": 1,
-      "kibanaSavedObjectMeta": {
-        "searchSourceJSON": "{}"
+        "searchSourceJSON": "{\"index\":\"f02ffa10-02ab-11e9-8c88-c573528f3b8d\",\"filter\":[],\"query\":{\"query\":\"\",\"language\":\"lucene\"}}"
       }
     },
     "_meta": {
@@ -2214,41 +227,7 @@
       "description": "",
       "version": 1,
       "kibanaSavedObjectMeta": {
-        "searchSourceJSON": "{\"index\":\"e8ef6780-c03a-11e8-b9e8-d74f7e7408ed\",\"filter\":[],\"query\":{\"language\":\"lucene\",\"query\":\"\"}}"
-      }
-    },
-    "_meta": {
-      "savedObjectVersion": 2
-    }
-  },
-  {
-    "_id": "a2e3f4e0-c7b0-11e8-af80-b5004896a534",
-    "_type": "visualization",
-    "_source": {
-      "title": "Normalizer exec times over input size [nomad]",
-      "visState": "{\"title\":\"Normalizer exec times over input size [nomad]\",\"type\":\"vega\",\"params\":{\"spec\":\"{\\n  $schema: https://vega.github.io/schema/vega-lite/v2.json\\n  data: {\\n    url: {\\n      index: logstash-*\\n      body: {\\n        size: 10000\\n        query: {\\n          bool: {\\n            must: [\\n              \\\"%dashboard_context-must_clause%\\\"\\n              {\\n                range: {\\n                  @timestamp: {\\n                    \\\"%timefilter%\\\": true\\n                  }\\n                }\\n              }\\n              {\\n                match: {\\n                  event.keyword: normalizer executed\\n                }\\n              }\\n            ]\\n            must_not: [\\n              \\\"%dashboard_context-must_not_clause%\\\"\\n            ]\\n          }\\n        }\\n        _source: [\\n          \\\"nomad.processing.input_size\\\", \\n          \\\"nomad.processing.exec_time\\\",\\n          \\\"nomad.processing.normalizer\\\"]\\n      }\\n    }\\n    format: {property: \\\"hits.hits\\\"}\\n  }\\n  transform: [\\n    {\\n      calculate: \\\"datum._source['nomad.processing.input_size']\\\"\\n      as: \\\"input_size\\\"\\n    },\\n    {\\n      calculate: \\\"datum._source['nomad.processing.exec_time']\\\"\\n      as: \\\"exec_time\\\"\\n    },\\n    {\\n      calculate: \\\"substring(datum._source['nomad.processing.normalizer'], 0)\\\"\\n      as: \\\"normalizer\\\"\\n    }\\n  ]\\n  mark: point\\n  encoding: {\\n    x: {\\n      field: input_size\\n      type: quantitative\\n      scale: {\\n        type: log\\n      }\\n      axis: { \\n        title: Input size in bytes \\n        format: s\\n      }\\n    }\\n    y: {\\n      field: exec_time\\n      type: quantitative\\n      scale: {\\n        type: log\\n      }\\n      axis: { \\n        title: \\\"Exec time in s\\\" \\n        format: s\\n      }\\n    }\\n    color: {\\n      field: normalizer\\n      type: nominal\\n      legend: { title: 'Normalizer' }\\n    }\\n    shape: {\\n      field: normalizer\\n      type: nominal\\n    }\\n  }\\n}\\n\"},\"aggs\":[]}",
-      "uiStateJSON": "{}",
-      "description": "",
-      "version": 1,
-      "kibanaSavedObjectMeta": {
-        "searchSourceJSON": "{}"
-      }
-    },
-    "_meta": {
-      "savedObjectVersion": 2
-    }
-  },
-  {
-    "_id": "50c32d20-c7b0-11e8-af80-b5004896a534",
-    "_type": "visualization",
-    "_source": {
-      "title": "Parser exec times over input size [nomad]",
-      "visState": "{\"title\":\"Parser exec times over input size [nomad]\",\"type\":\"vega\",\"params\":{\"spec\":\"{\\n  $schema: https://vega.github.io/schema/vega-lite/v2.json\\n  data: {\\n    url: {\\n      index: logstash-*\\n      body: {\\n        size: 10000\\n        query: {\\n          bool: {\\n            must: [\\n              \\\"%dashboard_context-must_clause%\\\"\\n              {\\n                range: {\\n                  @timestamp: {\\n                    \\\"%timefilter%\\\": true\\n                  }\\n                }\\n              }\\n              {\\n                match: {\\n                  event.keyword: parser executed\\n                }\\n              }\\n            ]\\n            must_not: [\\n              \\\"%dashboard_context-must_not_clause%\\\"\\n            ]\\n          }\\n        }\\n        _source: [\\n          \\\"nomad.processing.input_size\\\", \\n          \\\"nomad.processing.exec_time\\\",\\n          \\\"nomad.processing.parser\\\"]\\n      }\\n    }\\n    format: {property: \\\"hits.hits\\\"}\\n  }\\n  transform: [\\n    {\\n      calculate: \\\"datum._source['nomad.processing.input_size']\\\"\\n      as: \\\"input_size\\\"\\n    },\\n    {\\n      calculate: \\\"datum._source['nomad.processing.exec_time']\\\"\\n      as: \\\"exec_time\\\"\\n    },\\n    {\\n      calculate: \\\"substring(datum._source['nomad.processing.parser'], 8)\\\"\\n      as: \\\"parser\\\"\\n    }\\n  ]\\n  mark: point\\n  encoding: {\\n    x: {\\n      field: input_size\\n      type: quantitative\\n      scale: {\\n        type: log\\n      }\\n      axis: { \\n        title: Input size in bytes \\n        format: s\\n      }\\n    }\\n    y: {\\n      field: exec_time\\n      type: quantitative\\n      scale: {\\n        type: log\\n      }\\n      axis: { \\n        title: \\\"Exec time in s\\\" \\n        format: s\\n      }\\n    }\\n    color: {\\n      field: parser\\n      type: nominal\\n      legend: { title: 'Parser' }\\n    }\\n    shape: {\\n      field: parser\\n      type: nominal\\n    }\\n  }\\n}\\n\"},\"aggs\":[]}",
-      "uiStateJSON": "{}",
-      "description": "",
-      "version": 1,
-      "kibanaSavedObjectMeta": {
-        "searchSourceJSON": "{}"
+        "searchSourceJSON": "{\"index\":\"f02ffa10-02ab-11e9-8c88-c573528f3b8d\",\"filter\":[],\"query\":{\"language\":\"lucene\",\"query\":\"\"}}"
       }
     },
     "_meta": {
@@ -2265,24 +244,7 @@
       "description": "",
       "version": 1,
       "kibanaSavedObjectMeta": {
-        "searchSourceJSON": "{\"index\":\"e8ef6780-c03a-11e8-b9e8-d74f7e7408ed\",\"filter\":[{\"meta\":{\"index\":\"e8ef6780-c03a-11e8-b9e8-d74f7e7408ed\",\"type\":\"phrases\",\"key\":\"event.keyword\",\"value\":\"task failed, task failed with exception\",\"params\":[\"task failed\",\"task failed with exception\"],\"negate\":false,\"disabled\":false,\"alias\":null},\"query\":{\"bool\":{\"should\":[{\"match_phrase\":{\"event.keyword\":\"task failed\"}},{\"match_phrase\":{\"event.keyword\":\"task failed with exception\"}}],\"minimum_should_match\":1}},\"$state\":{\"store\":\"appState\"}}],\"query\":{\"query\":\"\",\"language\":\"lucene\"}}"
-      }
-    },
-    "_meta": {
-      "savedObjectVersion": 2
-    }
-  },
-  {
-    "_id": "8bcaf100-c7d3-11e8-af80-b5004896a534",
-    "_type": "visualization",
-    "_source": {
-      "title": "Error events per logger [nomad]",
-      "visState": "{\"title\":\"Error events per logger [nomad]\",\"type\":\"horizontal_bar\",\"params\":{\"type\":\"histogram\",\"grid\":{\"categoryLines\":false,\"style\":{\"color\":\"#eee\"}},\"categoryAxes\":[{\"id\":\"CategoryAxis-1\",\"type\":\"category\",\"position\":\"left\",\"show\":true,\"style\":{},\"scale\":{\"type\":\"linear\"},\"labels\":{\"show\":true,\"rotate\":0,\"filter\":false,\"truncate\":20},\"title\":{}}],\"valueAxes\":[{\"id\":\"ValueAxis-1\",\"name\":\"LeftAxis-1\",\"type\":\"value\",\"position\":\"bottom\",\"show\":true,\"style\":{},\"scale\":{\"type\":\"linear\",\"mode\":\"normal\"},\"labels\":{\"show\":true,\"rotate\":75,\"filter\":true,\"truncate\":100},\"title\":{\"text\":\"Count\"}}],\"seriesParams\":[{\"show\":true,\"type\":\"histogram\",\"mode\":\"normal\",\"data\":{\"label\":\"Count\",\"id\":\"1\"},\"valueAxis\":\"ValueAxis-1\",\"drawLinesBetweenPoints\":true,\"showCircles\":true}],\"addTooltip\":true,\"addLegend\":true,\"legendPosition\":\"right\",\"times\":[],\"addTimeMarker\":false},\"aggs\":[{\"id\":\"1\",\"enabled\":true,\"type\":\"count\",\"schema\":\"metric\",\"params\":{}},{\"id\":\"2\",\"enabled\":true,\"type\":\"terms\",\"schema\":\"segment\",\"params\":{\"field\":\"event.keyword\",\"otherBucket\":false,\"otherBucketLabel\":\"Other\",\"missingBucket\":true,\"missingBucketLabel\":\"<unknown>\",\"size\":10,\"order\":\"desc\",\"orderBy\":\"1\",\"customLabel\":\"Event\"}},{\"id\":\"3\",\"enabled\":true,\"type\":\"terms\",\"schema\":\"group\",\"params\":{\"field\":\"logger_name.keyword\",\"otherBucket\":true,\"otherBucketLabel\":\"Other\",\"missingBucket\":true,\"missingBucketLabel\":\"<unknown>\",\"size\":5,\"order\":\"desc\",\"orderBy\":\"1\",\"customLabel\":\"Logger\"}}]}",
-      "uiStateJSON": "{}",
-      "description": "",
-      "version": 1,
-      "kibanaSavedObjectMeta": {
-        "searchSourceJSON": "{\"index\":\"e8ef6780-c03a-11e8-b9e8-d74f7e7408ed\",\"filter\":[{\"meta\":{\"index\":\"e8ef6780-c03a-11e8-b9e8-d74f7e7408ed\",\"type\":\"phrases\",\"key\":\"level.keyword\",\"value\":\"ERROR, CRITICAL\",\"params\":[\"ERROR\",\"CRITICAL\"],\"negate\":false,\"disabled\":false,\"alias\":null},\"query\":{\"bool\":{\"should\":[{\"match_phrase\":{\"level.keyword\":\"ERROR\"}},{\"match_phrase\":{\"level.keyword\":\"CRITICAL\"}}],\"minimum_should_match\":1}},\"$state\":{\"store\":\"appState\"}}],\"query\":{\"query\":\"\",\"language\":\"lucene\"}}"
+        "searchSourceJSON": "{\"index\":\"f02ffa10-02ab-11e9-8c88-c573528f3b8d\",\"filter\":[{\"meta\":{\"index\":\"e8ef6780-c03a-11e8-b9e8-d74f7e7408ed\",\"type\":\"phrases\",\"key\":\"event.keyword\",\"value\":\"task failed, task failed with exception\",\"params\":[\"task failed\",\"task failed with exception\"],\"negate\":false,\"disabled\":false,\"alias\":null},\"query\":{\"bool\":{\"should\":[{\"match_phrase\":{\"event.keyword\":\"task failed\"}},{\"match_phrase\":{\"event.keyword\":\"task failed with exception\"}}],\"minimum_should_match\":1}},\"$state\":{\"store\":\"appState\"}}],\"query\":{\"query\":\"\",\"language\":\"lucene\"}}"
       }
     },
     "_meta": {
@@ -2299,7 +261,7 @@
       "description": "",
       "version": 1,
       "kibanaSavedObjectMeta": {
-        "searchSourceJSON": "{\"index\":\"e8ef6780-c03a-11e8-b9e8-d74f7e7408ed\",\"filter\":[{\"meta\":{\"index\":\"e8ef6780-c03a-11e8-b9e8-d74f7e7408ed\",\"type\":\"phrases\",\"key\":\"level.keyword\",\"value\":\"ERROR, CRITICAL\",\"params\":[\"ERROR\",\"CRITICAL\"],\"negate\":false,\"disabled\":false,\"alias\":null},\"query\":{\"bool\":{\"should\":[{\"match_phrase\":{\"level.keyword\":\"ERROR\"}},{\"match_phrase\":{\"level.keyword\":\"CRITICAL\"}}],\"minimum_should_match\":1}},\"$state\":{\"store\":\"appState\"}}],\"query\":{\"query\":\"\",\"language\":\"lucene\"}}"
+        "searchSourceJSON": "{\"index\":\"f02ffa10-02ab-11e9-8c88-c573528f3b8d\",\"filter\":[{\"meta\":{\"index\":\"e8ef6780-c03a-11e8-b9e8-d74f7e7408ed\",\"type\":\"phrases\",\"key\":\"level.keyword\",\"value\":\"ERROR, CRITICAL\",\"params\":[\"ERROR\",\"CRITICAL\"],\"negate\":false,\"disabled\":false,\"alias\":null},\"query\":{\"bool\":{\"should\":[{\"match_phrase\":{\"level.keyword\":\"ERROR\"}},{\"match_phrase\":{\"level.keyword\":\"CRITICAL\"}}],\"minimum_should_match\":1}},\"$state\":{\"store\":\"appState\"}}],\"query\":{\"query\":\"\",\"language\":\"lucene\"}}"
       }
     },
     "_meta": {
@@ -2307,16 +269,16 @@
     }
   },
   {
-    "_id": "1e4b5cd0-c7df-11e8-af80-b5004896a534",
+    "_id": "8bcaf100-c7d3-11e8-af80-b5004896a534",
     "_type": "visualization",
     "_source": {
-      "title": "Failed calc procs [nomad]",
-      "visState": "{\"title\":\"Failed calc procs [nomad]\",\"type\":\"metric\",\"params\":{\"addTooltip\":true,\"addLegend\":false,\"type\":\"metric\",\"metric\":{\"percentageMode\":false,\"useRanges\":false,\"colorSchema\":\"Green to Red\",\"metricColorMode\":\"None\",\"colorsRange\":[{\"from\":0,\"to\":10000}],\"labels\":{\"show\":true},\"invertColors\":false,\"style\":{\"bgFill\":\"#000\",\"bgColor\":false,\"labelColor\":false,\"subText\":\"\",\"fontSize\":60}}},\"aggs\":[{\"id\":\"1\",\"enabled\":true,\"type\":\"cardinality\",\"schema\":\"metric\",\"params\":{\"field\":\"nomad.archive_id.keyword\",\"customLabel\":\"Failed calc procs\"}}]}",
+      "title": "Error events per logger [nomad]",
+      "visState": "{\"title\":\"Error events per logger [nomad]\",\"type\":\"horizontal_bar\",\"params\":{\"type\":\"histogram\",\"grid\":{\"categoryLines\":false,\"style\":{\"color\":\"#eee\"}},\"categoryAxes\":[{\"id\":\"CategoryAxis-1\",\"type\":\"category\",\"position\":\"left\",\"show\":true,\"style\":{},\"scale\":{\"type\":\"linear\"},\"labels\":{\"show\":true,\"rotate\":0,\"filter\":false,\"truncate\":20},\"title\":{}}],\"valueAxes\":[{\"id\":\"ValueAxis-1\",\"name\":\"LeftAxis-1\",\"type\":\"value\",\"position\":\"bottom\",\"show\":true,\"style\":{},\"scale\":{\"type\":\"linear\",\"mode\":\"normal\"},\"labels\":{\"show\":true,\"rotate\":75,\"filter\":true,\"truncate\":100},\"title\":{\"text\":\"Count\"}}],\"seriesParams\":[{\"show\":true,\"type\":\"histogram\",\"mode\":\"normal\",\"data\":{\"label\":\"Count\",\"id\":\"1\"},\"valueAxis\":\"ValueAxis-1\",\"drawLinesBetweenPoints\":true,\"showCircles\":true}],\"addTooltip\":true,\"addLegend\":true,\"legendPosition\":\"right\",\"times\":[],\"addTimeMarker\":false},\"aggs\":[{\"id\":\"1\",\"enabled\":true,\"type\":\"count\",\"schema\":\"metric\",\"params\":{}},{\"id\":\"2\",\"enabled\":true,\"type\":\"terms\",\"schema\":\"segment\",\"params\":{\"field\":\"event.keyword\",\"otherBucket\":false,\"otherBucketLabel\":\"Other\",\"missingBucket\":true,\"missingBucketLabel\":\"<unknown>\",\"size\":10,\"order\":\"desc\",\"orderBy\":\"1\",\"customLabel\":\"Event\"}},{\"id\":\"3\",\"enabled\":true,\"type\":\"terms\",\"schema\":\"group\",\"params\":{\"field\":\"logger_name.keyword\",\"otherBucket\":true,\"otherBucketLabel\":\"Other\",\"missingBucket\":true,\"missingBucketLabel\":\"<unknown>\",\"size\":5,\"order\":\"desc\",\"orderBy\":\"1\",\"customLabel\":\"Logger\"}}]}",
       "uiStateJSON": "{}",
       "description": "",
       "version": 1,
       "kibanaSavedObjectMeta": {
-        "searchSourceJSON": "{\"index\":\"e8ef6780-c03a-11e8-b9e8-d74f7e7408ed\",\"filter\":[{\"$state\":{\"store\":\"appState\"},\"meta\":{\"alias\":null,\"disabled\":false,\"index\":\"e8ef6780-c03a-11e8-b9e8-d74f7e7408ed\",\"key\":\"nomad.processing.process.keyword\",\"negate\":false,\"params\":{\"query\":\"Calc\",\"type\":\"phrase\"},\"type\":\"phrase\",\"value\":\"Calc\"},\"query\":{\"match\":{\"nomad.processing.process.keyword\":{\"query\":\"Calc\",\"type\":\"phrase\"}}}},{\"$state\":{\"store\":\"appState\"},\"meta\":{\"alias\":null,\"disabled\":false,\"index\":\"e8ef6780-c03a-11e8-b9e8-d74f7e7408ed\",\"key\":\"nomad.processing.status.keyword\",\"negate\":false,\"params\":{\"query\":\"FAILURE\",\"type\":\"phrase\"},\"type\":\"phrase\",\"value\":\"FAILURE\"},\"query\":{\"match\":{\"nomad.processing.status.keyword\":{\"query\":\"FAILURE\",\"type\":\"phrase\"}}}}],\"query\":{\"language\":\"lucene\",\"query\":\"\"}}"
+        "searchSourceJSON": "{\"index\":\"f02ffa10-02ab-11e9-8c88-c573528f3b8d\",\"filter\":[{\"meta\":{\"index\":\"e8ef6780-c03a-11e8-b9e8-d74f7e7408ed\",\"type\":\"phrases\",\"key\":\"level.keyword\",\"value\":\"ERROR, CRITICAL\",\"params\":[\"ERROR\",\"CRITICAL\"],\"negate\":false,\"disabled\":false,\"alias\":null},\"query\":{\"bool\":{\"should\":[{\"match_phrase\":{\"level.keyword\":\"ERROR\"}},{\"match_phrase\":{\"level.keyword\":\"CRITICAL\"}}],\"minimum_should_match\":1}},\"$state\":{\"store\":\"appState\"}}],\"query\":{\"query\":\"\",\"language\":\"lucene\"}}"
       }
     },
     "_meta": {
@@ -2324,16 +286,16 @@
     }
   },
   {
-    "_id": "d99f6c10-c7b2-11e8-af80-b5004896a534",
+    "_id": "1e4b5cd0-c7df-11e8-af80-b5004896a534",
     "_type": "visualization",
     "_source": {
-      "title": "Archive exec times over archive size [nomad]",
-      "visState": "{\"title\":\"Archive exec times over archive size [nomad]\",\"type\":\"vega\",\"params\":{\"spec\":\"{\\n  $schema: https://vega.github.io/schema/vega-lite/v2.json\\n  data: {\\n    url: {\\n      index: logstash-*\\n      body: {\\n        size: 10000\\n        query: {\\n          bool: {\\n            must: [\\n              \\\"%dashboard_context-must_clause%\\\"\\n              {\\n                range: {\\n                  @timestamp: {\\n                    \\\"%timefilter%\\\": true\\n                  }\\n                }\\n              }\\n              {\\n                match: {\\n                  event.keyword: archived\\n                }\\n              }\\n            ]\\n            must_not: [\\n              \\\"%dashboard_context-must_not_clause%\\\"\\n            ]\\n          }\\n        }\\n        _source: [\\n          \\\"nomad.processing.input_size\\\", \\n          \\\"nomad.processing.exec_time\\\",\\n          \\\"nomad.processing.archive_size\\\"]\\n      }\\n    }\\n    format: {property: \\\"hits.hits\\\"}\\n  }\\n  transform: [\\n    {\\n      calculate: \\\"datum._source['nomad.processing.input_size']\\\"\\n      as: \\\"input_size\\\"\\n    },\\n    {\\n      calculate: \\\"datum._source['nomad.processing.exec_time']\\\"\\n      as: \\\"exec_time\\\"\\n    },\\n    {\\n      calculate: \\\"datum._source['nomad.processing.archive_size']\\\"\\n      as: \\\"archive_size\\\"\\n    }\\n  ]\\n  mark: circle\\n  encoding: {\\n    x: {\\n      field: archive_size\\n      type: quantitative\\n      scale: {\\n        type: log\\n      }\\n      axis: { \\n        title: Archive size in bytes \\n        format: s\\n      }\\n    }\\n    y: {\\n      field: exec_time\\n      type: quantitative\\n      scale: {\\n        type: log\\n      }\\n      axis: { \\n        title: Exec time in s\\n        format: s\\n      }\\n    }\\n    size: {\\n      field: \\\"input_size\\\", \\n      type: \\\"quantitative\\\", \\n      legend: {\\n        title: Input size in bytes\\n        format: s\\n      },\\n      scale: {\\n        // type: log\\n      }\\n    }\\n  }\\n}\\n\"},\"aggs\":[]}",
+      "title": "Failed calc procs [nomad]",
+      "visState": "{\"title\":\"Failed calc procs [nomad]\",\"type\":\"metric\",\"params\":{\"addTooltip\":true,\"addLegend\":false,\"type\":\"metric\",\"metric\":{\"percentageMode\":false,\"useRanges\":false,\"colorSchema\":\"Green to Red\",\"metricColorMode\":\"None\",\"colorsRange\":[{\"from\":0,\"to\":10000}],\"labels\":{\"show\":true},\"invertColors\":false,\"style\":{\"bgFill\":\"#000\",\"bgColor\":false,\"labelColor\":false,\"subText\":\"\",\"fontSize\":60}}},\"aggs\":[{\"id\":\"1\",\"enabled\":true,\"type\":\"cardinality\",\"schema\":\"metric\",\"params\":{\"field\":\"nomad.archive_id.keyword\",\"customLabel\":\"Failed calc procs\"}}]}",
       "uiStateJSON": "{}",
       "description": "",
       "version": 1,
       "kibanaSavedObjectMeta": {
-        "searchSourceJSON": "{}"
+        "searchSourceJSON": "{\"index\":\"f02ffa10-02ab-11e9-8c88-c573528f3b8d\",\"filter\":[{\"$state\":{\"store\":\"appState\"},\"meta\":{\"alias\":null,\"disabled\":false,\"index\":\"e8ef6780-c03a-11e8-b9e8-d74f7e7408ed\",\"key\":\"nomad.processing.process.keyword\",\"negate\":false,\"params\":{\"query\":\"Calc\",\"type\":\"phrase\"},\"type\":\"phrase\",\"value\":\"Calc\"},\"query\":{\"match\":{\"nomad.processing.process.keyword\":{\"query\":\"Calc\",\"type\":\"phrase\"}}}},{\"$state\":{\"store\":\"appState\"},\"meta\":{\"alias\":null,\"disabled\":false,\"index\":\"e8ef6780-c03a-11e8-b9e8-d74f7e7408ed\",\"key\":\"nomad.processing.status.keyword\",\"negate\":false,\"params\":{\"query\":\"FAILURE\",\"type\":\"phrase\"},\"type\":\"phrase\",\"value\":\"FAILURE\"},\"query\":{\"match\":{\"nomad.processing.status.keyword\":{\"query\":\"FAILURE\",\"type\":\"phrase\"}}}}],\"query\":{\"language\":\"lucene\",\"query\":\"\"}}"
       }
     },
     "_meta": {
diff --git a/ops/containers/elk/run_elk.sh b/ops/containers/elk/run_elk.sh
deleted file mode 100644
index 6484dbb16525e5e1192dae15c90a39adac1e58b3..0000000000000000000000000000000000000000
--- a/ops/containers/elk/run_elk.sh
+++ /dev/null
@@ -1,3 +0,0 @@
-#!/bin/bash
-docker build -t nomad_elk .
-docker run -v nomad_elk:/var/lib/elasticsearch -p 15601:5601 -p 15000:5000 -p 15044:5044 --restart=always nomad_elk
\ No newline at end of file
diff --git a/ops/docker-compose/README.md b/ops/docker-compose/README.md
new file mode 100644
index 0000000000000000000000000000000000000000..168c1ce79654a651293cc6b32f4386e583597e27
--- /dev/null
+++ b/ops/docker-compose/README.md
@@ -0,0 +1,28 @@
+## Single Node Deployment, Using Docker Compose
+
+### nomad
+
+In `nomad` you find *docker-compose* files that can be used to run nomad with
+docker-compose, either for development or production purposes. See [setup](./setup.html)
+for details on running things for development.
+
+We use docker-compose overrides to modify config for development and production. Example:
+
+```
+docker-compose -f docker-compose.yml -f docker-compose.prod.yml up -d api
+```
+
+The different overrides are:
+- `*.prod.yml`, production (currently on enc-preprocessing-nomad.esc)
+- `*.override.yml`, development (will be automatically used by docker-compose)
+- `*.develk.yml`, like development but also runs ELK
+
+The `.env` file contains some additional config and secrets. The development
+secrets do not matter and are kept in git (`.env_development`); they are replaced
+by real secrets on the production machine.
+
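+A minimal `.env` might look like this (a sketch; the variable names are
+illustrative, the actual keys are defined by the compose files):
+
+```
+NOMAD_API_SECRET=someRandomSecret
+POSTGRES_PASSWORD=someDbPassword
+```
+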
+### rawapi
+
+Same as `nomad`, but in `rawapi` you find everything needed to run just the *rawapi*.
+This is a partial API that only serves raw files. It is used by the *materials project* to
+download NOMAD-coe files from Garching. It will be removed after consolidation.
\ No newline at end of file
diff --git a/ops/docker-compose/nomad/docker-compose.develk.yml b/ops/docker-compose/nomad/docker-compose.develk.yml
index 49e73ba86e222b907b7a7da513d9863e8ce210a3..9bf20e8a5958962c176b7e9b5f335ada1d0d5f9f 100644
--- a/ops/docker-compose/nomad/docker-compose.develk.yml
+++ b/ops/docker-compose/nomad/docker-compose.develk.yml
@@ -38,12 +38,10 @@ services:
         container_name: nomad_elk
         expose:
             - 5000 # logstash beats
-            - 5044 # logstash tcp
         volumes:
             - nomad_elk:/var/lib/elasticsearch
         ports:
             - 5601:5601  # kibana web
-            - 5044:5044   # expose to host, allows metricbeat on host to send data
             - 15000:5000   # allow nomad services to logstash outside of docker
             - 29200:9200  # allows metricbeat config to access es
 
diff --git a/ops/docker-compose/nomad/docker-compose.prod.yml b/ops/docker-compose/nomad/docker-compose.prod.yml
index c7811024eb31c1cfd3e982aed8bcc53fdfaca623..013c4ae574db319426c49770b814d3586ede1722 100644
--- a/ops/docker-compose/nomad/docker-compose.prod.yml
+++ b/ops/docker-compose/nomad/docker-compose.prod.yml
@@ -28,17 +28,15 @@ services:
     # used for centralized logging
     elk:
         restart: always
-        build: ../containers/elk/
+        build: ../../containers/elk/
         image: nomad/elk
         container_name: nomad_elk
         expose:
             - 5000 # logstash beats
-            - 5044 # logstash tcp
         volumes:
             - nomad_elk:/var/lib/elasticsearch
         ports:
             - 15601:5601  # kibana web
-            - 5044:5044   # expose to host, allows metricbeat on host to send data
             - 15000:5000
             - 29200:9200  # allows metricbeat config to access es
 
diff --git a/ops/helm/README.md b/ops/helm/README.md
new file mode 100644
index 0000000000000000000000000000000000000000..51177e12d65626e9e88ab5ac8147a66f4c98ba54
--- /dev/null
+++ b/ops/helm/README.md
@@ -0,0 +1,25 @@
+## Cluster Deployment, Using Kubernetes and Helm
+
+We use helm charts to describe the deployment of nomad services in a kubernetes cluster.
+
+### nomad
+
+This chart runs the nomad api, worker, gui, and proxy in a kubernetes cluster.
+The `values.yaml` contains more documentation on the different values.
+
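+The images are pulled from a private registry. The pull secret referenced by
+`images.secret` in `values.yaml` has to exist in the cluster; it can be created
+with, e.g.:
+
+```
+kubectl create secret docker-registry gitlab-mpcdf --docker-server=gitlab-registry.mpcdf.mpg.de --docker-username=<your-user-name> --docker-password=<yourpass> --docker-email=<email>
+```
+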
+The chart can be used to run multiple nomad instances in parallel on the same
+cluster by using different URL paths and database names.
+
+The chart does not run any databases or search engines. Those are supposed to run
+separately (see also *nomad-full* for an alternative approach); their hosts, etc.
+can be configured via helm values.
+
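+For example, installing a second instance next to an existing one might look like
+this (a sketch; the release name and value overrides are illustrative):
+
+```
+helm dep build
+helm install . --name nomad-test \
+  --set dbname=fairdi_nomad_test \
+  --set proxy.nodePort=30004 \
+  --set proxy.external.path=/fairdi/nomad-test
+```
+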
+### rawapi
+
+Similar to *nomad*, and the counterpart of `rawapi` in `docker-compose`. It runs the
+rawapi on its own for the *materials project*.
+
+### nomad-full
+
+This chart is under development. It is an attempt to also run all required databases
+and search engines in the same kubernetes cluster.
\ No newline at end of file
diff --git a/ops/helm/nomad/Chart.yaml b/ops/helm/nomad/Chart.yaml
index b62d6af1d1c7de409bd34815ebb27edf2d9c48cb..76e1b65bedb64ce8e024ff32639c3fe47ebeefc0 100644
--- a/ops/helm/nomad/Chart.yaml
+++ b/ops/helm/nomad/Chart.yaml
@@ -1,5 +1,5 @@
 apiVersion: v1
-appVersion: "1.0"
+appVersion: "0.3.0"
 description: A Helm chart for Kubernetes that only runs nomad services and uses externally hosted databases.
 name: nomad
-version: 0.1.0
+version: 0.3.0
diff --git a/ops/helm/nomad/templates/api-deployment.yaml b/ops/helm/nomad/templates/api-deployment.yaml
index 1838434a6e3b0e40b20fe163a7995dcd8f216408..ad7c74d05643089241a1699f46eaf5d9f7cac042 100644
--- a/ops/helm/nomad/templates/api-deployment.yaml
+++ b/ops/helm/nomad/templates/api-deployment.yaml
@@ -28,6 +28,8 @@ spec:
         env:
         - name: NOMAD_SERVICE
           value: "api"
+        - name: NOMAD_RELEASE
+          value: "{{ .Release.Name }}"
         - name: NOMAD_LOGSTASH_HOST
           value: "{{ .Values.logstash.host }}"
         - name: NOMAD_LOGSTASH_TCPPORT
@@ -44,6 +46,10 @@ spec:
           value: "{{ .Values.proxy.external.path }}/api"
         - name: NOMAD_API_SECRET
           value: "{{ .Values.api.secret }}"
+        - name: NOMAD_API_ADMIN_PASSWORD
+          value: "{{ .Values.api.adminPassword }}"
+        - name: NOMAD_API_DISABLE_RESET
+          value: "{{ .Values.api.disableReset }}"
         - name: NOMAD_RABBITMQ_HOST
           value: "{{ .Release.Name }}-rabbitmq"
         - name: NOMAD_ELASTIC_HOST
@@ -51,22 +57,23 @@ spec:
         - name: NOMAD_ELASTIC_PORT
           value: "{{ .Values.elastic.port }}"
         - name: NOMAD_ELASTIC_INDEX_NAME
-          value: "{{ .Release.Name }}-nomad"
+          value: "{{ .Values.dbname }}"
         - name: NOMAD_MONGO_HOST
           value: "{{ .Values.mongo.host }}"
         - name: NOMAD_MONGO_PORT
           value: "{{ .Values.mongo.port }}"
         - name: NOMAD_MONGO_DB_NAME
-          value: "{{ .Release.Name }}-nomad"
+          value: "{{ .Values.dbname }}"
         - name: NOMAD_COE_REPO_DB_HOST
           value: "{{ .Values.postgres.host }}"
         - name: NOMAD_COE_REPO_DB_PORT
           value: "{{ .Values.postgres.port }}"
         - name: NOMAD_COE_REPO_DB_NAME
-          value: "{{ .Release.Name }}_nomad"
+          value: "{{ .Values.dbname }}"
         command: ["python", "-m", "gunicorn.app.wsgiapp", "-b 0.0.0.0:8000", "nomad.api:app"]
+        imagePullPolicy: Always
       imagePullSecrets:
       - name: {{ .Values.images.secret }}
       volumes:
       - name: files-volume
         hostPath:
diff --git a/ops/helm/nomad/templates/gui-deployment.yml b/ops/helm/nomad/templates/gui-deployment.yml
index 1fa0638f7c46af7bc32d3461847b1b7deec43ed0..6aba8995e606665f52ae401fd33bcf2437b6c5cd 100644
--- a/ops/helm/nomad/templates/gui-deployment.yml
+++ b/ops/helm/nomad/templates/gui-deployment.yml
@@ -1,3 +1,32 @@
+apiVersion: v1
+kind: ConfigMap
+metadata:
+  name: {{ include "nomad.fullname" . }}-gui-config
+  labels:
+    app.kubernetes.io/name: {{ include "nomad.name" . }}-gui-config
+    helm.sh/chart: {{ include "nomad.chart" . }}
+    app.kubernetes.io/instance: {{ .Release.Name }}
+    app.kubernetes.io/managed-by: {{ .Release.Service }}
+data:
+  nginx.conf: |
+    server {
+      listen        8080;
+      server_name   www.example.com;
+      location {{ .Values.proxy.external.path }} {
+        root      /app/;
+        rewrite ^{{ .Values.proxy.external.path }}/(.*)$ /nomad/$1 break;
+        try_files $uri {{ .Values.proxy.external.path }}/index.html;
+      }
+    }
+  env.js: |
+    window.nomadEnv = {
+      "apiBase": "{{ .Values.proxy.external.path }}/api",
+      "appBase": "{{ .Values.proxy.external.path }}",
+      "appStaticBase": "{{ .Values.proxy.external.path }}",
+      "kibanaBase": "{{ .Values.proxy.external.kibanaPath }}",
+      "appDebug": false
+    };
+---
 apiVersion: apps/v1
 kind: Deployment
 metadata:
@@ -23,5 +52,32 @@ spec:
       - name: {{ include "nomad.name" . }}-gui
         image: "{{ .Values.images.frontend.name }}:{{ .Values.images.frontend.tag }}"
         command: ["nginx", "-g", "daemon off;"]
+        ports:
+        - containerPort: 8080
+        volumeMounts:
+        - mountPath: /etc/nginx/conf.d
+          readOnly: true
+          name: nginx-conf
+        - mountPath: /app/nomad/config
+          readOnly: true
+          name: nomad-app
+        - mountPath: /var/log/nginx
+          name: log
+        imagePullPolicy: Always
       imagePullSecrets:
       - name: {{ .Values.images.secret }}
+      volumes:
+      - name: nginx-conf
+        configMap:
+          name: {{ include "nomad.fullname" . }}-gui-config
+          items:
+          - key: nginx.conf
+            path: default.conf
+      - name: nomad-app
+        configMap:
+          name: {{ include "nomad.fullname" . }}-gui-config
+          items:
+          - key: env.js
+            path: env.js
+      - name: log
+        emptyDir: {}
diff --git a/ops/helm/nomad/templates/proxy-deployment.yml b/ops/helm/nomad/templates/proxy-deployment.yml
index 6957dc9f855ab7b42ad3941e162f82424307823d..25dec0788ba4678bd098295de53d47596475c2bf 100644
--- a/ops/helm/nomad/templates/proxy-deployment.yml
+++ b/ops/helm/nomad/templates/proxy-deployment.yml
@@ -12,7 +12,6 @@ data:
     server {
       listen        80;
       server_name   www.example.com;
-
       location {{ .Values.proxy.external.path }} {
         proxy_pass http://{{ include "nomad.fullname" . }}-gui:{{ .Values.gui.port }};
         proxy_set_header Host $host;
@@ -28,7 +27,7 @@ data:
 
       location {{ .Values.proxy.external.path }}/kibana {
         rewrite ^{{ .Values.proxy.external.path}}/kibana/(.*)$ /$1 break;
-        proxy_pass http://{{ .Values.kibana.host }}:{{ .Values.kibana.port }};  
+        proxy_pass http://{{ .Values.kibana.host }}:{{ .Values.kibana.port }};
         proxy_set_header Host $host;
         proxy_set_header X-Real-IP $remote_addr;
       }
@@ -71,8 +70,8 @@ spec:
         configMap:
           name: {{ include "nomad.fullname" . }}-proxy-config
           items:
-            - key: nginx.conf
-              path: default.conf
+          - key: nginx.conf
+            path: default.conf
       - name: log
         emptyDir: {}
 
diff --git a/ops/helm/nomad/templates/proxy-service.yaml b/ops/helm/nomad/templates/proxy-service.yaml
index 2ad1beea0eb49a49404625ed5976d72c400195a1..a45e70857144192f50e1b9c4ab2ed58bfe89ddf8 100644
--- a/ops/helm/nomad/templates/proxy-service.yaml
+++ b/ops/helm/nomad/templates/proxy-service.yaml
@@ -10,7 +10,7 @@ metadata:
 spec:
   type: NodePort
   externalIPs:
-  - 130.183.207.116 
+  - {{ .Values.proxy.nodeIP }}
   ports:
   - nodePort: {{ .Values.proxy.nodePort }}
     port: {{ .Values.proxy.port }}
diff --git a/ops/helm/nomad/templates/worker-deployment.yaml b/ops/helm/nomad/templates/worker-deployment.yaml
index a68b8801bee6db551ae9c11481aa60f4168fe5e1..92dd0546badb5ed157ca49b03d4ad972eb580e91 100644
--- a/ops/helm/nomad/templates/worker-deployment.yaml
+++ b/ops/helm/nomad/templates/worker-deployment.yaml
@@ -28,6 +28,8 @@ spec:
         env:
         - name: NOMAD_SERVICE
           value: "worker"
+        - name: NOMAD_RELEASE
+          value: "{{ .Release.Name }}"
         - name: NOMAD_LOGSTASH_HOST
           value: "{{ .Values.logstash.host }}"
         - name: NOMAD_LOGSTASH_TCPPORT
@@ -43,22 +45,23 @@ spec:
         - name: NOMAD_ELASTIC_PORT
           value: "{{ .Values.elastic.port }}"
         - name: NOMAD_ELASTIC_INDEX_NAME
-          value: "{{ .Release.Name }}-nomad"
+          value: "{{ .Values.dbname }}"
         - name: NOMAD_MONGO_HOST
           value: "{{ .Values.mongo.host }}"
         - name: NOMAD_MONGO_PORT
           value: "{{ .Values.mongo.port }}"
         - name: NOMAD_MONGO_DB_NAME
-          value: "{{ .Release.Name }}-nomad"
+          value: "{{ .Values.dbname }}"
         - name: NOMAD_COE_REPO_DB_HOST
           value: "{{ .Values.postgres.host }}"
         - name: NOMAD_COE_REPO_DB_PORT
           value: "{{ .Values.postgres.port }}"
         - name: NOMAD_COE_REPO_DB_NAME
-          value: "{{ .Release.Name }}_nomad"
+          value: "{{ .Values.dbname }}"
         command: ["python", "-m", "celery", "worker", "-l", "info", "-A", "nomad.processing"]
+        imagePullPolicy: Always
       imagePullSecrets:
       - name: {{ .Values.images.secret }}
       volumes:
       - name: files-volume
         hostPath:
diff --git a/ops/helm/nomad/values.yaml b/ops/helm/nomad/values.yaml
index 2a15e245b45fb918705de6b69c8188942e97e79d..75fef9adb4d94330b59a7d30f84a74dedeab895e 100644
--- a/ops/helm/nomad/values.yaml
+++ b/ops/helm/nomad/values.yaml
@@ -32,6 +32,10 @@ api:
   logstash_loglevel: INFO
   ## Secret used as cryptographic seed
   secret: "defaultApiSecret"
+  ## The administrator password (the only way to ever set/change it)
+  adminPassword: "password"
+  ## Disable the dangerous reset (delete all data) function
+  disableReset: "true"
 
 ## Everthing concerning the nomad worker
 worker:
@@ -42,16 +46,20 @@ worker:
 ## Everthing concerning the nomad gui
 gui:
   replicas: 1
-  ## This port is hard coded into the frontend image
+  ## This port is hard-coded into the frontend image and cannot easily be changed
   port: 8080
 
+## Everything concerning the proxy that combines gui, api, and kibana.
+#  It is run via a NodePort service.
 proxy:
   port: 80
-  nodePort: 30001
+  nodePort: 30003
+  nodeIP: 130.183.207.116
   external:
-    host: "localhost"
-    port: 30001
-    path: "/nomad"
+    host: "enc-staging-nomad.esc.rzg.mpg.de"
+    port: 80
+    path: "/fairdi/nomad"
+    kibanaPath: "/fairdi/kibana"
 
 ## configuration of the chart dependency for rabbitmq
 rabbitmq:
@@ -61,6 +69,11 @@ rabbitmq:
     password: rabbitmq
     erlangCookie: SWQOKODSQALRPCLNMEQG
 
+## A common name/prefix for all dbs and indices.
+dbname: fairdi_nomad
+
+## Databases that are not run within the cluster.
+#  To run databases in the cluster, use the nomad-full helm chart.
 mongo:
   host: enc-preprocessing-nomad.esc
   port: 37017
@@ -83,4 +96,4 @@ kibana:
 
 ## Everything concerning the data that is used by the service
 volumes:
-  files: /nomad/nomadlab/nomad-FAIRDI/files
+  files: /scratch/nomad-fair/fs
diff --git a/ops/kubernetes/nomad/deployments.yml b/ops/kubernetes/nomad/deployments.yml
deleted file mode 100644
index f5a31d22a1d6a87c20be69ba24cde69c6f961564..0000000000000000000000000000000000000000
--- a/ops/kubernetes/nomad/deployments.yml
+++ /dev/null
@@ -1,97 +0,0 @@
-apiVersion: apps/v1
-kind: Deployment
-metadata:
-  name: nomad-postgres-deployment
-  labels:
-    app: nomad-postgres
-spec:
-  replicas: 1
-  selector:
-    matchLabels:
-      app: nomad-postgres
-  template:
-    metadata:
-      labels:
-        app: nomad-postgres
-    spec:
-      containers:
-      - name: nomad-postgres
-        image: postgres:9.4
-        environment:
-          POSTGRES_PASSWORD: 'nomad'
-          POSTGRES_USER: 'postgres'
-          POSTGRES_DB: 'nomad'
-        ports:
-        - containerPort: 5432
-        volumeMounts:
-        - mountPath: /var/lib/postgresql/data
-          name: data-volume
-      volumes:
-      - name: data-volume
-        hostPath:
-          path: /scratch/nomad-fair/postgres
-          type: Directory
----
-apiVersion: apps/v1
-kind: Deployment
-metadata:
-  name: nomad-api-deployment
-  labels:
-    app: nomad-api
-spec:
-  replicas: 1
-  selector:
-    matchLabels:
-      app: nomad-api
-  template:
-    metadata:
-      labels:
-        app: nomad-api
-    spec:
-      containers:
-      - name: nomad-api
-        image: gitlab-registry.mpcdf.mpg.de/nomad-lab/nomad-fair:latest
-        ports:
-        - containerPort: 8000
-        volumeMounts:
-        - mountPath: /app/.volumes/fs
-          name: files-volume
-        command: python -m gunicorn.app.wsgiapp -w 4 -b 0.0.0.0:8000 --timeout 300 nomad.api:app
-      imagePullSecrets:
-      - name: gitlab-mpcdf  # this is not in the git (.gitignore)
-      volumes:
-      - name: files-volume
-        hostPath:
-          path: /scratch/nomad-fair/fs
-          type: Directory
----
-apiVersion: apps/v1
-kind: Deployment
-metadata:
-  name: nomad-worker-deployment
-  labels:
-    app: nomad-worker
-spec:
-  replicas: 1
-  selector:
-    matchLabels:
-      app: nomad-worker
-  template:
-    metadata:
-      labels:
-        app: nomad-worker
-    spec:
-      containers:
-      - name: nomad-worker
-        image: gitlab-registry.mpcdf.mpg.de/nomad-lab/nomad-fair:latest
-        volumeMounts:
-        - mountPath: /app/.volumes/fs
-          name: files-volume
-        command: python -m celery worker -l info -A nomad.processing
-      imagePullSecrets:
-      - name: gitlab-mpcdf  # this is not in the git (.gitignore)
-      volumes:
-      - name: files-volume
-        hostPath:
-          path: /scratch/nomad-fair/fs
-          type: Directory
\ No newline at end of file
diff --git a/ops/kubernetes/pv.yml b/ops/kubernetes/pv.yml
deleted file mode 100644
index 786762ae234784736a1ed5c7567e18513e635b1e..0000000000000000000000000000000000000000
--- a/ops/kubernetes/pv.yml
+++ /dev/null
@@ -1,89 +0,0 @@
-apiVersion: v1
-kind: PersistentVolume
-metadata:
-  name: test-pv1
-spec:
-  capacity:
-    storage: 100Gi
-  volumeMode: Filesystem
-  accessModes:
-    - ReadWriteOnce
-  persistentVolumeReclaimPolicy: Recycle
-  hostPath:
-    path: /scratch/nomad-fair/tests/1
-    type: Directory
----
-apiVersion: v1
-kind: PersistentVolume
-metadata:
-  name: test-pv2
-spec:
-  capacity:
-    storage: 100Gi
-  volumeMode: Filesystem
-  accessModes:
-    - ReadWriteOnce
-  persistentVolumeReclaimPolicy: Recycle
-  hostPath:
-    path: /scratch/nomad-fair/tests/2
-    type: Directory
----
-apiVersion: v1
-kind: PersistentVolume
-metadata:
-  name: test-pv3
-spec:
-  capacity:
-    storage: 100Gi
-  volumeMode: Filesystem
-  accessModes:
-    - ReadWriteOnce
-  persistentVolumeReclaimPolicy: Recycle
-  hostPath:
-    path: /scratch/nomad-fair/tests/3
-    type: Directory
----
-apiVersion: v1
-kind: PersistentVolume
-metadata:
-  name: test-pv4
-spec:
-  capacity:
-    storage: 100Gi
-  volumeMode: Filesystem
-  accessModes:
-    - ReadWriteOnce
-  persistentVolumeReclaimPolicy: Recycle
-  hostPath:
-    path: /scratch/nomad-fair/tests/4
-    type: Directory
----
-apiVersion: v1
-kind: PersistentVolume
-metadata:
-  name: test-pv5
-spec:
-  capacity:
-    storage: 100Gi
-  volumeMode: Filesystem
-  accessModes:
-    - ReadWriteOnce
-  persistentVolumeReclaimPolicy: Recycle
-  hostPath:
-    path: /scratch/nomad-fair/tests/5
-    type: Directory
----
-apiVersion: v1
-kind: PersistentVolume
-metadata:
-  name: test-pv6
-spec:
-  capacity:
-    storage: 100Gi
-  volumeMode: Filesystem
-  accessModes:
-    - ReadWriteOnce
-  persistentVolumeReclaimPolicy: Recycle
-  hostPath:
-    path: /scratch/nomad-fair/tests/6
-    type: Directory
diff --git a/ops/kubernetes/rawapi/.gitignore b/ops/kubernetes/rawapi/.gitignore
deleted file mode 100644
index e984de722d0977c902133720f6750ffafb547b10..0000000000000000000000000000000000000000
--- a/ops/kubernetes/rawapi/.gitignore
+++ /dev/null
@@ -1 +0,0 @@
-secrets.yml
\ No newline at end of file
diff --git a/ops/kubernetes/rawapi/README.md b/ops/kubernetes/rawapi/README.md
deleted file mode 100644
index 5237d37157179d21c02870faa04decbd57ecafb7..0000000000000000000000000000000000000000
--- a/ops/kubernetes/rawapi/README.md
+++ /dev/null
@@ -1,5 +0,0 @@
-Deployment requires a secret to log into gitlab's docker registry:
-
-```
-kubectl create secret docker-registry gitlab-mpcdf --docker-server=gitlab-registry.mpcdf.mpg.de --docker-username=<your-user-name > --docker-password=<yourpass> --docker-email=<email>
-```
\ No newline at end of file
diff --git a/ops/kubernetes/rawapi/deployments.yml b/ops/kubernetes/rawapi/deployments.yml
deleted file mode 100644
index e3dea03612a9d69290086d743f5eb45caa20a257..0000000000000000000000000000000000000000
--- a/ops/kubernetes/rawapi/deployments.yml
+++ /dev/null
@@ -1,31 +0,0 @@
-apiVersion: apps/v1
-kind: Deployment
-metadata:
-  name: nomad-rawapi-deployment
-  labels:
-    app: nomad-rawapi
-spec:
-  replicas: 1
-  selector:
-    matchLabels:
-      app: nomad-rawapi
-  template:
-    metadata:
-      labels:
-        app: nomad-rawapi
-    spec:
-      containers:
-      - name: nomad-rawapi
-        image: gitlab-registry.mpcdf.mpg.de/nomad-lab/nomad-fair/rawapi:latest
-        ports:
-        - containerPort: 8000
-        volumeMounts:
-        - mountPath: /raw
-          name: raw-volume
-      imagePullSecrets:
-      - name: gitlab-mpcdf  # this is not in the git (.gitignore)
-      volumes:
-      - name: raw-volume
-        hostPath:
-          path: /mnt1/raw  # this is just my minikube mount to a test dir in nomad-FAIR/tests/data/rawapi
-          type: Directory
\ No newline at end of file
diff --git a/ops/kubernetes/rawapi/services.yml b/ops/kubernetes/rawapi/services.yml
deleted file mode 100644
index 88caf81e21a3ea7666918c951a54130857d95da9..0000000000000000000000000000000000000000
--- a/ops/kubernetes/rawapi/services.yml
+++ /dev/null
@@ -1,13 +0,0 @@
-kind: Service
-apiVersion: v1
-metadata:
-  name: nomad-rawapi
-spec:
-  selector:
-    app: nomad-rawapi
-  ports:
-  - protocol: TCP
-    port: 8000
-    targetPort: 8000
-    nodePort: 30001
-  type: NodePort
\ No newline at end of file
diff --git a/requirements-dev.txt b/requirements-dev.txt
index e0266e196690d2feffbe381efa8a0aa384abf800..0c1efbdc1bb8a19df6d773ee935582e711c2ca0d 100644
--- a/requirements-dev.txt
+++ b/requirements-dev.txt
@@ -6,7 +6,7 @@ pylint_plugin_utils
 astroid==2.0.4  # bug in pylint_mongoengine with latest version
 pylint_mongoengine
 pycodestyle
-pytest==3.10.0
+pytest==3.10.0  # celery fixtures not compatible with 4.x
 pytest-timeout
 pytest-cov
 rope
diff --git a/requirements.txt b/requirements.txt
index 887aa23d1d00687db6d0c9265f8b876de43597dd..2d53f0561a468e906b5c73c24f9b8bc0491b221f 100644
--- a/requirements.txt
+++ b/requirements.txt
@@ -3,7 +3,7 @@ celery[redis]
 elasticsearch-dsl>=6.0.0,<7.0.0
 mongoengine
 flask
-flask-restful
+flask-restplus
 flask-cors
 flask_httpauth
 itsdangerous
@@ -13,6 +13,7 @@ gitpython
 gunicorn
 structlog
 recommonmark
+m2r
 requests
 click
 sphinx
@@ -20,8 +21,12 @@ sphinxcontrib.httpdomain
 sphinx_rtd_theme
 zipstream
 bagit
-psycopg2
+psycopg2-binary
 sqlalchemy
 bcrypt
 matid
-ase==3.15.0
\ No newline at end of file
+ase==3.15.0
+filelock
+ujson
+bravado
+PyJWT
diff --git a/stats.sh b/stats.sh
new file mode 100755
index 0000000000000000000000000000000000000000..f89fdb2452a945a2f8fe39b6d40b154a2017d905
--- /dev/null
+++ b/stats.sh
@@ -0,0 +1,7 @@
+#!/bin/sh
+
+echo "LOC with pygount (pip install pygount)"
+
+echo "backend:       `pygount nomad/ -s py | awk '{print $1}' | paste -sd+ - | bc`"
+echo "backend tests: `pygount tests/ -s py | awk '{print $1}' | paste -sd+ - | bc`"
+echo "frontend:      `pygount gui/src -s js | awk '{print $1}' | paste -sd+ - | bc`"
\ No newline at end of file
diff --git a/tests/bravado_flaks.py b/tests/bravado_flaks.py
new file mode 100644
index 0000000000000000000000000000000000000000..e5d664e4ad280916c70ac93c6ea0198d8502aeb8
--- /dev/null
+++ b/tests/bravado_flaks.py
@@ -0,0 +1,153 @@
+# Copyright 2018 Markus Scheidgen
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#   http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+from urllib.parse import urlencode
+from bravado.http_client import HttpClient
+from bravado.http_future import HttpFuture
+from bravado_core.response import IncomingResponse
+import json
+
+
+class FlaskTestHttpClient(HttpClient):
+    def __init__(self, flask_test_client, headers={}):
+        self._flask_client = flask_test_client
+        self._headers = headers
+
+    def request(self, request_params, *args, **kwargs):
+        """
+        Taken from `bravado.http_client.HttpClient`.
+
+        Args:
+            request_params (dict): complete request data. e.g. url, method, headers, body, params,
+                connect_timeout, timeout, etc.
+            operation (`bravado_core.operation.Operation`): operation that this http request
+                is for. Defaults to None - in which case, we're obviously just retrieving a Swagger
+                Spec.
+            response_callbacks: List of callables to post-process the incoming response.
+                Expects args incoming_response and operation.
+            also_return_response: Consult the constructor documentation for
+                `bravado.http_future.HttpFuture`.
+        Returns:
+            `bravado_core.http_future.HttpFuture`: HTTP Future object
+        """
+        request_params.setdefault('headers', {}).update(self._headers)
+        test_future = FlaskTestFutureAdapter(request_params, self._flask_client)
+
+        return HttpFuture(test_future, FlaskTestResponseAdapter, *args, **kwargs)
+
+
+class FlaskTestFutureAdapter:
+    """
+    Mimics a :class:`concurrent.futures.Future` for the purposes of making it work with
+    Bravado's :class:`bravado.http_future.HttpFuture` when simulating calls to a Flask API.
+    Those calls will be validated by Bravado.
+
+    Args:
+        request_params (dict): Request parameters provided to
+            :class:`bravado.http_client.HttpClient` interface.
+        flask_client (`flask.testing.FlaskClient`): Flask test client to send the request to.
+        response_encoding (str): Encoding that will be used to decode response's body.
+            If set to None then the body won't be decoded.
+    """
+
+    def __init__(self, request_params, flask_client, response_encoding='utf-8'):
+        self._flask_client = flask_client
+        self._request_params = request_params
+        self._response_encoding = response_encoding
+
+        self.timeout_errors = None
+        self.connection_errors = None
+
+    def result(self, **_):
+        """
+        Args:
+            **_: Ignore all the keyword arguments (right now it's just timeout) passed by Bravado.
+        """
+        # Bravado will create the URL by appending request path to 'http://localhost'
+        path = self._request_params['url'].replace('http://localhost', '')
+        method = self._request_params.get('method')
+
+        query = urlencode(self._request_params.get('params', {}))
+        if query is not None and query != '':
+            url = '%s?%s' % (path, query)
+        else:
+            url = path
+
+        data = self._request_params.get('data')
+
+        function = getattr(self._flask_client, method.lower())
+
+        files = self._request_params.get('files', [])
+        if len(files) > 1:
+            raise NotImplementedError
+        if len(files) == 1:
+            _, (_, f) = files[0]
+            data = f
+
+        return function(
+            url, headers=self._request_params.get('headers'), data=data)
+
+
+class FlaskTestResponseAdapter(IncomingResponse):
+    """
+    Wraps a response from the Flask test client to provide the uniform interface
+    expected by Bravado's :class:`bravado.http_future.HttpFuture`.
+    Args:
+        flask_response: Response to a call simulated with flask's test client.
+    """
+
+    def __init__(self, flask_response):
+        self._response = flask_response
+
+    @property
+    def status_code(self):
+        """
+        Returns:
+            int: HTTP status code
+        """
+        return self._response.status_code
+
+    @property
+    def text(self):
+        """
+        Returns:
+            str: Textual representation of the response's body.
+        """
+        return self._response.get_data(as_text=True)
+
+    @property
+    def reason(self):
+        """
+        Returns:
+            str: Reason-phrase of the HTTP response (e.g. "OK", or "Not Found")
+        """
+        # status strings from flask look like this: "200 OK"
+        return self._response.status[4:]
+
+    @property
+    def headers(self):
+        """
+        Returns:
+            dict: Headers attached to the response.
+        """
+        return self._response.headers
+
+    def json(self, **kwargs):
+        """
+        Args:
+            **kwargs: This is a part of the interface, but we don't do anything with it.
+        Returns:
+            dict: JSON representation of the response's body.
+        """
+        return json.loads(self._response.data)
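+
+
+# A minimal usage sketch (assuming a Flask test client and a served swagger.json;
+# the URL and headers are illustrative):
+#
+#     from bravado.client import SwaggerClient
+#     http_client = FlaskTestHttpClient(flask_test_client)
+#     client = SwaggerClient.from_url(
+#         'http://localhost/api/swagger.json', http_client=http_client)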
diff --git a/tests/conftest.py b/tests/conftest.py
index 31ef00f3d6472f7e23c43c71dd8a61bd0c28e041..116ff506e56e12a13c7566ebba448e13eaa241fc 100644
--- a/tests/conftest.py
+++ b/tests/conftest.py
@@ -3,8 +3,9 @@ import logging
 from sqlalchemy.orm import Session
 from mongoengine import connect
 from mongoengine.connection import disconnect
+from contextlib import contextmanager
 
-from nomad import config, infrastructure, coe_repo
+from nomad import config, infrastructure
 
 
 @pytest.fixture(scope="session")
@@ -15,6 +16,12 @@ def monkeysession(request):
     mpatch.undo()
 
 
+@pytest.fixture(scope='session', autouse=True)
+def nomad_files(monkeysession):
+    monkeysession.setattr('nomad.config.fs', config.FSConfig(
+        tmp='.volumes/test_fs/tmp', objects='.volumes/test_fs/objects'))
+
+
 @pytest.fixture(scope='session', autouse=True)
 def nomad_logging():
     config.logstash = config.logstash._replace(enabled=False)
@@ -102,63 +109,92 @@ def elastic():
     assert infrastructure.elastic_client is not None
 
 
-@pytest.fixture(scope='session')
-def repository_db(monkeysession):
-    infrastructure.setup_repository_db()
-    assert infrastructure.repository_db_conn is not None
+@contextmanager
+def create_repository_db(monkeysession=None, **kwargs):
+    """
+    A context manager that sets up and tears down a test db and monkeypatches the
+    respective global infrastructure variables.
+    """
+    db_args = dict(dbname='test_nomad_fair_repo_db')
+    db_args.update(**kwargs)
+
+    old_config = config.repository_db
+    new_config = config.RepositoryDBConfig(
+        old_config.host,
+        old_config.port,
+        db_args.get('dbname'),
+        old_config.user,
+        old_config.password)
+
+    if monkeysession is not None:
+        monkeysession.setattr('nomad.config.repository_db', new_config)
+
+    connection, _ = infrastructure.sqlalchemy_repository_db(**db_args)
+    assert connection is not None
 
     # we use a transaction around the session to rollback anything that happens within
     # test execution
-    trans = infrastructure.repository_db_conn.begin()
-    session = Session(bind=infrastructure.repository_db_conn, autocommit=True)
-    monkeysession.setattr('nomad.infrastructure.repository_db', session)
-    yield infrastructure.repository_db
+    trans = connection.begin()
+    db = Session(bind=connection, autocommit=True)
+
+    old_connection, old_db = None, None
+    if monkeysession is not None:
+        from nomad.infrastructure import repository_db_conn, repository_db
+        old_connection, old_db = repository_db_conn, repository_db
+        monkeysession.setattr('nomad.infrastructure.repository_db_conn', connection)
+        monkeysession.setattr('nomad.infrastructure.repository_db', db)
+
+    yield db
+
+    if monkeysession is not None:
+        monkeysession.setattr('nomad.infrastructure.repository_db_conn', old_connection)
+        monkeysession.setattr('nomad.infrastructure.repository_db', old_db)
+        monkeysession.setattr('nomad.config.repository_db', old_config)
+
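+    # rolling back the outer transaction discards everything the tests wrote to the db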
     trans.rollback()
-    session.close()
+    db.expunge_all()
+    db.invalidate()
+    db.close_all()
 
+    connection.close()
+    connection.engine.dispose()
 
-@pytest.fixture(scope='session')
+
+@pytest.fixture(scope='module')
+def repository_db(monkeysession):
+    with create_repository_db(monkeysession, exists=False) as db:
+        yield db
+
+
+@pytest.fixture(scope='function')
+def expandable_repo_db(monkeysession, repository_db):
+    with create_repository_db(monkeysession, dbname='test_nomad_fair_expandable_repo_db', exists=False) as db:
+        yield db
+
+
+@pytest.fixture(scope='function')
+def clean_repository_db(repository_db):
+    # note: this will not reset the id counters
+    repository_db.execute('TRUNCATE uploads CASCADE;')
+    yield repository_db
+
+
+@pytest.fixture(scope='module')
 def test_user(repository_db):
+    from nomad import coe_repo
     return coe_repo.ensure_test_user(email='sheldon.cooper@nomad-fairdi.tests.de')
 
 
-@pytest.fixture(scope='session')
+@pytest.fixture(scope='module')
 def other_test_user(repository_db):
+    from nomad import coe_repo
     return coe_repo.ensure_test_user(email='leonard.hofstadter@nomad-fairdi.tests.de')
 
 
-@pytest.fixture(scope='function')
-def mocksearch(monkeypatch):
-    uploads_by_hash = {}
-    uploads_by_id = {}
-    by_archive_id = {}
-
-    def persist(calc):
-        uploads_by_hash.setdefault(calc.upload_hash, []).append(calc)
-        uploads_by_id.setdefault(calc.upload_id, []).append(calc)
-        by_archive_id[calc.archive_id] = calc
-
-    def upload_exists(upload_hash):
-        return upload_hash in uploads_by_hash
-
-    def delete_upload(upload_id):
-        if upload_id in uploads_by_id:
-            for calc in uploads_by_id[upload_id]:
-                del(by_archive_id[calc.archive_id])
-            upload_hash = next(uploads_by_id[upload_id]).upload_hash
-            del(uploads_by_id[upload_id])
-            del(uploads_by_hash[upload_hash])
-
-    def upload_calcs(upload_id):
-        return uploads_by_id.get(upload_id, [])
-
-    monkeypatch.setattr('nomad.repo.RepoCalc.persist', persist)
-    monkeypatch.setattr('nomad.repo.RepoCalc.upload_exists', upload_exists)
-    monkeypatch.setattr('nomad.repo.RepoCalc.delete_upload', delete_upload)
-    monkeypatch.setattr('nomad.repo.RepoCalc.upload_calcs', upload_calcs)
-    monkeypatch.setattr('nomad.repo.RepoCalc.unstage', lambda *args, **kwargs: None)
-
-    return by_archive_id
+@pytest.fixture(scope='module')
+def admin_user(repository_db):
+    from nomad import coe_repo
+    return coe_repo.admin_user()
 
 
 @pytest.fixture(scope='function')
diff --git a/tests/data/migration/archive/upload/archive.tar.gz b/tests/data/migration/archive/upload/archive.tar.gz
new file mode 100644
index 0000000000000000000000000000000000000000..a665e418544655927532e20d5c5b3d3c2250e573
Binary files /dev/null and b/tests/data/migration/archive/upload/archive.tar.gz differ
diff --git a/tests/data/migration/baseline/upload/1/template.json b/tests/data/migration/baseline/upload/1/template.json
new file mode 100644
index 0000000000000000000000000000000000000000..b85b5bd1742a1b1591c39adbd9086565e9ec0cc3
--- /dev/null
+++ b/tests/data/migration/baseline/upload/1/template.json
@@ -0,0 +1,110 @@
+{
+    "section_run": [
+      {
+        "_name": "section_run",
+        "_gIndex": 0,
+        "program_name": "VASP",
+        "program_version": "4.6.35  3Apr08 complex  parallel LinuxIFC",
+        "program_basis_set_type": "plane waves",
+        "section_method": [
+          {
+            "_name": "section_method",
+            "_gIndex": 0,
+
+            "electronic_structure_method": "DFT",
+            "section_XC_functionals": [
+              {
+                "_name": "section_XC_functionals",
+                "_gIndex": 0,
+                "XC_functional_name": "GGA_X_PBE"
+              }
+            ]
+          }
+        ],
+        "section_system": [
+          {
+            "_name": "section_system",
+            "_gIndex": 0,
+            "simulation_cell": [
+              [
+                5.76372622e-10,
+                0.0,
+                0.0
+              ],
+              [
+                0.0,
+                5.76372622e-10,
+                0.0
+              ],
+              [
+                0.0,
+                0.0,
+                4.0755698899999997e-10
+              ]
+            ],
+            "configuration_periodic_dimensions": [
+              true,
+              true,
+              true
+            ],
+            "atom_positions": [
+              [
+                2.88186311e-10,
+                0.0,
+                2.0377849449999999e-10
+              ],
+              [
+                0.0,
+                2.88186311e-10,
+                2.0377849449999999e-10
+              ],
+              [
+                0.0,
+                0.0,
+                0.0
+              ],
+              [
+                2.88186311e-10,
+                2.88186311e-10,
+                0.0
+              ]
+            ],
+            "atom_labels": [
+              "Br",
+              "K",
+              "Si",
+              "Si"
+            ]
+          }
+        ],
+        "section_single_configuration_calculation": [
+          {
+            "_name": "section_single_configuration_calculation",
+            "_gIndex": 0,
+            "single_configuration_calculation_to_system_ref": 0,
+            "single_configuration_to_calculation_method_ref": 0,
+            "energy_free": -1.5936767191492225e-18,
+            "energy_total": -1.5935696296699573e-18,
+            "energy_total_T0": -3.2126683561907e-22
+          }
+        ],
+        "section_sampling_method": [
+          {
+            "_name": "section_sampling_method",
+            "_gIndex": 0,
+            "sampling_method": "geometry_optimization"
+          }
+        ],
+        "section_frame_sequence": [
+          {
+            "_name": "section_frame_sequence",
+            "_gIndex": 0,
+            "frame_sequence_to_sampling_ref": 0,
+            "frame_sequence_local_frames_ref": [
+              0
+            ]
+          }
+        ]
+      }
+    ]
+  }
\ No newline at end of file
diff --git a/tests/data/migration/baseline/upload/2/template.json b/tests/data/migration/baseline/upload/2/template.json
new file mode 100644
index 0000000000000000000000000000000000000000..b85b5bd1742a1b1591c39adbd9086565e9ec0cc3
--- /dev/null
+++ b/tests/data/migration/baseline/upload/2/template.json
@@ -0,0 +1,110 @@
+{
+    "section_run": [
+      {
+        "_name": "section_run",
+        "_gIndex": 0,
+        "program_name": "VASP",
+        "program_version": "4.6.35  3Apr08 complex  parallel LinuxIFC",
+        "program_basis_set_type": "plane waves",
+        "section_method": [
+          {
+            "_name": "section_method",
+            "_gIndex": 0,
+
+            "electronic_structure_method": "DFT",
+            "section_XC_functionals": [
+              {
+                "_name": "section_XC_functionals",
+                "_gIndex": 0,
+                "XC_functional_name": "GGA_X_PBE"
+              }
+            ]
+          }
+        ],
+        "section_system": [
+          {
+            "_name": "section_system",
+            "_gIndex": 0,
+            "simulation_cell": [
+              [
+                5.76372622e-10,
+                0.0,
+                0.0
+              ],
+              [
+                0.0,
+                5.76372622e-10,
+                0.0
+              ],
+              [
+                0.0,
+                0.0,
+                4.0755698899999997e-10
+              ]
+            ],
+            "configuration_periodic_dimensions": [
+              true,
+              true,
+              true
+            ],
+            "atom_positions": [
+              [
+                2.88186311e-10,
+                0.0,
+                2.0377849449999999e-10
+              ],
+              [
+                0.0,
+                2.88186311e-10,
+                2.0377849449999999e-10
+              ],
+              [
+                0.0,
+                0.0,
+                0.0
+              ],
+              [
+                2.88186311e-10,
+                2.88186311e-10,
+                0.0
+              ]
+            ],
+            "atom_labels": [
+              "Br",
+              "K",
+              "Si",
+              "Si"
+            ]
+          }
+        ],
+        "section_single_configuration_calculation": [
+          {
+            "_name": "section_single_configuration_calculation",
+            "_gIndex": 0,
+            "single_configuration_calculation_to_system_ref": 0,
+            "single_configuration_to_calculation_method_ref": 0,
+            "energy_free": -1.5936767191492225e-18,
+            "energy_total": -1.5935696296699573e-18,
+            "energy_total_T0": -3.2126683561907e-22
+          }
+        ],
+        "section_sampling_method": [
+          {
+            "_name": "section_sampling_method",
+            "_gIndex": 0,
+            "sampling_method": "geometry_optimization"
+          }
+        ],
+        "section_frame_sequence": [
+          {
+            "_name": "section_frame_sequence",
+            "_gIndex": 0,
+            "frame_sequence_to_sampling_ref": 0,
+            "frame_sequence_local_frames_ref": [
+              0
+            ]
+          }
+        ]
+      }
+    ]
+  }
\ No newline at end of file
diff --git a/tests/data/migration/example_source_db.sql b/tests/data/migration/example_source_db.sql
new file mode 100644
index 0000000000000000000000000000000000000000..ec92f855e9c95c8c77002c7c7e3ab3d3a7cbf05a
--- /dev/null
+++ b/tests/data/migration/example_source_db.sql
@@ -0,0 +1,50 @@
+SET statement_timeout = 0;
+SET lock_timeout = 0;
+SET client_encoding = 'UTF8';
+SET standard_conforming_strings = on;
+SELECT pg_catalog.set_config('search_path', '', false);
+SET check_function_bodies = false;
+SET client_min_messages = warning;
+
+TRUNCATE TABLE public.users CASCADE;
+INSERT INTO public.users VALUES (1, 'one', 'one', 'one', 'one', NULL, NULL, NULL);
+INSERT INTO public.users VALUES (2, 'two', 'two', 'two', 'two', NULL, NULL, NULL);
+INSERT INTO public.calculations VALUES (NULL, NULL, NULL, NULL, 0, false, 1, NULL);
+INSERT INTO public.calculations VALUES (NULL, NULL, NULL, NULL, 0, false, 2, NULL);
+INSERT INTO public.codefamilies VALUES (1, 'VASP');
+INSERT INTO public.codeversions VALUES (1, 1, '4.6.35');
+-- topics
+INSERT INTO public.topics VALUES (1, 90, 'tetragonal');
+INSERT INTO public.topics VALUES (2, 220, 'VASP');
+INSERT INTO public.topics VALUES (3, 50, 'bulk');
+INSERT INTO public.topics VALUES (4, 75, 'GGA');
+INSERT INTO public.topics VALUES (5, 80, 'plane waves');
+INSERT INTO public.topics VALUES (6, 10, 'Br');
+INSERT INTO public.topics VALUES (7, 10, 'K');
+INSERT INTO public.topics VALUES (8, 10, 'Si');
+-- mapping topics to calcs via tags
+INSERT INTO public.tags VALUES(1, 1);
+INSERT INTO public.tags VALUES(2, 1);
+INSERT INTO public.tags VALUES(1, 2);
+INSERT INTO public.tags VALUES(2, 2);
+INSERT INTO public.tags VALUES(1, 3);
+INSERT INTO public.tags VALUES(2, 3);
+INSERT INTO public.tags VALUES(1, 4);
+INSERT INTO public.tags VALUES(2, 4);
+INSERT INTO public.tags VALUES(1, 5);
+INSERT INTO public.tags VALUES(2, 5);
+INSERT INTO public.tags VALUES(1, 6);
+INSERT INTO public.tags VALUES(2, 6);
+INSERT INTO public.tags VALUES(1, 7);
+INSERT INTO public.tags VALUES(2, 7);
+INSERT INTO public.tags VALUES(1, 8);
+INSERT INTO public.tags VALUES(2, 8);
+
+INSERT INTO public.metadata VALUES (1, NULL, NULL, NULL, NULL, 'BrKSi2', '2019-01-01 12:00:00', NULL, decode('["$EXTRACTED/upload/1/template.json"]', 'escape'), 1, NULL);
+INSERT INTO public.metadata VALUES (1, NULL, NULL, NULL, NULL, 'BrKSi2', '2015-01-01 13:00:00', NULL, decode('["$EXTRACTED/upload/2/template.json"]', 'escape'), 2, NULL);
+INSERT INTO public.spacegroups VALUES (1, 123);
+INSERT INTO public.spacegroups VALUES (2, 123);
+INSERT INTO public.user_metadata VALUES (1, 0, 'label1');
+INSERT INTO public.user_metadata VALUES (2, 1, 'label2');
+INSERT INTO public.ownerships VALUES (1, 1);
+INSERT INTO public.ownerships VALUES (2, 2);
\ No newline at end of file
diff --git a/tests/data/migration/failed_calc/upload/1/template.json b/tests/data/migration/failed_calc/upload/1/template.json
new file mode 100644
index 0000000000000000000000000000000000000000..b85b5bd1742a1b1591c39adbd9086565e9ec0cc3
--- /dev/null
+++ b/tests/data/migration/failed_calc/upload/1/template.json
@@ -0,0 +1,110 @@
+{
+    "section_run": [
+      {
+        "_name": "section_run",
+        "_gIndex": 0,
+        "program_name": "VASP",
+        "program_version": "4.6.35  3Apr08 complex  parallel LinuxIFC",
+        "program_basis_set_type": "plane waves",
+        "section_method": [
+          {
+            "_name": "section_method",
+            "_gIndex": 0,
+
+            "electronic_structure_method": "DFT",
+            "section_XC_functionals": [
+              {
+                "_name": "section_XC_functionals",
+                "_gIndex": 0,
+                "XC_functional_name": "GGA_X_PBE"
+              }
+            ]
+          }
+        ],
+        "section_system": [
+          {
+            "_name": "section_system",
+            "_gIndex": 0,
+            "simulation_cell": [
+              [
+                5.76372622e-10,
+                0.0,
+                0.0
+              ],
+              [
+                0.0,
+                5.76372622e-10,
+                0.0
+              ],
+              [
+                0.0,
+                0.0,
+                4.0755698899999997e-10
+              ]
+            ],
+            "configuration_periodic_dimensions": [
+              true,
+              true,
+              true
+            ],
+            "atom_positions": [
+              [
+                2.88186311e-10,
+                0.0,
+                2.0377849449999999e-10
+              ],
+              [
+                0.0,
+                2.88186311e-10,
+                2.0377849449999999e-10
+              ],
+              [
+                0.0,
+                0.0,
+                0.0
+              ],
+              [
+                2.88186311e-10,
+                2.88186311e-10,
+                0.0
+              ]
+            ],
+            "atom_labels": [
+              "Br",
+              "K",
+              "Si",
+              "Si"
+            ]
+          }
+        ],
+        "section_single_configuration_calculation": [
+          {
+            "_name": "section_single_configuration_calculation",
+            "_gIndex": 0,
+            "single_configuration_calculation_to_system_ref": 0,
+            "single_configuration_to_calculation_method_ref": 0,
+            "energy_free": -1.5936767191492225e-18,
+            "energy_total": -1.5935696296699573e-18,
+            "energy_total_T0": -3.2126683561907e-22
+          }
+        ],
+        "section_sampling_method": [
+          {
+            "_name": "section_sampling_method",
+            "_gIndex": 0,
+            "sampling_method": "geometry_optimization"
+          }
+        ],
+        "section_frame_sequence": [
+          {
+            "_name": "section_frame_sequence",
+            "_gIndex": 0,
+            "frame_sequence_to_sampling_ref": 0,
+            "frame_sequence_local_frames_ref": [
+              0
+            ]
+          }
+        ]
+      }
+    ]
+  }
\ No newline at end of file
diff --git a/tests/data/migration/failed_calc/upload/2/template.json b/tests/data/migration/failed_calc/upload/2/template.json
new file mode 100644
index 0000000000000000000000000000000000000000..108dd63a261cc4717db8568bd04d62e373aa4312
--- /dev/null
+++ b/tests/data/migration/failed_calc/upload/2/template.json
@@ -0,0 +1,4 @@
+{
+    "section_run": [
+      {
+        not parsable
\ No newline at end of file
diff --git a/tests/data/migration/failed_upload/upload/archive.tar.gz b/tests/data/migration/failed_upload/upload/archive.tar.gz
new file mode 100644
index 0000000000000000000000000000000000000000..9f53f813bbf0d336c992e86d771b2b44b1cc39f3
--- /dev/null
+++ b/tests/data/migration/failed_upload/upload/archive.tar.gz
@@ -0,0 +1 @@
+not a tar.gz
diff --git a/tests/data/migration/missing_calc/upload/1/template.json b/tests/data/migration/missing_calc/upload/1/template.json
new file mode 100644
index 0000000000000000000000000000000000000000..b85b5bd1742a1b1591c39adbd9086565e9ec0cc3
--- /dev/null
+++ b/tests/data/migration/missing_calc/upload/1/template.json
@@ -0,0 +1,110 @@
+{
+    "section_run": [
+      {
+        "_name": "section_run",
+        "_gIndex": 0,
+        "program_name": "VASP",
+        "program_version": "4.6.35  3Apr08 complex  parallel LinuxIFC",
+        "program_basis_set_type": "plane waves",
+        "section_method": [
+          {
+            "_name": "section_method",
+            "_gIndex": 0,
+
+            "electronic_structure_method": "DFT",
+            "section_XC_functionals": [
+              {
+                "_name": "section_XC_functionals",
+                "_gIndex": 0,
+                "XC_functional_name": "GGA_X_PBE"
+              }
+            ]
+          }
+        ],
+        "section_system": [
+          {
+            "_name": "section_system",
+            "_gIndex": 0,
+            "simulation_cell": [
+              [
+                5.76372622e-10,
+                0.0,
+                0.0
+              ],
+              [
+                0.0,
+                5.76372622e-10,
+                0.0
+              ],
+              [
+                0.0,
+                0.0,
+                4.0755698899999997e-10
+              ]
+            ],
+            "configuration_periodic_dimensions": [
+              true,
+              true,
+              true
+            ],
+            "atom_positions": [
+              [
+                2.88186311e-10,
+                0.0,
+                2.0377849449999999e-10
+              ],
+              [
+                0.0,
+                2.88186311e-10,
+                2.0377849449999999e-10
+              ],
+              [
+                0.0,
+                0.0,
+                0.0
+              ],
+              [
+                2.88186311e-10,
+                2.88186311e-10,
+                0.0
+              ]
+            ],
+            "atom_labels": [
+              "Br",
+              "K",
+              "Si",
+              "Si"
+            ]
+          }
+        ],
+        "section_single_configuration_calculation": [
+          {
+            "_name": "section_single_configuration_calculation",
+            "_gIndex": 0,
+            "single_configuration_calculation_to_system_ref": 0,
+            "single_configuration_to_calculation_method_ref": 0,
+            "energy_free": -1.5936767191492225e-18,
+            "energy_total": -1.5935696296699573e-18,
+            "energy_total_T0": -3.2126683561907e-22
+          }
+        ],
+        "section_sampling_method": [
+          {
+            "_name": "section_sampling_method",
+            "_gIndex": 0,
+            "sampling_method": "geometry_optimization"
+          }
+        ],
+        "section_frame_sequence": [
+          {
+            "_name": "section_frame_sequence",
+            "_gIndex": 0,
+            "frame_sequence_to_sampling_ref": 0,
+            "frame_sequence_local_frames_ref": [
+              0
+            ]
+          }
+        ]
+      }
+    ]
+  }
\ No newline at end of file
diff --git a/tests/data/migration/missmatch/upload/1/template.json b/tests/data/migration/missmatch/upload/1/template.json
new file mode 100644
index 0000000000000000000000000000000000000000..b85b5bd1742a1b1591c39adbd9086565e9ec0cc3
--- /dev/null
+++ b/tests/data/migration/missmatch/upload/1/template.json
@@ -0,0 +1,110 @@
+{
+    "section_run": [
+      {
+        "_name": "section_run",
+        "_gIndex": 0,
+        "program_name": "VASP",
+        "program_version": "4.6.35  3Apr08 complex  parallel LinuxIFC",
+        "program_basis_set_type": "plane waves",
+        "section_method": [
+          {
+            "_name": "section_method",
+            "_gIndex": 0,
+
+            "electronic_structure_method": "DFT",
+            "section_XC_functionals": [
+              {
+                "_name": "section_XC_functionals",
+                "_gIndex": 0,
+                "XC_functional_name": "GGA_X_PBE"
+              }
+            ]
+          }
+        ],
+        "section_system": [
+          {
+            "_name": "section_system",
+            "_gIndex": 0,
+            "simulation_cell": [
+              [
+                5.76372622e-10,
+                0.0,
+                0.0
+              ],
+              [
+                0.0,
+                5.76372622e-10,
+                0.0
+              ],
+              [
+                0.0,
+                0.0,
+                4.0755698899999997e-10
+              ]
+            ],
+            "configuration_periodic_dimensions": [
+              true,
+              true,
+              true
+            ],
+            "atom_positions": [
+              [
+                2.88186311e-10,
+                0.0,
+                2.0377849449999999e-10
+              ],
+              [
+                0.0,
+                2.88186311e-10,
+                2.0377849449999999e-10
+              ],
+              [
+                0.0,
+                0.0,
+                0.0
+              ],
+              [
+                2.88186311e-10,
+                2.88186311e-10,
+                0.0
+              ]
+            ],
+            "atom_labels": [
+              "Br",
+              "K",
+              "Si",
+              "Si"
+            ]
+          }
+        ],
+        "section_single_configuration_calculation": [
+          {
+            "_name": "section_single_configuration_calculation",
+            "_gIndex": 0,
+            "single_configuration_calculation_to_system_ref": 0,
+            "single_configuration_to_calculation_method_ref": 0,
+            "energy_free": -1.5936767191492225e-18,
+            "energy_total": -1.5935696296699573e-18,
+            "energy_total_T0": -3.2126683561907e-22
+          }
+        ],
+        "section_sampling_method": [
+          {
+            "_name": "section_sampling_method",
+            "_gIndex": 0,
+            "sampling_method": "geometry_optimization"
+          }
+        ],
+        "section_frame_sequence": [
+          {
+            "_name": "section_frame_sequence",
+            "_gIndex": 0,
+            "frame_sequence_to_sampling_ref": 0,
+            "frame_sequence_local_frames_ref": [
+              0
+            ]
+          }
+        ]
+      }
+    ]
+  }
\ No newline at end of file
diff --git a/tests/data/migration/missmatch/upload/2/template.json b/tests/data/migration/missmatch/upload/2/template.json
new file mode 100644
index 0000000000000000000000000000000000000000..4a2b1b571e9214d9bd033b1baa97d9adb4e8c317
--- /dev/null
+++ b/tests/data/migration/missmatch/upload/2/template.json
@@ -0,0 +1,110 @@
+{
+    "section_run": [
+      {
+        "_name": "section_run",
+        "_gIndex": 0,
+        "program_name": "VASP",
+        "program_version": "4.6.35  3Apr08 complex  parallel LinuxIFC",
+        "program_basis_set_type": "plane waves",
+        "section_method": [
+          {
+            "_name": "section_method",
+            "_gIndex": 0,
+
+            "electronic_structure_method": "DFT",
+            "section_XC_functionals": [
+              {
+                "_name": "section_XC_functionals",
+                "_gIndex": 0,
+                "XC_functional_name": "missmatch"
+              }
+            ]
+          }
+        ],
+        "section_system": [
+          {
+            "_name": "section_system",
+            "_gIndex": 0,
+            "simulation_cell": [
+              [
+                5.76372622e-10,
+                0.0,
+                0.0
+              ],
+              [
+                0.0,
+                5.76372622e-10,
+                0.0
+              ],
+              [
+                0.0,
+                0.0,
+                4.0755698899999997e-10
+              ]
+            ],
+            "configuration_periodic_dimensions": [
+              true,
+              true,
+              true
+            ],
+            "atom_positions": [
+              [
+                2.88186311e-10,
+                0.0,
+                2.0377849449999999e-10
+              ],
+              [
+                0.0,
+                2.88186311e-10,
+                2.0377849449999999e-10
+              ],
+              [
+                0.0,
+                0.0,
+                0.0
+              ],
+              [
+                2.88186311e-10,
+                2.88186311e-10,
+                0.0
+              ]
+            ],
+            "atom_labels": [
+              "Br",
+              "K",
+              "Si",
+              "Si"
+            ]
+          }
+        ],
+        "section_single_configuration_calculation": [
+          {
+            "_name": "section_single_configuration_calculation",
+            "_gIndex": 0,
+            "single_configuration_calculation_to_system_ref": 0,
+            "single_configuration_to_calculation_method_ref": 0,
+            "energy_free": -1.5936767191492225e-18,
+            "energy_total": -1.5935696296699573e-18,
+            "energy_total_T0": -3.2126683561907e-22
+          }
+        ],
+        "section_sampling_method": [
+          {
+            "_name": "section_sampling_method",
+            "_gIndex": 0,
+            "sampling_method": "geometry_optimization"
+          }
+        ],
+        "section_frame_sequence": [
+          {
+            "_name": "section_frame_sequence",
+            "_gIndex": 0,
+            "frame_sequence_to_sampling_ref": 0,
+            "frame_sequence_local_frames_ref": [
+              0
+            ]
+          }
+        ]
+      }
+    ]
+  }
\ No newline at end of file
diff --git a/tests/data/migration/new_calc/upload/1/template.json b/tests/data/migration/new_calc/upload/1/template.json
new file mode 100644
index 0000000000000000000000000000000000000000..b85b5bd1742a1b1591c39adbd9086565e9ec0cc3
--- /dev/null
+++ b/tests/data/migration/new_calc/upload/1/template.json
@@ -0,0 +1,110 @@
+{
+    "section_run": [
+      {
+        "_name": "section_run",
+        "_gIndex": 0,
+        "program_name": "VASP",
+        "program_version": "4.6.35  3Apr08 complex  parallel LinuxIFC",
+        "program_basis_set_type": "plane waves",
+        "section_method": [
+          {
+            "_name": "section_method",
+            "_gIndex": 0,
+
+            "electronic_structure_method": "DFT",
+            "section_XC_functionals": [
+              {
+                "_name": "section_XC_functionals",
+                "_gIndex": 0,
+                "XC_functional_name": "GGA_X_PBE"
+              }
+            ]
+          }
+        ],
+        "section_system": [
+          {
+            "_name": "section_system",
+            "_gIndex": 0,
+            "simulation_cell": [
+              [
+                5.76372622e-10,
+                0.0,
+                0.0
+              ],
+              [
+                0.0,
+                5.76372622e-10,
+                0.0
+              ],
+              [
+                0.0,
+                0.0,
+                4.0755698899999997e-10
+              ]
+            ],
+            "configuration_periodic_dimensions": [
+              true,
+              true,
+              true
+            ],
+            "atom_positions": [
+              [
+                2.88186311e-10,
+                0.0,
+                2.0377849449999999e-10
+              ],
+              [
+                0.0,
+                2.88186311e-10,
+                2.0377849449999999e-10
+              ],
+              [
+                0.0,
+                0.0,
+                0.0
+              ],
+              [
+                2.88186311e-10,
+                2.88186311e-10,
+                0.0
+              ]
+            ],
+            "atom_labels": [
+              "Br",
+              "K",
+              "Si",
+              "Si"
+            ]
+          }
+        ],
+        "section_single_configuration_calculation": [
+          {
+            "_name": "section_single_configuration_calculation",
+            "_gIndex": 0,
+            "single_configuration_calculation_to_system_ref": 0,
+            "single_configuration_to_calculation_method_ref": 0,
+            "energy_free": -1.5936767191492225e-18,
+            "energy_total": -1.5935696296699573e-18,
+            "energy_total_T0": -3.2126683561907e-22
+          }
+        ],
+        "section_sampling_method": [
+          {
+            "_name": "section_sampling_method",
+            "_gIndex": 0,
+            "sampling_method": "geometry_optimization"
+          }
+        ],
+        "section_frame_sequence": [
+          {
+            "_name": "section_frame_sequence",
+            "_gIndex": 0,
+            "frame_sequence_to_sampling_ref": 0,
+            "frame_sequence_local_frames_ref": [
+              0
+            ]
+          }
+        ]
+      }
+    ]
+  }
\ No newline at end of file
diff --git a/tests/data/migration/new_calc/upload/2/template.json b/tests/data/migration/new_calc/upload/2/template.json
new file mode 100644
index 0000000000000000000000000000000000000000..b85b5bd1742a1b1591c39adbd9086565e9ec0cc3
--- /dev/null
+++ b/tests/data/migration/new_calc/upload/2/template.json
@@ -0,0 +1,110 @@
+{
+    "section_run": [
+      {
+        "_name": "section_run",
+        "_gIndex": 0,
+        "program_name": "VASP",
+        "program_version": "4.6.35  3Apr08 complex  parallel LinuxIFC",
+        "program_basis_set_type": "plane waves",
+        "section_method": [
+          {
+            "_name": "section_method",
+            "_gIndex": 0,
+
+            "electronic_structure_method": "DFT",
+            "section_XC_functionals": [
+              {
+                "_name": "section_XC_functionals",
+                "_gIndex": 0,
+                "XC_functional_name": "GGA_X_PBE"
+              }
+            ]
+          }
+        ],
+        "section_system": [
+          {
+            "_name": "section_system",
+            "_gIndex": 0,
+            "simulation_cell": [
+              [
+                5.76372622e-10,
+                0.0,
+                0.0
+              ],
+              [
+                0.0,
+                5.76372622e-10,
+                0.0
+              ],
+              [
+                0.0,
+                0.0,
+                4.0755698899999997e-10
+              ]
+            ],
+            "configuration_periodic_dimensions": [
+              true,
+              true,
+              true
+            ],
+            "atom_positions": [
+              [
+                2.88186311e-10,
+                0.0,
+                2.0377849449999999e-10
+              ],
+              [
+                0.0,
+                2.88186311e-10,
+                2.0377849449999999e-10
+              ],
+              [
+                0.0,
+                0.0,
+                0.0
+              ],
+              [
+                2.88186311e-10,
+                2.88186311e-10,
+                0.0
+              ]
+            ],
+            "atom_labels": [
+              "Br",
+              "K",
+              "Si",
+              "Si"
+            ]
+          }
+        ],
+        "section_single_configuration_calculation": [
+          {
+            "_name": "section_single_configuration_calculation",
+            "_gIndex": 0,
+            "single_configuration_calculation_to_system_ref": 0,
+            "single_configuration_to_calculation_method_ref": 0,
+            "energy_free": -1.5936767191492225e-18,
+            "energy_total": -1.5935696296699573e-18,
+            "energy_total_T0": -3.2126683561907e-22
+          }
+        ],
+        "section_sampling_method": [
+          {
+            "_name": "section_sampling_method",
+            "_gIndex": 0,
+            "sampling_method": "geometry_optimization"
+          }
+        ],
+        "section_frame_sequence": [
+          {
+            "_name": "section_frame_sequence",
+            "_gIndex": 0,
+            "frame_sequence_to_sampling_ref": 0,
+            "frame_sequence_local_frames_ref": [
+              0
+            ]
+          }
+        ]
+      }
+    ]
+  }
\ No newline at end of file
diff --git a/tests/data/migration/new_calc/upload/3/template.json b/tests/data/migration/new_calc/upload/3/template.json
new file mode 100644
index 0000000000000000000000000000000000000000..b85b5bd1742a1b1591c39adbd9086565e9ec0cc3
--- /dev/null
+++ b/tests/data/migration/new_calc/upload/3/template.json
@@ -0,0 +1,110 @@
+{
+    "section_run": [
+      {
+        "_name": "section_run",
+        "_gIndex": 0,
+        "program_name": "VASP",
+        "program_version": "4.6.35  3Apr08 complex  parallel LinuxIFC",
+        "program_basis_set_type": "plane waves",
+        "section_method": [
+          {
+            "_name": "section_method",
+            "_gIndex": 0,
+
+            "electronic_structure_method": "DFT",
+            "section_XC_functionals": [
+              {
+                "_name": "section_XC_functionals",
+                "_gIndex": 0,
+                "XC_functional_name": "GGA_X_PBE"
+              }
+            ]
+          }
+        ],
+        "section_system": [
+          {
+            "_name": "section_system",
+            "_gIndex": 0,
+            "simulation_cell": [
+              [
+                5.76372622e-10,
+                0.0,
+                0.0
+              ],
+              [
+                0.0,
+                5.76372622e-10,
+                0.0
+              ],
+              [
+                0.0,
+                0.0,
+                4.0755698899999997e-10
+              ]
+            ],
+            "configuration_periodic_dimensions": [
+              true,
+              true,
+              true
+            ],
+            "atom_positions": [
+              [
+                2.88186311e-10,
+                0.0,
+                2.0377849449999999e-10
+              ],
+              [
+                0.0,
+                2.88186311e-10,
+                2.0377849449999999e-10
+              ],
+              [
+                0.0,
+                0.0,
+                0.0
+              ],
+              [
+                2.88186311e-10,
+                2.88186311e-10,
+                0.0
+              ]
+            ],
+            "atom_labels": [
+              "Br",
+              "K",
+              "Si",
+              "Si"
+            ]
+          }
+        ],
+        "section_single_configuration_calculation": [
+          {
+            "_name": "section_single_configuration_calculation",
+            "_gIndex": 0,
+            "single_configuration_calculation_to_system_ref": 0,
+            "single_configuration_to_calculation_method_ref": 0,
+            "energy_free": -1.5936767191492225e-18,
+            "energy_total": -1.5935696296699573e-18,
+            "energy_total_T0": -3.2126683561907e-22
+          }
+        ],
+        "section_sampling_method": [
+          {
+            "_name": "section_sampling_method",
+            "_gIndex": 0,
+            "sampling_method": "geometry_optimization"
+          }
+        ],
+        "section_frame_sequence": [
+          {
+            "_name": "section_frame_sequence",
+            "_gIndex": 0,
+            "frame_sequence_to_sampling_ref": 0,
+            "frame_sequence_local_frames_ref": [
+              0
+            ]
+          }
+        ]
+      }
+    ]
+  }
\ No newline at end of file
diff --git a/tests/data/migration/new_upload/new_upload/1/template.json b/tests/data/migration/new_upload/new_upload/1/template.json
new file mode 100644
index 0000000000000000000000000000000000000000..b85b5bd1742a1b1591c39adbd9086565e9ec0cc3
--- /dev/null
+++ b/tests/data/migration/new_upload/new_upload/1/template.json
@@ -0,0 +1,110 @@
+{
+    "section_run": [
+      {
+        "_name": "section_run",
+        "_gIndex": 0,
+        "program_name": "VASP",
+        "program_version": "4.6.35  3Apr08 complex  parallel LinuxIFC",
+        "program_basis_set_type": "plane waves",
+        "section_method": [
+          {
+            "_name": "section_method",
+            "_gIndex": 0,
+
+            "electronic_structure_method": "DFT",
+            "section_XC_functionals": [
+              {
+                "_name": "section_XC_functionals",
+                "_gIndex": 0,
+                "XC_functional_name": "GGA_X_PBE"
+              }
+            ]
+          }
+        ],
+        "section_system": [
+          {
+            "_name": "section_system",
+            "_gIndex": 0,
+            "simulation_cell": [
+              [
+                5.76372622e-10,
+                0.0,
+                0.0
+              ],
+              [
+                0.0,
+                5.76372622e-10,
+                0.0
+              ],
+              [
+                0.0,
+                0.0,
+                4.0755698899999997e-10
+              ]
+            ],
+            "configuration_periodic_dimensions": [
+              true,
+              true,
+              true
+            ],
+            "atom_positions": [
+              [
+                2.88186311e-10,
+                0.0,
+                2.0377849449999999e-10
+              ],
+              [
+                0.0,
+                2.88186311e-10,
+                2.0377849449999999e-10
+              ],
+              [
+                0.0,
+                0.0,
+                0.0
+              ],
+              [
+                2.88186311e-10,
+                2.88186311e-10,
+                0.0
+              ]
+            ],
+            "atom_labels": [
+              "Br",
+              "K",
+              "Si",
+              "Si"
+            ]
+          }
+        ],
+        "section_single_configuration_calculation": [
+          {
+            "_name": "section_single_configuration_calculation",
+            "_gIndex": 0,
+            "single_configuration_calculation_to_system_ref": 0,
+            "single_configuration_to_calculation_method_ref": 0,
+            "energy_free": -1.5936767191492225e-18,
+            "energy_total": -1.5935696296699573e-18,
+            "energy_total_T0": -3.2126683561907e-22
+          }
+        ],
+        "section_sampling_method": [
+          {
+            "_name": "section_sampling_method",
+            "_gIndex": 0,
+            "sampling_method": "geometry_optimization"
+          }
+        ],
+        "section_frame_sequence": [
+          {
+            "_name": "section_frame_sequence",
+            "_gIndex": 0,
+            "frame_sequence_to_sampling_ref": 0,
+            "frame_sequence_local_frames_ref": [
+              0
+            ]
+          }
+        ]
+      }
+    ]
+  }
\ No newline at end of file
diff --git a/tests/data/migration/new_upload/new_upload/2/template.json b/tests/data/migration/new_upload/new_upload/2/template.json
new file mode 100644
index 0000000000000000000000000000000000000000..b85b5bd1742a1b1591c39adbd9086565e9ec0cc3
--- /dev/null
+++ b/tests/data/migration/new_upload/new_upload/2/template.json
@@ -0,0 +1,110 @@
+{
+    "section_run": [
+      {
+        "_name": "section_run",
+        "_gIndex": 0,
+        "program_name": "VASP",
+        "program_version": "4.6.35  3Apr08 complex  parallel LinuxIFC",
+        "program_basis_set_type": "plane waves",
+        "section_method": [
+          {
+            "_name": "section_method",
+            "_gIndex": 0,
+
+            "electronic_structure_method": "DFT",
+            "section_XC_functionals": [
+              {
+                "_name": "section_XC_functionals",
+                "_gIndex": 0,
+                "XC_functional_name": "GGA_X_PBE"
+              }
+            ]
+          }
+        ],
+        "section_system": [
+          {
+            "_name": "section_system",
+            "_gIndex": 0,
+            "simulation_cell": [
+              [
+                5.76372622e-10,
+                0.0,
+                0.0
+              ],
+              [
+                0.0,
+                5.76372622e-10,
+                0.0
+              ],
+              [
+                0.0,
+                0.0,
+                4.0755698899999997e-10
+              ]
+            ],
+            "configuration_periodic_dimensions": [
+              true,
+              true,
+              true
+            ],
+            "atom_positions": [
+              [
+                2.88186311e-10,
+                0.0,
+                2.0377849449999999e-10
+              ],
+              [
+                0.0,
+                2.88186311e-10,
+                2.0377849449999999e-10
+              ],
+              [
+                0.0,
+                0.0,
+                0.0
+              ],
+              [
+                2.88186311e-10,
+                2.88186311e-10,
+                0.0
+              ]
+            ],
+            "atom_labels": [
+              "Br",
+              "K",
+              "Si",
+              "Si"
+            ]
+          }
+        ],
+        "section_single_configuration_calculation": [
+          {
+            "_name": "section_single_configuration_calculation",
+            "_gIndex": 0,
+            "single_configuration_calculation_to_system_ref": 0,
+            "single_configuration_to_calculation_method_ref": 0,
+            "energy_free": -1.5936767191492225e-18,
+            "energy_total": -1.5935696296699573e-18,
+            "energy_total_T0": -3.2126683561907e-22
+          }
+        ],
+        "section_sampling_method": [
+          {
+            "_name": "section_sampling_method",
+            "_gIndex": 0,
+            "sampling_method": "geometry_optimization"
+          }
+        ],
+        "section_frame_sequence": [
+          {
+            "_name": "section_frame_sequence",
+            "_gIndex": 0,
+            "frame_sequence_to_sampling_ref": 0,
+            "frame_sequence_local_frames_ref": [
+              0
+            ]
+          }
+        ]
+      }
+    ]
+  }
\ No newline at end of file
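
A note on the template fixtures above: the repeated index hash (b85b5bd...) shows that most of these template.json files are byte-for-byte copies of one another, and their numeric values appear to follow the NOMAD metainfo's SI conventions (cell vectors and atom positions in meters, energies in joules). For orientation, a quick conversion sketch in plain Python (standard constants; not part of the patch):

# Convert the template's SI values into Angstrom / eV for readability.
ANGSTROM = 1e-10              # meters per Angstrom
EV = 1.602176634e-19          # joules per electron volt

a = 5.76372622e-10 / ANGSTROM            # tetragonal a, ~5.764 Angstrom
c = 4.0755698899999997e-10 / ANGSTROM    # tetragonal c, ~4.076 Angstrom
energy_total = -1.5935696296699573e-18 / EV   # ~ -9.95 eV
print(a, c, energy_total)
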
diff --git a/tests/misc.http b/tests/misc.http
index 7b4546f45b8aa49f81fe7019994197445e701cd8..ecc3a3db345302835c52e94f667d6d2060f9dcfb 100644
--- a/tests/misc.http
+++ b/tests/misc.http
@@ -8,3 +8,8 @@ content-type: application/json
 {
     "name": "aims-example-full"
 }
+###
+
+GET http://localhost:8000/nomad/api/v2/uploads/ HTTP/1.1
+Authorization: Basic bGVvbmFyZC5ob2ZzdGFkdGVyQG5vbWFkLWZhaXJkaS50ZXN0cy5kZTo=
+###
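
For reference, the Authorization header in the request above is plain HTTP Basic auth for the test user: the base64 payload decodes to leonard.hofstadter@nomad-fairdi.tests.de followed by a colon and an empty password. It can be reproduced with a few lines of Python:

import base64

# Basic auth payload is "<username>:<password>"; the test user logs in
# with an email and an empty password.
token = base64.b64encode(b'leonard.hofstadter@nomad-fairdi.tests.de:').decode()
print('Authorization: Basic %s' % token)
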
diff --git a/tests/processing/test_base.py b/tests/processing/test_base.py
index f468b6a75a1a1e3e7088bf4d9aaf485d8f0b74b5..a825af4312bea60d88c39593831743478ed95cb3 100644
--- a/tests/processing/test_base.py
+++ b/tests/processing/test_base.py
@@ -13,11 +13,12 @@ from nomad.processing.base import Proc, Chord, process, task, SUCCESS, FAILURE,
 random.seed(0)
 
 
-def assert_proc(proc, current_task, status=SUCCESS, errors=0, warnings=0):
+def assert_proc(proc, current_task, tasks_status=SUCCESS, errors=0, warnings=0):
     assert proc.current_task == current_task
-    assert proc.status == status
+    assert proc.tasks_status == tasks_status
     assert len(proc.errors) == errors
     assert len(proc.warnings) == warnings
+    assert not proc.process_running
 
 
 class Tasks(Proc):
@@ -108,6 +109,25 @@ def test_task_as_proc(worker, no_warn):
     assert_proc(p, 'process')
 
 
+class ProcInProc(Proc):
+    @process
+    @task
+    def one(self):
+        self.two()
+
+    @process
+    @task
+    def two(self):
+        pass
+
+
+def test_fail_on_proc_in_proc(worker):
+    p = ProcInProc.create()
+    p.one()
+    p.block_until_complete()
+    assert_proc(p, 'one', FAILURE, errors=1)
+
+
 class ParentProc(Chord):
 
     @process
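
The new ProcInProc class and test_fail_on_proc_in_proc above pin down that starting a @process from within another @process is an error that fails the Proc. As an illustration only, a minimal re-entrancy guard in that spirit might look like this (a hypothetical sketch, not nomad's actual decorator):

import functools

def process(func):
    # Hypothetical guard: refuse to start a second process while one is
    # already running on the same object (cf. test_fail_on_proc_in_proc).
    @functools.wraps(func)
    def wrapper(self, *args, **kwargs):
        if getattr(self, '_process_running', False):
            raise RuntimeError('cannot call a process from within a process')
        self._process_running = True
        try:
            return func(self, *args, **kwargs)
        finally:
            self._process_running = False
    return wrapper
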
diff --git a/tests/processing/test_data.py b/tests/processing/test_data.py
index 77815b905739f4a37ece4f5cea6f199d164c362c..b44904a053515e0c3bc532176133e2e77d757ce0 100644
--- a/tests/processing/test_data.py
+++ b/tests/processing/test_data.py
@@ -26,10 +26,9 @@ import os.path
 import json
 
 from nomad import utils
-from nomad.files import UploadFile, ArchiveFile, ArchiveLogFile, RepositoryFile
+from nomad.files import ArchiveBasedStagingUploadFiles, UploadFiles, StagingUploadFiles
 from nomad.processing import Upload, Calc
-from nomad.processing.base import task as task_decorator
-from nomad.repo import RepoCalc
+from nomad.processing.base import task as task_decorator, FAILURE, SUCCESS
 
 from tests.test_files import example_file, empty_file
 
@@ -40,7 +39,7 @@ example_files = [empty_file, example_file]
 
 
 @pytest.fixture(scope='function', autouse=True)
-def mocks_forall(mocksearch, mockmongo):
+def mocks_forall(mockmongo):
     pass
 
 
@@ -48,9 +47,8 @@ def mocks_forall(mocksearch, mockmongo):
 def uploaded_id(request, clear_files) -> Generator[str, None, None]:
     example_file = request.param
     example_upload_id = os.path.basename(example_file).replace('.zip', '')
-    upload_file = UploadFile(example_upload_id)
-    upload_file.create_dirs()
-    shutil.copyfile(example_file, upload_file.os_path)
+    upload_files = ArchiveBasedStagingUploadFiles(example_upload_id, create=True)
+    shutil.copyfile(example_file, upload_files.upload_file_os_path)
 
     yield example_upload_id
 
@@ -59,9 +57,8 @@ def uploaded_id(request, clear_files) -> Generator[str, None, None]:
 def uploaded_id_with_warning(request, clear_files) -> Generator[str, None, None]:
     example_file = 'tests/data/proc/examples_with_warning_template.zip'
     example_upload_id = os.path.basename(example_file).replace('.zip', '')
-    upload_file = UploadFile(example_upload_id)
-    upload_file.create_dirs()
-    shutil.copyfile(example_file, upload_file.os_path)
+    upload_files = ArchiveBasedStagingUploadFiles(example_upload_id, create=True)
+    shutil.copyfile(example_file, upload_files.upload_file_os_path)
 
     yield example_upload_id
 
@@ -70,10 +67,10 @@ def run_processing(uploaded_id: str, test_user) -> Upload:
     upload = Upload.create(upload_id=uploaded_id, user=test_user)
     upload.upload_time = datetime.now()
 
-    assert upload.status == 'RUNNING'
+    assert upload.tasks_status == 'RUNNING'
     assert upload.current_task == 'uploading'
 
-    upload.process()  # pylint: disable=E1101
+    upload.process_upload()  # pylint: disable=E1101
     upload.block_until_complete(interval=.1)
 
     return upload
@@ -84,72 +81,55 @@ def processed_upload(uploaded_id, test_user, worker, no_warn) -> Upload:
     return run_processing(uploaded_id, test_user)
 
 
-def assert_processing(upload: Upload, mocksearch=None):
-    assert upload.completed
+def assert_processing(upload: Upload):
+    assert not upload.tasks_running
     assert upload.current_task == 'cleanup'
-    assert upload.upload_hash is not None
+    assert upload.upload_id is not None
     assert len(upload.errors) == 0
-    assert upload.status == 'SUCCESS'
+    assert upload.tasks_status == SUCCESS
+
+    upload_files = UploadFiles.get(upload.upload_id, is_authorized=lambda: True)
+    assert isinstance(upload_files, StagingUploadFiles)
 
     for calc in Calc.objects(upload_id=upload.upload_id):
         assert calc.parser is not None
         assert calc.mainfile is not None
-        assert calc.status == 'SUCCESS', calc.archive_id
+        assert calc.tasks_status == SUCCESS
 
-        archive_file = ArchiveFile(calc.archive_id)
-        assert archive_file.exists()
-        with archive_file.read_archive_json() as archive_json:
+        with upload_files.archive_file(calc.calc_id) as archive_json:
             archive = json.load(archive_json)
         assert 'section_run' in archive
         assert 'section_calculation_info' in archive
 
-        assert ArchiveLogFile(calc.archive_id).exists()
-        with ArchiveLogFile(calc.archive_id).open('rt') as f:
+        with upload_files.archive_log_file(calc.calc_id) as f:
             assert 'a test' in f.read()
         assert len(calc.errors) == 0
 
-        if mocksearch:
-            repo = mocksearch[calc.archive_id]
-            assert repo is not None
-            assert repo.chemical_composition is not None
-            assert repo.basis_set_type is not None
-            assert len(repo.aux_files) == 4
+        with upload_files.raw_file(calc.mainfile) as f:
+            f.read()
 
-    assert RepositoryFile(upload.upload_hash).exists()
+        assert upload_files.metadata.get(calc.calc_id) is not None
 
 
-@pytest.mark.timeout(30)
-def test_processing(uploaded_id, worker, mocksearch, test_user, no_warn):
+@pytest.mark.timeout(30)
+def test_processing(uploaded_id, worker, test_user, no_warn):
     upload = run_processing(uploaded_id, test_user)
-    assert_processing(upload, mocksearch)
+    assert_processing(upload)
 
 
 @pytest.mark.timeout(30)
-def test_processing_with_warning(uploaded_id_with_warning, worker, test_user, mocksearch):
+def test_processing_with_warning(uploaded_id_with_warning, worker, test_user):
     upload = run_processing(uploaded_id_with_warning, test_user)
-    assert_processing(upload, mocksearch)
-
-
-@pytest.mark.parametrize('uploaded_id', [example_files[1]], indirect=True)
-def test_processing_doublets(uploaded_id, worker, test_user, with_error):
-
-    upload = run_processing(uploaded_id, test_user)
-    assert upload.status == 'SUCCESS'
-    assert RepoCalc.upload_exists(upload.upload_hash)  # pylint: disable=E1101
-
-    upload = run_processing(uploaded_id, test_user)
-    assert upload.status == 'FAILURE'
-    assert len(upload.errors) > 0
-    assert 'already' in upload.errors[0]
+    assert_processing(upload)
 
 
 @pytest.mark.timeout(30)
 def test_process_non_existing(worker, test_user, with_error):
     upload = run_processing('__does_not_exist', test_user)
 
-    assert upload.completed
+    assert not upload.tasks_running
     assert upload.current_task == 'extracting'
-    assert upload.status == 'FAILURE'
+    assert upload.tasks_status == FAILURE
     assert len(upload.errors) > 0
 
 
@@ -174,20 +154,20 @@ def test_task_failure(monkeypatch, uploaded_id, worker, task, test_user, with_er
     # run the test
     upload = run_processing(uploaded_id, test_user)
 
-    assert upload.completed
+    assert not upload.tasks_running
 
     if task != 'parsing':
-        assert upload.status == 'FAILURE'
+        assert upload.tasks_status == FAILURE
         assert upload.current_task == task
         assert len(upload.errors) > 0
     else:
         # there is an empty example with no calcs, even if past parsing_all task
         utils.get_logger(__name__).error('fake')
         if upload.total_calcs > 0:  # pylint: disable=E1101
-            assert upload.status == 'SUCCESS'
+            assert upload.tasks_status == SUCCESS
             assert upload.current_task == 'cleanup'
             assert len(upload.errors) == 0
             for calc in upload.all_calcs(0, 100):  # pylint: disable=E1101
-                assert calc.status == 'FAILURE'
+                assert calc.tasks_status == FAILURE
                 assert calc.current_task == 'parsing'
                 assert len(calc.errors) > 0
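
The renames in this file (status to tasks_status, completed to "not tasks_running") separate the state of the task pipeline from the newly introduced long-running process state (process_running). A minimal polling helper in the same spirit (hypothetical; the tests themselves use block_until_complete):

import time

def wait_for_tasks(proc, interval=0.1, timeout=30):
    # Poll a Proc-like object until its task pipeline stops running.
    # Assumes the tasks_running / tasks_status attributes used above.
    deadline = time.time() + timeout
    while proc.tasks_running:
        if time.time() > deadline:
            raise TimeoutError('processing did not finish in time')
        time.sleep(interval)
    return proc.tasks_status
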
diff --git a/tests/test_api.py b/tests/test_api.py
index d86004aa8abce94bee24342f02f9b08a74a72604..fc0a58d9a9a4eba8ef1fb37328542ea8d565ff2d 100644
--- a/tests/test_api.py
+++ b/tests/test_api.py
@@ -1,15 +1,26 @@
+# Copyright 2018 Markus Scheidgen
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#   http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
 import pytest
 import time
 import json
-import zlib
-import re
-import os.path
 from mongoengine import connect
 from mongoengine.connection import disconnect
-from datetime import datetime, timedelta
 import base64
 import zipfile
 import io
+import inspect
 
 from nomad import config
 # for convenience we test the api without path prefix
@@ -17,18 +28,19 @@ services_config = config.services._asdict()
 services_config.update(api_base_path='')
 config.services = config.NomadServicesConfig(**services_config)
 
-from nomad import api  # noqa
-from nomad.files import UploadFile  # noqa
-from nomad.processing import Upload  # noqa
+from nomad import api, coe_repo  # noqa
+from nomad.files import UploadFiles, PublicUploadFiles  # noqa
+from nomad.processing import Upload, Calc, SUCCESS  # noqa
+from nomad.coe_repo import User  # noqa
 
 from tests.processing.test_data import example_files  # noqa
 from tests.test_files import example_file, example_file_mainfile, example_file_contents  # noqa
+from tests.test_files import create_staging_upload, create_public_upload  # noqa
 
 # import fixtures
-from tests.test_files import clear_files, archive, archive_log, archive_config  # noqa pylint: disable=unused-import
 from tests.test_normalizing import normalized_template_example  # noqa pylint: disable=unused-import
 from tests.test_parsing import parsed_template_example  # noqa pylint: disable=unused-import
-from tests.test_repo import example_elastic_calc  # noqa pylint: disable=unused-import
+# from tests.test_repo import example_elastic_calc  # noqa pylint: disable=unused-import
 from tests.test_coe_repo import assert_coe_upload  # noqa
 
 
@@ -53,385 +65,555 @@ def create_auth_headers(user):
     }
 
 
-@pytest.fixture(scope='session')
-def test_user_auth(test_user):
+@pytest.fixture(scope='module')
+def test_user_auth(test_user: User):
     return create_auth_headers(test_user)
 
 
-@pytest.fixture(scope='session')
-def test_other_user_auth(other_test_user):
+@pytest.fixture(scope='module')
+def test_other_user_auth(other_test_user: User):
     return create_auth_headers(other_test_user)
 
 
-def assert_uploads(upload_json_str, count=0, **kwargs):
-    data = json.loads(upload_json_str)
-    assert isinstance(data, list)
-    assert len(data) == count
-
-    if count > 0:
-        assert_upload(json.dumps(data[0]), **kwargs)
-
-
-def assert_upload(upload_json_str, id=None, **kwargs):
-    data = json.loads(upload_json_str)
-    assert 'upload_id' in data
-    if id is not None:
-        assert id == data['upload_id']
-    assert 'create_time' in data
-    assert 'upload_url' in data
-    assert 'upload_command' in data
-
-    for key, value in kwargs.items():
-        assert data.get(key, None) == value
-
-    return data
-
-
-def test_xtoken_auth(client, test_user, no_warn):
-    rv = client.get('/uploads', headers={
-        'X-Token': test_user.email
-    })
-
-    assert rv.status_code == 200
-
-
-def test_xtoken_auth_denied(client, no_warn):
-    rv = client.get('/uploads', headers={
-        'X-Token': 'invalid'
-    })
-
-    assert rv.status_code == 401
-
-
-def test_basic_auth(client, test_user_auth, no_warn):
-    rv = client.get('/uploads', headers=test_user_auth)
-    assert rv.status_code == 200
+@pytest.fixture(scope='module')
+def admin_user_auth(admin_user: User):
+    return create_auth_headers(admin_user)
 
 
-def test_basic_auth_denied(client, no_warn):
-    basic_auth_base64 = base64.b64encode('invalid'.encode('utf-8')).decode('utf-8')
-    rv = client.get('/uploads', headers={
-        'Authorization': 'Basic %s' % basic_auth_base64
-    })
-    assert rv.status_code == 401
-
-
-def test_no_uploads(client, test_user_auth, no_warn):
-    rv = client.get('/uploads', headers=test_user_auth)
-
+@pytest.fixture(scope='function')
+def test_user_signature_token(client, test_user_auth):
+    rv = client.get('/auth/token', headers=test_user_auth)
     assert rv.status_code == 200
-    assert_uploads(rv.data, count=0)
-
+    return json.loads(rv.data)['token']
 
-def test_not_existing_upload(client, test_user_auth, no_warn):
-    rv = client.get('/uploads/123456789012123456789012', headers=test_user_auth)
-    assert rv.status_code == 404
 
+class TestAdmin:
 
-def test_stale_upload(client, test_user_auth):
-    rv = client.post(
-        '/uploads',
-        headers=test_user_auth,
-        data=json.dumps(dict(name='test_name')),
-        content_type='application/json')
-    assert rv.status_code == 200
-    upload_id = assert_upload(rv.data)['upload_id']
+    @pytest.mark.timeout(10)
+    def test_reset(self, client, admin_user_auth, expandable_repo_db):
+        rv = client.post('/admin/reset', headers=admin_user_auth)
+        assert rv.status_code == 200
 
-    upload = Upload.get(upload_id)
-    upload.create_time = datetime.now() - timedelta(days=2)
-    upload.save()
+    # TODO: this should be disabled, as it will destroy the session repository_db beyond repair.
+    @pytest.mark.timeout(10)
+    def test_remove(self, client, admin_user_auth, expandable_repo_db):
+        rv = client.post('/admin/remove', headers=admin_user_auth)
+        assert rv.status_code == 200
 
-    rv = client.get('/uploads/%s' % upload_id, headers=test_user_auth)
-    assert rv.status_code == 200
-    assert_upload(rv.data, is_stale=True)
+    def test_doesnotexist(self, client, admin_user_auth):
+        rv = client.post('/admin/doesnotexist', headers=admin_user_auth)
+        assert rv.status_code == 404
 
+    def test_only_admin(self, client, test_user_auth):
+        rv = client.post('/admin/reset', headers=test_user_auth)
+        assert rv.status_code == 401
+
+    @pytest.fixture(scope='function')
+    def disable_reset(self, monkeypatch):
+        old_config = config.services
+        new_config = config.NomadServicesConfig(
+            config.services.api_host,
+            config.services.api_port,
+            config.services.api_base_path,
+            config.services.api_secret,
+            config.services.admin_password,
+            True)
+        monkeypatch.setattr(config, 'services', new_config)
+        yield None
+        monkeypatch.setattr(config, 'services', old_config)
+
+    def test_disabled(self, client, admin_user_auth, disable_reset, repository_db):
+        rv = client.post('/admin/reset', headers=admin_user_auth)
+        assert rv.status_code == 400
+
+
+class TestAuth:
+    def test_xtoken_auth(self, client, test_user: User, no_warn):
+        rv = client.get('/uploads/', headers={
+        'X-Token': test_user.first_name.lower()  # the test users have their first names as tokens for convenience
+        })
 
-def test_create_upload(client, test_user_auth, no_warn):
-    rv = client.post('/uploads', headers=test_user_auth)
+        assert rv.status_code == 200
 
-    assert rv.status_code == 200
-    upload_id = assert_upload(rv.data)['upload_id']
+    def test_xtoken_auth_denied(self, client, no_warn, repository_db):
+        rv = client.get('/uploads/', headers={
+            'X-Token': 'invalid'
+        })
 
-    rv = client.get('/uploads/%s' % upload_id, headers=test_user_auth)
-    assert rv.status_code == 200
-    assert_upload(rv.data, id=upload_id, is_stale=False)
+        assert rv.status_code == 401
 
-    rv = client.get('/uploads', headers=test_user_auth)
-    assert rv.status_code == 200
-    assert_uploads(rv.data, count=1, id=upload_id)
+    def test_basic_auth(self, client, test_user_auth, no_warn):
+        rv = client.get('/uploads/', headers=test_user_auth)
+        assert rv.status_code == 200
 
+    def test_basic_auth_denied(self, client, no_warn):
+        basic_auth_base64 = base64.b64encode('invalid'.encode('utf-8')).decode('utf-8')
+        rv = client.get('/uploads/', headers={
+            'Authorization': 'Basic %s' % basic_auth_base64
+        })
+        assert rv.status_code == 401
 
-def test_create_upload_with_name(client, test_user_auth, no_warn):
-    rv = client.post(
-        '/uploads', headers=test_user_auth,
-        data=json.dumps(dict(name='test_name')),
-        content_type='application/json')
+    def test_get_user(self, client, test_user_auth, test_user: User, no_warn):
+        rv = client.get('/auth/user', headers=test_user_auth)
+        assert rv.status_code == 200
+        user = json.loads(rv.data)
+        for key in ['first_name', 'last_name', 'email', 'token']:
+            assert key in user
 
-    assert rv.status_code == 200
-    upload = assert_upload(rv.data)
-    assert upload['name'] == 'test_name'
+        rv = client.get('/uploads/', headers={
+            'X-Token': user['token']
+        })
 
+        assert rv.status_code == 200
 
-def test_create_upload_with_local_path(client, test_user_auth, no_warn):
-    rv = client.post(
-        '/uploads', headers=test_user_auth,
-        data=json.dumps(dict(local_path='test_local_path')),
-        content_type='application/json')
+    def test_signature_token(self, test_user_signature_token, no_warn):
+        assert test_user_signature_token is not None
+
+
+class TestUploads:
+
+    @pytest.fixture(scope='function')
+    def proc_infra(self, repository_db, worker, no_warn):
+        return dict(repository_db=repository_db)
+
+    def assert_uploads(self, upload_json_str, count=0, **kwargs):
+        data = json.loads(upload_json_str)
+        assert isinstance(data, list)
+        assert len(data) == count
+
+        if count > 0:
+            self.assert_upload(json.dumps(data[0]), **kwargs)
+
+    def assert_upload(self, upload_json_str, id=None, **kwargs):
+        data = json.loads(upload_json_str)
+        assert 'upload_id' in data
+        if id is not None:
+            assert id == data['upload_id']
+        assert 'create_time' in data
+
+        for key, value in kwargs.items():
+            assert data.get(key, None) == value
+
+        return data
+
+    def assert_processing(self, client, test_user_auth, upload_id):
+        upload_endpoint = '/uploads/%s' % upload_id
+
+        # poll until completed
+        while True:
+            time.sleep(0.1)
+            rv = client.get(upload_endpoint, headers=test_user_auth)
+            assert rv.status_code == 200
+            upload = self.assert_upload(rv.data)
+            assert 'upload_time' in upload
+            if not upload['tasks_running']:
+                break
+
+        assert len(upload['tasks']) == 4
+        assert upload['tasks_status'] == SUCCESS
+        assert upload['current_task'] == 'cleanup'
+        assert not upload['process_running']
+        upload_files = UploadFiles.get(upload['upload_id'])
+        assert upload_files is not None
+        calcs = upload['calcs']['results']
+        for calc in calcs:
+            assert calc['tasks_status'] == SUCCESS
+            assert calc['current_task'] == 'archiving'
+            assert len(calc['tasks']) == 3
+            assert client.get('/archive/logs/%s/%s' % (calc['upload_id'], calc['calc_id']), headers=test_user_auth).status_code == 200
+
+        if upload['calcs']['pagination']['total'] > 1:
+            rv = client.get('%s?page=2&per_page=1&order_by=tasks_status' % upload_endpoint, headers=test_user_auth)
+            assert rv.status_code == 200
+            upload = self.assert_upload(rv.data)
+            assert len(upload['calcs']['results']) == 1
+
+    def assert_unstage(self, client, test_user_auth, upload_id, proc_infra, metadata={}):
+        rv = client.get('/uploads/%s' % upload_id, headers=test_user_auth)
+        upload = self.assert_upload(rv.data)
+        empty_upload = upload['calcs']['pagination']['total'] == 0
+
+        rv = client.post(
+            '/uploads/%s' % upload_id,
+            headers=test_user_auth,
+            data=json.dumps(dict(command='commit', metadata=metadata)),
+            content_type='application/json')
+        assert rv.status_code == 200
+        upload = self.assert_upload(rv.data)
+        assert upload['current_process'] == 'commit_upload'
+        assert upload['process_running']
+
+        self.assert_upload_does_not_exist(client, upload_id, test_user_auth)
+        assert_coe_upload(upload_id, empty=empty_upload, metadata=metadata)
+
+    def assert_upload_does_not_exist(self, client, upload_id: str, test_user_auth):
+        # poll until commit/delete completed
+        while True:
+            time.sleep(0.1)
+            rv = client.get('/uploads/%s' % upload_id, headers=test_user_auth)
+            if rv.status_code == 200:
+                upload = self.assert_upload(rv.data)
+                assert upload['process_running']
+            elif rv.status_code == 404:
+                break
+            else:
+                assert False
+
+        rv = client.get('/uploads/%s' % upload_id, headers=test_user_auth)
+        assert rv.status_code == 404
+        assert Upload.objects(upload_id=upload_id).first() is None
+        assert Calc.objects(upload_id=upload_id).count() == 0
+        upload_files = UploadFiles.get(upload_id)
+        assert upload_files is None or isinstance(upload_files, PublicUploadFiles)
 
-    assert rv.status_code == 200
-    upload = assert_upload(rv.data)
-    assert upload['local_path'] == 'test_local_path'
+    def test_get_command(self, client, test_user_auth, no_warn):
+        rv = client.get('/uploads/command', headers=test_user_auth)
+        assert rv.status_code == 200
+        data = json.loads(rv.data)
+        assert 'upload_command' in data
+        assert 'upload_url' in data
 
+    def test_get_empty(self, client, test_user_auth, no_warn):
+        rv = client.get('/uploads/', headers=test_user_auth)
 
-def test_delete_empty_upload(client, mocksearch, test_user_auth, no_warn):
-    rv = client.post('/uploads', headers=test_user_auth)
+        assert rv.status_code == 200
+        self.assert_uploads(rv.data, count=0)
 
-    assert rv.status_code == 200
-    upload_id = assert_upload(rv.data)['upload_id']
+    def test_get_not_existing(self, client, test_user_auth, no_warn):
+        rv = client.get('/uploads/123456789012123456789012', headers=test_user_auth)
+        assert rv.status_code == 404
 
-    rv = client.delete('/uploads/%s' % upload_id, headers=test_user_auth)
-    assert rv.status_code == 200
+    @pytest.mark.timeout(30)
+    @pytest.mark.parametrize('file', example_files)
+    @pytest.mark.parametrize('mode', ['multipart', 'stream', 'local_path'])
+    @pytest.mark.parametrize('name', [None, 'test_name'])
+    def test_put(self, client, test_user_auth, proc_infra, file, mode, name):
+        if name:
+            url = '/uploads/?name=%s' % name
+        else:
+            url = '/uploads/'
+
+        if mode == 'multipart':
+            rv = client.put(
+                url, data=dict(file=(open(file, 'rb'), 'file')), headers=test_user_auth)
+        elif mode == 'stream':
+            with open(file, 'rb') as f:
+                rv = client.put(url, data=f.read(), headers=test_user_auth)
+        elif mode == 'local_path':
+            url += '&' if name else '?'
+            url += 'local_path=%s' % file
+            rv = client.put(url, headers=test_user_auth)
+        else:
+            assert False
 
-    rv = client.get('/uploads/%s' % upload_id, headers=test_user_auth)
-    assert rv.status_code == 404
+        assert rv.status_code == 200
+        if mode == 'local_path':
+            upload = self.assert_upload(rv.data, local_path=file, name=name)
+        else:
+            upload = self.assert_upload(rv.data, name=name)
+        assert upload['tasks_running']
 
+        self.assert_processing(client, test_user_auth, upload['upload_id'])
 
-def assert_processing(client, test_user_auth, upload_id, repository_db):
-    upload_endpoint = '/uploads/%s' % upload_id
+    def test_delete_not_existing(self, client, test_user_auth, no_warn):
+        rv = client.delete('/uploads/123456789012123456789012', headers=test_user_auth)
+        assert rv.status_code == 404
 
-    while True:
-        time.sleep(0.1)
+    @pytest.fixture(scope='function')
+    def slow_processing(self, monkeypatch):
+        old_cleanup = Upload.cleanup
+
+        def slow_cleanup(self):
+            time.sleep(0.5)
+            old_cleanup(self)
+
+        monkeypatch.setattr('nomad.processing.data.Upload.cleanup', slow_cleanup)
+        yield True
+        monkeypatch.setattr('nomad.processing.data.Upload.cleanup', old_cleanup)
+
+    def test_delete_during_processing(self, client, test_user_auth, proc_infra, slow_processing):
+        rv = client.put('/uploads/?local_path=%s' % example_file, headers=test_user_auth)
+        upload = self.assert_upload(rv.data)
+        assert upload['tasks_running']
+        rv = client.delete('/uploads/%s' % upload['upload_id'], headers=test_user_auth)
+        assert rv.status_code == 400
+        self.assert_processing(client, test_user_auth, upload['upload_id'])
+
+    def test_delete_unstaged(self, client, test_user_auth, proc_infra, clean_repository_db):
+        rv = client.put('/uploads/?local_path=%s' % example_file, headers=test_user_auth)
+        upload = self.assert_upload(rv.data)
+        self.assert_processing(client, test_user_auth, upload['upload_id'])
+        self.assert_unstage(client, test_user_auth, upload['upload_id'], proc_infra)
+        rv = client.delete('/uploads/%s' % upload['upload_id'], headers=test_user_auth)
+        assert rv.status_code == 404
 
-        rv = client.get(upload_endpoint, headers=test_user_auth)
+    def test_delete(self, client, test_user_auth, proc_infra):
+        rv = client.put('/uploads/?local_path=%s' % example_file, headers=test_user_auth)
+        upload = self.assert_upload(rv.data)
+        self.assert_processing(client, test_user_auth, upload['upload_id'])
+        rv = client.delete('/uploads/%s' % upload['upload_id'], headers=test_user_auth)
         assert rv.status_code == 200
-        upload = assert_upload(rv.data)
-        assert 'upload_time' in upload
-        if upload['completed']:
-            break
-
-    assert len(upload['tasks']) == 4
-    assert upload['status'] == 'SUCCESS'
-    assert upload['current_task'] == 'cleanup'
-    assert UploadFile(upload['upload_id'], upload.get('local_path')).exists()
-    calcs = upload['calcs']['results']
-    for calc in calcs:
-        assert calc['status'] == 'SUCCESS'
-        assert calc['current_task'] == 'archiving'
-        assert len(calc['tasks']) == 3
-        assert client.get('/logs/%s' % calc['archive_id']).status_code == 200
-
-    empty_upload = upload['calcs']['pagination']['total'] == 0
-
-    if upload['calcs']['pagination']['total'] > 1:
-        rv = client.get('%s?page=2&per_page=1&order_by=status' % upload_endpoint)
+        self.assert_upload_does_not_exist(client, upload['upload_id'], test_user_auth)
+
+    @pytest.mark.parametrize('example_file', example_files)
+    def test_post(self, client, test_user_auth, example_file, proc_infra, clean_repository_db):
+        rv = client.put('/uploads/?local_path=%s' % example_file, headers=test_user_auth)
+        upload = self.assert_upload(rv.data)
+        self.assert_processing(client, test_user_auth, upload['upload_id'])
+        self.assert_unstage(client, test_user_auth, upload['upload_id'], proc_infra)
+
+    def test_post_metadata(
+            self, client, proc_infra, admin_user_auth, test_user_auth, test_user,
+            other_test_user, clean_repository_db):
+        rv = client.put('/uploads/?local_path=%s' % example_file, headers=test_user_auth)
+        upload = self.assert_upload(rv.data)
+        self.assert_processing(client, test_user_auth, upload['upload_id'])
+        metadata = dict(comment='test comment')
+        self.assert_unstage(client, admin_user_auth, upload['upload_id'], proc_infra, metadata)
+
+    def test_post_metadata_forbidden(self, client, proc_infra, test_user_auth, clean_repository_db):
+        rv = client.put('/uploads/?local_path=%s' % example_file, headers=test_user_auth)
+        upload = self.assert_upload(rv.data)
+        self.assert_processing(client, test_user_auth, upload['upload_id'])
+        rv = client.post(
+            '/uploads/%s' % upload['upload_id'],
+            headers=test_user_auth,
+            data=json.dumps(dict(command='commit', metadata=dict(_pid=256))),
+            content_type='application/json')
+        assert rv.status_code == 401
+
+    # TODO validate metadata (or all input models in API for that matter)
+    # def test_post_bad_metadata(self, client, proc_infra, test_user_auth, clean_repository_db):
+    #     rv = client.put('/uploads/?local_path=%s' % example_file, headers=test_user_auth)
+    #     upload = self.assert_upload(rv.data)
+    #     self.assert_processing(client, test_user_auth, upload['upload_id'])
+    #     rv = client.post(
+    #         '/uploads/%s' % upload['upload_id'],
+    #         headers=test_user_auth,
+    #         data=json.dumps(dict(command='commit', metadata=dict(doesnotexist='hi'))),
+    #         content_type='application/json')
+    #     assert rv.status_code == 400
+
+
+class UploadFilesBasedTests:
+
+    @staticmethod
+    def fix_signature(func, wrapper):
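+        # pytest resolves fixtures from a test's signature; the decorators below
+        # replace a test with a wrapper, so we graft the test's extra fixture
+        # parameters (those after self, client, upload, auth_headers) onto the
+        # wrapper's (self, client, test_data) signature.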
+        additional_args = list(inspect.signature(func).parameters.values())[4:]
+        wrapper_sig = inspect.signature(wrapper)
+        wrapper_args = list(wrapper_sig.parameters.values())[:3] + additional_args
+        wrapper_sig = wrapper_sig.replace(parameters=tuple(wrapper_args))
+        wrapper.__signature__ = wrapper_sig
+
+    @staticmethod
+    def check_authorization(func):
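+        """
+        Decorator that parametrizes a test over all staging/restriction/user
+        combinations and expects a failure (401 or empty zip) whenever the
+        user is not authorized.
+        """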
+        @pytest.mark.parametrize('test_data', [
+            [True, None, True],     # in staging for upload
+            [True, None, False],    # in staging for different user
+            [True, None, None],     # in staging for guest
+            [False, True, True],    # in public, restricted for uploader
+            [False, True, False],   # in public, restricted for different user
+            [False, True, None],    # in public, restricted for guest
+            [False, False, True],   # in public, public, for uploader
+            [False, False, False],  # in public, public, for different user
+            [False, False, None]    # in public, public, for guest
+        ], indirect=True)
+        def wrapper(self, client, test_data, *args, **kwargs):
+            upload, authorized, auth_headers = test_data
+            try:
+                func(self, client, upload, auth_headers, *args, **kwargs)
+            except AssertionError as assertion:
+                assertion_str = str(assertion)
+                if not authorized:
+                    if '0 == 5' in assertion_str and 'ZipFile' in assertion_str:
+                        # the user is not authorized and gets an empty zip as expected
+                        return
+                    if '401' in assertion_str:
+                        # the user is not authorized and gets a 401 as expected
+                        return
+                raise assertion
+
+            if not authorized:
+                assert False, 'unauthorized access did not fail as expected'
+        UploadFilesBasedTests.fix_signature(func, wrapper)
+        return wrapper
+
+    @staticmethod
+    def ignore_authorization(func):
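+        """
+        Decorator that runs the test once on a staging and once on a public
+        upload, in both cases with permitted access.
+        """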
+        @pytest.mark.parametrize('test_data', [
+            [True, None, True],      # in staging
+            [False, False, None],    # in public
+        ], indirect=True)
+        def wrapper(self, client, test_data, *args, **kwargs):
+            upload, _, auth_headers = test_data
+            func(self, client, upload, auth_headers, *args, **kwargs)
+        UploadFilesBasedTests.fix_signature(func, wrapper)
+        return wrapper
+
+    @pytest.fixture(scope='function')
+    def test_data(self, request, clean_repository_db, no_warn, test_user, other_test_user):
+        # delete potential old test files
+        for _ in [0, 1]:
+            upload_files = UploadFiles.get('test_upload')
+            if upload_files:
+                upload_files.delete()
+
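+        # request.param is (in_staging, restricted, for_uploader), where
+        # for_uploader is True (uploader), False (another user), or None (guest)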
+        in_staging, restricted, for_uploader = request.param
+
+        if in_staging:
+            authorized = for_uploader
+        else:
+            authorized = not restricted or for_uploader
+
+        if for_uploader:
+            auth_headers = create_auth_headers(test_user)
+        elif for_uploader is False:
+            auth_headers = create_auth_headers(other_test_user)
+        else:
+            auth_headers = None
+
+        calc_specs = 'r' if restricted else 'p'
+        if in_staging:
+            Upload.create(user=test_user, upload_id='test_upload')
+            upload_files = create_staging_upload('test_upload', calc_specs=calc_specs)
+        else:
+            upload_files = create_public_upload('test_upload', calc_specs=calc_specs)
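+            # mirror the packed public upload with an entry in the repository db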
+            clean_repository_db.begin()
+            coe_upload = coe_repo.Upload(
+                upload_name='test_upload',
+                user_id=test_user.user_id, is_processed=True)
+            clean_repository_db.add(coe_upload)
+            clean_repository_db.commit()
+
+        yield 'test_upload', authorized, auth_headers
+
+        upload_files.delete()
+
+
+class TestArchive(UploadFilesBasedTests):
+    @UploadFilesBasedTests.check_authorization
+    def test_get(self, client, upload, auth_headers):
+        rv = client.get('/archive/%s/0' % upload, headers=auth_headers)
         assert rv.status_code == 200
-        upload = assert_upload(rv.data)
-        assert len(upload['calcs']['results']) == 1
-
-    rv = client.post(
-        upload_endpoint,
-        headers=test_user_auth,
-        data=json.dumps(dict(operation='unstage')),
-        content_type='application/json')
-    assert rv.status_code == 200
-
-    rv = client.get('/uploads', headers=test_user_auth)
-    assert rv.status_code == 200
-    assert_uploads(rv.data, count=0)
-    assert_coe_upload(upload['upload_hash'], repository_db, empty=empty_upload)
-
-
-@pytest.mark.parametrize('file', example_files)
-@pytest.mark.parametrize('mode', ['multipart', 'stream'])
-@pytest.mark.timeout(10)
-def test_processing(client, file, mode, worker, mocksearch, test_user_auth, no_warn, repository_db):
-    rv = client.post('/uploads', headers=test_user_auth)
-    assert rv.status_code == 200
-    upload = assert_upload(rv.data)
-    upload_id = upload['upload_id']
-
-    upload_cmd = upload['upload_command']
-    headers = dict(Authorization='Basic %s' % re.search(r'.*Authorization: Basic ([^\s]+).*', upload_cmd).group(1))
-    upload_endpoint = '/uploads/%s' % upload_id
-    upload_file_endpoint = '%s/file' % upload_endpoint
-
-    upload_url = upload['upload_url']
-    assert upload_url.endswith(upload_file_endpoint)
-    if mode == 'multipart':
-        rv = client.put(
-            upload_file_endpoint,
-            data=dict(file=(open(file, 'rb'), 'file')),
-            headers=headers)
-    elif mode == 'stream':
-        with open(file, 'rb') as f:
-            rv = client.put(upload_file_endpoint, data=f.read(), headers=headers)
-    else:
-        assert False
-    assert rv.status_code == 200
-    upload = assert_upload(rv.data)
-
-    assert_processing(client, test_user_auth, upload_id, repository_db)
-
-
-@pytest.mark.parametrize('file', example_files)
-@pytest.mark.timeout(10)
-def test_processing_local_path(client, file, worker, mocksearch, test_user_auth, no_warn, repository_db):
-    rv = client.post(
-        '/uploads', headers=test_user_auth,
-        data=json.dumps(dict(local_path=file)),
-        content_type='application/json')
-
-    assert rv.status_code == 200
-    upload = assert_upload(rv.data)
-    upload_id = upload['upload_id']
-
-    assert_processing(client, test_user_auth, upload_id, repository_db)
-
-
-@pytest.mark.parametrize('file', example_files)
-@pytest.mark.parametrize('mode', ['multipart', 'stream'])
-@pytest.mark.timeout(10)
-def test_processing_upload(client, file, mode, worker, mocksearch, test_user_auth, no_warn, repository_db):
-    if mode == 'multipart':
-        rv = client.put(
-            '/uploads',
-            data=dict(file=(open(file, 'rb'), 'file')),
-            headers=test_user_auth)
-    elif mode == 'stream':
-        with open(file, 'rb') as f:
-            rv = client.put('/uploads', data=f.read(), headers=test_user_auth)
-    else:
-        assert False
-    assert rv.status_code == 200
-    upload = assert_upload(rv.data)
-    upload_id = upload['upload_id']
-
-    assert_processing(client, test_user_auth, upload_id, repository_db)
-
-
-def test_repo_calc(client, example_elastic_calc, no_warn):
-    rv = client.get(
-        '/repo/%s/%s' % (example_elastic_calc.upload_hash, example_elastic_calc.calc_hash))
-    assert rv.status_code == 200
-
-
-def test_non_existing_repo_cals(client, no_warn):
-    rv = client.get('/repo/doesnt/exist')
-    assert rv.status_code == 404
-
-
-def test_repo_calcs(client, example_elastic_calc, no_warn):
-    rv = client.get('/repo')
-    assert rv.status_code == 200
-    data = json.loads(rv.data)
-    results = data.get('results', None)
-    assert results is not None
-    assert isinstance(results, list)
-    assert len(results) >= 1
-
-
-def test_repo_calcs_pagination(client, example_elastic_calc, no_warn):
-    rv = client.get('/repo?page=1&per_page=1')
-    assert rv.status_code == 200
-    data = json.loads(rv.data)
-    results = data.get('results', None)
-    assert results is not None
-    assert isinstance(results, list)
-    assert len(results) == 1
-
-
-def test_repo_calcs_user(client, example_elastic_calc, test_user_auth, no_warn):
-    rv = client.get('/repo?owner=user', headers=test_user_auth)
-    assert rv.status_code == 200
-    data = json.loads(rv.data)
-    results = data.get('results', None)
-    assert results is not None
-    assert len(results) >= 1
-
-
-def test_repo_calcs_user_authrequired(client, example_elastic_calc, no_warn):
-    rv = client.get('/repo?owner=user')
-    assert rv.status_code == 401
-
-
-def test_repo_calcs_user_invisible(client, example_elastic_calc, test_other_user_auth, no_warn):
-    rv = client.get('/repo?owner=user', headers=test_other_user_auth)
-    assert rv.status_code == 200
-    data = json.loads(rv.data)
-    results = data.get('results', None)
-    assert results is not None
-    assert len(results) == 0
-
-
-def test_get_archive(client, archive, no_warn):
-    rv = client.get('/archive/%s' % archive.object_id)
-
-    if rv.headers.get('Content-Encoding') == 'gzip':
-        json.loads(zlib.decompress(rv.data, 16 + zlib.MAX_WBITS))
-    else:
-        json.loads(rv.data)
-
-    assert rv.status_code == 200
-
-
-def test_get_calc_proc_log(client, archive_log, no_warn):
-    rv = client.get('/logs/%s' % archive_log.object_id)
-
-    assert len(rv.data) > 0
-    assert rv.status_code == 200
+        assert json.loads(rv.data) is not None
 
+    @UploadFilesBasedTests.ignore_authorization
+    def test_get_signed(self, client, upload, _, test_user_signature_token):
+        rv = client.get('/archive/%s/0?token=%s' % (upload, test_user_signature_token))
+        assert rv.status_code == 200
+        assert json.loads(rv.data) is not None
 
-def test_get_non_existing_archive(client, no_warn):
-    rv = client.get('/archive/%s' % 'doesnt/exist')
-    assert rv.status_code == 404
+    @UploadFilesBasedTests.check_authorization
+    def test_get_calc_proc_log(self, client, upload, auth_headers):
+        rv = client.get('/archive/logs/%s/0' % upload, headers=auth_headers)
+        assert rv.status_code == 200
+        assert len(rv.data) > 0
 
+    @UploadFilesBasedTests.ignore_authorization
+    def test_get_calc_proc_log_signed(self, client, upload, _, test_user_signature_token):
+        rv = client.get('/archive/logs/%s/0?token=%s' % (upload, test_user_signature_token))
+        assert rv.status_code == 200
+        assert len(rv.data) > 0
 
-def test_docs(client):
-    rv = client.get('/docs/introduction.html')
-    assert rv.status_code == 200
+    @UploadFilesBasedTests.ignore_authorization
+    def test_get_non_existing_archive(self, client, upload, auth_headers):
+        rv = client.get('/archive/%s' % 'doesnt/exist', headers=auth_headers)
+        assert rv.status_code == 404
 
+    def test_get_metainfo(self, client):
+        rv = client.get('/archive/metainfo/all.nomadmetainfo.json')
+        assert rv.status_code == 200
 
-class TestRaw:
 
-    @pytest.fixture
-    def example_upload_hash(self, mockmongo, no_warn):
-        upload = Upload(id='test_upload_id', local_path=os.path.abspath(example_file))
-        upload.create_time = datetime.now()
-        upload.user_id = 'does@not.exist'
-        upload.save()
+class TestRepo(UploadFilesBasedTests):
+    @UploadFilesBasedTests.ignore_authorization
+    def test_calc(self, client, upload, auth_headers):
+        rv = client.get('/repo/%s/0' % upload, headers=auth_headers)
+        assert rv.status_code == 200
 
-        with UploadFile(upload.upload_id, local_path=upload.local_path) as upload_file:
-            upload_file.persist()
-            upload_hash = upload_file.upload_hash()
+    @UploadFilesBasedTests.ignore_authorization
+    def test_non_existing_calcs(self, client, upload, auth_headers):
+        rv = client.get('/repo/doesnt/exist', headers=auth_headers)
+        assert rv.status_code == 404
 
-        return upload_hash
+    # def test_calcs(self, client, example_elastic_calc, no_warn):
+    #     rv = client.get('/repo/')
+    #     assert rv.status_code == 200
+    #     data = json.loads(rv.data)
+    #     results = data.get('results', None)
+    #     assert results is not None
+    #     assert isinstance(results, list)
+    #     assert len(results) >= 1
+
+    # def test_calcs_pagination(self, client, example_elastic_calc, no_warn):
+    #     rv = client.get('/repo/?page=1&per_page=1')
+    #     assert rv.status_code == 200
+    #     data = json.loads(rv.data)
+    #     results = data.get('results', None)
+    #     assert results is not None
+    #     assert isinstance(results, list)
+    #     assert len(results) == 1
+
+    # def test_calcs_user(self, client, example_elastic_calc, test_user_auth, no_warn):
+    #     rv = client.get('/repo/?owner=user', headers=test_user_auth)
+    #     assert rv.status_code == 200
+    #     data = json.loads(rv.data)
+    #     results = data.get('results', None)
+    #     assert results is not None
+    #     assert len(results) >= 1
+
+    # def test_calcs_user_authrequired(self, client, example_elastic_calc, no_warn):
+    #     rv = client.get('/repo/?owner=user')
+    #     assert rv.status_code == 401
+
+    # def test_calcs_user_invisible(self, client, example_elastic_calc, test_other_user_auth, no_warn):
+    #     rv = client.get('/repo/?owner=user', headers=test_other_user_auth)
+    #     assert rv.status_code == 200
+    #     data = json.loads(rv.data)
+    #     results = data.get('results', None)
+    #     assert results is not None
+    #     assert len(results) == 0
+
+
+class TestRaw(UploadFilesBasedTests):
+
+    @UploadFilesBasedTests.check_authorization
+    def test_raw_file(self, client, upload, auth_headers):
+        url = '/raw/%s/%s' % (upload, example_file_mainfile)
+        rv = client.get(url, headers=auth_headers)
+        assert rv.status_code == 200
+        assert len(rv.data) > 0
 
-    def test_raw_file(self, client, example_upload_hash):
-        url = '/raw/%s/data/%s' % (example_upload_hash, example_file_mainfile)
+    @UploadFilesBasedTests.ignore_authorization
+    def test_raw_file_signed(self, client, upload, _, test_user_signature_token):
+        url = '/raw/%s/%s?token=%s' % (upload, example_file_mainfile, test_user_signature_token)
         rv = client.get(url)
         assert rv.status_code == 200
         assert len(rv.data) > 0
 
-    def test_raw_file_missing_file(self, client, example_upload_hash):
-        url = '/raw/%s/does/not/exist' % example_upload_hash
-        rv = client.get(url)
+    @UploadFilesBasedTests.ignore_authorization
+    def test_raw_file_missing_file(self, client, upload, auth_headers):
+        url = '/raw/%s/does/not/exist' % upload
+        rv = client.get(url, headers=auth_headers)
         assert rv.status_code == 404
         data = json.loads(rv.data)
         assert 'files' not in data
 
-    def test_raw_file_listing(self, client, example_upload_hash):
-        url = '/raw/%s/data/examples' % example_upload_hash
-        rv = client.get(url)
+    @UploadFilesBasedTests.ignore_authorization
+    def test_raw_file_listing(self, client, upload, auth_headers):
+        url = '/raw/%s/examples' % upload
+        rv = client.get(url, headers=auth_headers)
         assert rv.status_code == 404
         data = json.loads(rv.data)
         assert len(data['files']) == 5
 
     @pytest.mark.parametrize('compress', [True, False])
-    def test_raw_file_wildcard(self, client, example_upload_hash, compress):
-        url = '/raw/%s/data/examples*' % example_upload_hash
+    @UploadFilesBasedTests.ignore_authorization
+    def test_raw_file_wildcard(self, client, upload, auth_headers, compress):
+        url = '/raw/%s/examples*' % upload
         if compress:
             url = '%s?compress=1' % url
-        rv = client.get(url)
+        rv = client.get(url, headers=auth_headers)
 
         assert rv.status_code == 200
         assert len(rv.data) > 0
@@ -439,22 +621,37 @@ class TestRaw:
             assert zip_file.testzip() is None
             assert len(zip_file.namelist()) == len(example_file_contents)
 
-    def test_raw_file_wildcard_missing(self, client, example_upload_hash):
-        url = '/raw/%s/does/not/exist*' % example_upload_hash
-        rv = client.get(url)
+    @UploadFilesBasedTests.ignore_authorization
+    def test_raw_file_wildcard_missing(self, client, upload, auth_headers):
+        url = '/raw/%s/does/not/exist*' % upload
+        rv = client.get(url, headers=auth_headers)
         assert rv.status_code == 404
 
-    def test_raw_file_missing_upload(self, client, example_upload_hash):
+    @UploadFilesBasedTests.ignore_authorization
+    def test_raw_file_missing_upload(self, client, upload, auth_headers):
         url = '/raw/doesnotexist/%s' % example_file_mainfile
-        rv = client.get(url)
+        rv = client.get(url, headers=auth_headers)
         assert rv.status_code == 404
 
     @pytest.mark.parametrize('compress', [True, False])
-    def test_raw_files(self, client, example_upload_hash, compress):
+    @UploadFilesBasedTests.check_authorization
+    def test_raw_files(self, client, upload, auth_headers, compress):
         url = '/raw/%s?files=%s' % (
-            example_upload_hash, ','.join(['data/%s' % file for file in example_file_contents]))
+            upload, ','.join(example_file_contents))
         if compress:
             url = '%s&compress=1' % url
+        rv = client.get(url, headers=auth_headers)
+
+        assert rv.status_code == 200
+        assert len(rv.data) > 0
+        with zipfile.ZipFile(io.BytesIO(rv.data)) as zip_file:
+            assert zip_file.testzip() is None
+            assert len(zip_file.namelist()) == len(example_file_contents)
+
+    @UploadFilesBasedTests.ignore_authorization
+    def test_raw_files_signed(self, client, upload, _, test_user_signature_token):
+        url = '/raw/%s?files=%s&token=%s' % (
+            upload, ','.join(example_file_contents), test_user_signature_token)
         rv = client.get(url)
 
         assert rv.status_code == 200
@@ -464,12 +661,13 @@ class TestRaw:
             assert len(zip_file.namelist()) == len(example_file_contents)
 
     @pytest.mark.parametrize('compress', [True, False, None])
-    def test_raw_files_post(self, client, example_upload_hash, compress):
-        url = '/raw/%s' % example_upload_hash
-        data = dict(files=['data/%s' % file for file in example_file_contents])
+    @UploadFilesBasedTests.check_authorization
+    def test_raw_files_post(self, client, upload, auth_headers, compress):
+        url = '/raw/%s' % upload
+        data = dict(files=example_file_contents)
         if compress is not None:
             data.update(compress=compress)
-        rv = client.post(url, data=json.dumps(data), content_type='application/json')
+        rv = client.post(url, data=json.dumps(data), content_type='application/json', headers=auth_headers)
 
         assert rv.status_code == 200
         assert len(rv.data) > 0
@@ -478,11 +676,12 @@ class TestRaw:
             assert len(zip_file.namelist()) == len(example_file_contents)
 
     @pytest.mark.parametrize('compress', [True, False])
-    def test_raw_files_missing_file(self, client, example_upload_hash, compress):
-        url = '/raw/%s?files=data/%s,missing/file.txt' % (example_upload_hash, example_file_mainfile)
+    @UploadFilesBasedTests.ignore_authorization
+    def test_raw_files_missing_file(self, client, upload, auth_headers, compress):
+        url = '/raw/%s?files=%s,missing/file.txt' % (upload, example_file_mainfile)
         if compress:
             url = '%s&compress=1' % url
-        rv = client.get(url)
+        rv = client.get(url, headers=auth_headers)
 
         assert rv.status_code == 200
         assert len(rv.data) > 0
@@ -490,8 +689,15 @@ class TestRaw:
             assert zip_file.testzip() is None
             assert len(zip_file.namelist()) == 1
 
-    def test_raw_files_missing_upload(self, client, example_upload_hash):
+    @UploadFilesBasedTests.ignore_authorization
+    def test_raw_files_missing_upload(self, client, upload, auth_headers):
         url = '/raw/doesnotexist?files=shoud/not/matter.txt'
-        rv = client.get(url)
+        rv = client.get(url, headers=auth_headers)
 
         assert rv.status_code == 404
+
+
+def test_docs(client):
+    assert client.get('/docs/index.html').status_code == 200
+    assert client.get('/docs/introduction.html').status_code == 200
diff --git a/tests/test_client.py b/tests/test_client.py
new file mode 100644
index 0000000000000000000000000000000000000000..95c63484339077fc56e1af2aa587cf366b6c7cbe
--- /dev/null
+++ b/tests/test_client.py
@@ -0,0 +1,51 @@
+# Copyright 2018 Markus Scheidgen
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#   http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+import pytest
+from bravado.client import SwaggerClient
+import time
+
+from nomad.processing import SUCCESS
+
+from tests.test_files import example_file, create_public_upload, clear_files  # noqa pylint: disable=unused-import
+from tests.test_api import client as flask_client, test_user_auth  # noqa pylint: disable=unused-import
+from tests.bravado_flaks import FlaskTestHttpClient
+
+
+@pytest.fixture(scope='function')
+def client(flask_client, repository_db, test_user_auth):
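+    """ A bravado SwaggerClient that runs against the flask test client. """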
+    http_client = FlaskTestHttpClient(flask_client, headers=test_user_auth)
+    return SwaggerClient.from_url('/swagger.json', http_client=http_client)
+
+
+def test_get_upload_command(client):
+    assert client.uploads.get_upload_command().response().result.upload_command is not None
+
+
+def test_upload(client, worker):
+    with open(example_file, 'rb') as f:
+        upload = client.uploads.upload(file=f, name='test_upload').response().result
+
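+    # poll the uploads endpoint until all processing tasks have completed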
+    while upload.tasks_running:
+        upload = client.uploads.get_upload(upload_id=upload.upload_id).response().result
+        time.sleep(0.1)
+
+    assert upload.tasks_status == SUCCESS
+
+
+def test_get_repo_calc(client, clear_files):
+    create_public_upload('test_upload', 'pp')
+    repo = client.repo.get_repo_calc(upload_id='test_upload', calc_id='0').response().result
+    assert repo is not None
+    assert repo['calc_id'] is not None
diff --git a/tests/test_coe_repo.py b/tests/test_coe_repo.py
index 909194e2fac2f0bf65cae1bbfaeb8fed1c3386a2..520e70a34550717241a8e82537a9058708d6309e 100644
--- a/tests/test_coe_repo.py
+++ b/tests/test_coe_repo.py
@@ -1,7 +1,21 @@
+# Copyright 2018 Markus Scheidgen
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#   http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
 import pytest
-import json
+import datetime
 
-from nomad.coe_repo import User, Calc, CalcMetaData, StructRatio, Upload, add_upload
+from nomad.coe_repo import User, Calc, Upload
 
 from tests.processing.test_data import processed_upload  # pylint: disable=unused-import
 from tests.processing.test_data import uploaded_id  # pylint: disable=unused-import
@@ -16,7 +30,7 @@ def assert_user(user, reference):
 
 
 def test_token_authorize(test_user):
-    user = User.verify_auth_token(test_user.email)
+    user = User.verify_auth_token(test_user.first_name.lower())
     assert_user(user, test_user)
 
 
@@ -25,33 +39,94 @@ def test_password_authorize(test_user):
     assert_user(user, test_user)
 
 
-def assert_coe_upload(upload_hash, repository_db, empty=False):
-    coe_upload = repository_db.query(Upload).filter_by(upload_name=upload_hash).first()
+def assert_coe_upload(upload_id, empty=False, metadata={}):
+    coe_upload = Upload.from_upload_id(upload_id)
+
     if empty:
         assert coe_upload is None
     else:
         assert coe_upload is not None
-        coe_upload_id = coe_upload.upload_id
-        for calc in repository_db.query(Calc).filter_by(origin_id=coe_upload_id):
-            assert calc.origin_id == coe_upload_id
-            metadata = repository_db.query(CalcMetaData).filter_by(calc_id=calc.calc_id).first()
-            assert metadata is not None
-            assert metadata.chemical_formula is not None
-            filenames = metadata.filenames.decode('utf-8')
-            assert len(json.loads(filenames)) == 5
+        assert len(coe_upload.calcs) > 0
+        for calc in coe_upload.calcs:
+            assert_coe_calc(calc, metadata=metadata)
+
+        if '_upload_time' in metadata:
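+            # isoformat()[:26] truncates to microsecond precision for the comparison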
+            assert coe_upload.created.isoformat()[:26] == metadata['_upload_time']
+
+
+def assert_coe_calc(calc: Calc, metadata={}):
+    assert int(calc.pid) == int(metadata.get('_pid', calc.pid))
+    assert calc.calc_id == metadata.get('_checksum', calc.calc_id)
+
+    # calc data
+    assert len(calc.filenames) == 5
+    assert calc.chemical_formula is not None
+
+    # user meta data
+    assert calc.comment == metadata.get('comment', None)
+    assert sorted(calc.references) == sorted(metadata.get('references', []))
+    assert calc.uploader is not None
+    assert calc.uploader.user_id == metadata.get('_uploader', calc.uploader.user_id)
+    assert sorted(user.user_id for user in calc.coauthors) == sorted(metadata.get('coauthors', []))
+    assert sorted(user.user_id for user in calc.shared_with) == sorted(metadata.get('shared_with', []))
+    assert calc.with_embargo == metadata.get('with_embargo', False)
+
+
+@pytest.mark.timeout(10)
+def test_add_upload(clean_repository_db, processed_upload):
+    empty = processed_upload.total_calcs == 0
 
-            struct_ratio = repository_db.query(StructRatio).filter_by(calc_id=calc.calc_id).first()
-            assert struct_ratio is not None
-            assert struct_ratio.chemical_formula == metadata.chemical_formula
-            assert struct_ratio.formula_units == 1
+    Upload.add(processed_upload)
+    assert_coe_upload(processed_upload.upload_id, empty=empty)
 
 
 @pytest.mark.timeout(10)
-def test_add_upload(repository_db, processed_upload):
-    coe_upload_id = add_upload(processed_upload, restricted=False)
-    if coe_upload_id:
-        assert_coe_upload(processed_upload.upload_hash, repository_db)
-
-    coe_upload_id = add_upload(processed_upload, restricted=False)
-    if coe_upload_id:
-        assert_coe_upload(processed_upload.upload_hash, repository_db)
+def test_add_upload_metadata(clean_repository_db, processed_upload, other_test_user, test_user):
+    empty = processed_upload.total_calcs == 0
+
+    metadata = {
+        'comment': 'test comment',
+        'with_embargo': True,
+        'references': ['http://external.ref/one', 'http://external.ref/two'],
+        '_uploader': other_test_user.user_id,
+        'coauthors': [test_user.user_id],
+        '_checksum': '1',
+        '_upload_time': datetime.datetime.now().isoformat(),
+        '_pid': 256
+    }
+
+    Upload.add(processed_upload, metadata=metadata)
+    assert_coe_upload(processed_upload.upload_id, empty=empty, metadata=metadata)
+
+
+class TestDataSets:
+
+    @pytest.fixture(scope='function')
+    def datasets(self, clean_repository_db):
+        clean_repository_db.begin()
+        one = Calc()
+        two = Calc()
+        three = Calc()
+        clean_repository_db.add(one)
+        clean_repository_db.add(two)
+        clean_repository_db.add(three)
+        one.children.append(two)
+        two.children.append(three)
+        clean_repository_db.commit()
+
+        return one, two, three
+
+    def assert_datasets(self, datasets, id_list):
+        assert sorted([ds.id for ds in datasets]) == sorted(id_list)
+
+    def test_all(self, datasets):
+        one, two, three = datasets
+        self.assert_datasets(one.all_datasets, [])
+        self.assert_datasets(two.all_datasets, [one.coe_calc_id])
+        self.assert_datasets(three.all_datasets, [one.coe_calc_id, two.coe_calc_id])
+
+    def test_direct(self, datasets):
+        one, two, three = datasets
+        self.assert_datasets(one.direct_datasets, [])
+        self.assert_datasets(two.direct_datasets, [one.coe_calc_id])
+        self.assert_datasets(three.direct_datasets, [two.coe_calc_id])
diff --git a/tests/test_files.py b/tests/test_files.py
index 8475dd5d3dd03319c6604e25d96aef8c9c742040..888b447cabd15d71a8f23317249e02dff6870b72 100644
--- a/tests/test_files.py
+++ b/tests/test_files.py
@@ -12,16 +12,19 @@
 # See the License for the specific language governing permissions and
 # limitations under the License.
 
-import pytest
-import json
-import shutil
+from typing import Generator, Any, Dict
 import os
 import os.path
-from zipfile import ZipFile
+import shutil
+import pytest
+import json
 
-from nomad.files import Objects, ObjectFile, ArchiveFile, UploadFile, ArchiveLogFile, \
-    BaggedDataContainer, ZippedDataContainer
 from nomad import config
+from nomad.files import DirectoryObject, PathObject
+from nomad.files import Metadata, PublicMetadata, StagingMetadata
+from nomad.files import StagingUploadFiles, PublicUploadFiles, UploadFiles, Restricted, \
+    ArchiveBasedStagingUploadFiles
+
 
 # example_file uses an artificial parser for faster test execution, can also be
 # changed to examples_vasp.zip for using vasp parser
@@ -56,235 +59,340 @@ def clear_files():
 
 
 class TestObjects:
-    @pytest.fixture()
-    def existing_example_file(self, clear_files):
-        with ObjectFile(example_bucket, 'example_file', ext='json').open(mode='wt') as out:
-            json.dump(example_data, out)
-
-        yield 'example_file', 'json'
-
-    def test_size(self, existing_example_file):
-        name, ext = existing_example_file
-        assert ObjectFile(example_bucket, name, ext).size > 0
-
-    def test_exists(self, existing_example_file):
-        name, ext = existing_example_file
-        assert ObjectFile(example_bucket, name, ext).exists()
-
-    def test_not_exists(self):
-        assert not ObjectFile(example_bucket, 'does_not_exist').exists()
-
-    def test_open(self, existing_example_file):
-        name, ext = existing_example_file
-
-        assert ObjectFile(example_bucket, name, ext).exists()
-        with ObjectFile(example_bucket, name, ext=ext).open() as f:
-            json.load(f)
-
-    def test_delete(self, existing_example_file):
-        name, ext = existing_example_file
-        ObjectFile(example_bucket, name, ext).delete()
-        assert not ObjectFile(example_bucket, name, ext).exists()
-
-    def test_delete_all(self, existing_example_file):
-        name, ext = existing_example_file
-        Objects.delete_all(example_bucket)
-        assert not ObjectFile(example_bucket, name, ext).exists()
-
-
-class TestBaggedDataContainer:
 
     @pytest.fixture(scope='function')
-    def example_directory(self, clear_files):
-        directory = os.path.join(config.fs.tmp, 'test_container')
-        os.makedirs(directory, exist_ok=True)
-
-        with ZipFile(example_file) as zip_file:
-            zip_file.extractall(directory)
-
-        yield directory
-
-    @pytest.fixture(scope='function')
-    def example_container(self, example_directory):
-        yield BaggedDataContainer.create(example_directory)
-
-    def assert_container(self, container):
-        assert container.manifest is not None
-        assert len(container.manifest) == 5
-        assert container.hash is not None
-        assert container.metadata is not None
-        for file_path in container.manifest:
-            assert file_path.startswith('examples_template')
-
-    def test_make(self, example_container):
-        self.assert_container(example_container)
-
-    def test_metadata(self, example_directory, example_container):
-        example_container.metadata['test'] = dict(k1='v1', k2=True, k3=0)
-        example_container.save_metadata()
-
-        example_container = BaggedDataContainer(example_directory)
-        self.assert_container(example_container)
-        assert example_container.metadata['test']['k1'] == 'v1'
-        assert example_container.metadata['test']['k2']
-        assert example_container.metadata['test']['k3'] == 0
-
-    def test_file(self, example_container):
-        file = example_container.get_file('examples_template/template.json')
-        assert file is not None
-        with file.open('r') as f:
-            assert json.load(f)
-
-
-class TestZippedDataContainer(TestBaggedDataContainer):
+    def test_bucket(self):
+        yield 'test_bucket'
+
+        bucket = os.path.join(config.fs.objects, 'test_bucket')
+        if os.path.exists(bucket):
+            shutil.rmtree(bucket)
+
+    def test_file_dir_existing(self, test_bucket):
+        file = PathObject(test_bucket, 'sub/test_id')
+        assert not os.path.exists(os.path.dirname(file.os_path))
+
+    @pytest.mark.parametrize('dirpath', ['test', os.path.join('sub', 'test')])
+    @pytest.mark.parametrize('create', [True, False])
+    @pytest.mark.parametrize('prefix', [True, False])
+    def test_directory(self, test_bucket: str, dirpath: str, create: bool, prefix: bool) -> None:
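+        # prefix=True nests the directory under a parent named after the first
+        # three characters of its name (see the 'tes' assertion below)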
+        directory = DirectoryObject(test_bucket, dirpath, create=create, prefix=prefix)
+        assert directory.exists() == create
+        assert os.path.isdir(directory.os_path) == create
+        assert directory.os_path.endswith(os.path.join('tes' if prefix else '', 'test'))
+
+    @pytest.mark.parametrize('dirpath', ['test', os.path.join('sub', 'test')])
+    @pytest.mark.parametrize('create', [True, False])
+    @pytest.mark.parametrize('join_create', [True, False])
+    @pytest.mark.parametrize('prefix', [True, False])
+    def test_directory_join(self, test_bucket: str, dirpath: str, create: bool, prefix: bool, join_create: bool) -> None:
+        directory = DirectoryObject(test_bucket, 'parent', create=create, prefix=prefix)
+        directory = directory.join_dir(dirpath, create=join_create)
+
+        assert directory.exists() == join_create
+        assert os.path.isdir(directory.os_path) == join_create
+        assert directory.os_path.endswith(os.path.join('', 'test'))
+
+    @pytest.mark.parametrize('filepath', ['test', 'sub/test'])
+    @pytest.mark.parametrize('create', [True, False])
+    def test_directory_join_file_dir_create(self, test_bucket: str, filepath: str, create: bool):
+        directory = DirectoryObject(test_bucket, 'parent', create=create)
+        file = directory.join_file(filepath)
+        assert os.path.exists(directory.os_path) == create
+        assert os.path.exists(os.path.dirname(file.os_path)) == create
+
+
+example_calc: Dict[str, Any] = {
+    'calc_id': '0',
+    'mainfile': 'examples_template/template.json',
+    'data': 'value'
+}
+example_calc_id = example_calc['calc_id']
+
+
+def assert_example_calc(calc):
+    assert calc is not None
+    assert calc['data'] == example_calc['data']
+
+
+class MetadataContract:
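+    """ Test contract shared by all Metadata implementations; subclasses provide the `md` fixture. """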
     @pytest.fixture(scope='function')
-    def example_container(self, example_directory):
-        BaggedDataContainer.create(example_directory)
-        return ZippedDataContainer.create(example_directory)
-
-    def test_metadata(self, example_directory, example_container):
-        pass
-
-    def test_target(self, example_directory):
-        BaggedDataContainer.create(example_directory)
-        target = os.path.join(os.path.dirname(example_directory), 'different.zip')
-        container = ZippedDataContainer.create(example_directory, target=target)
-        self.assert_container(container)
-        with ZipFile(target, 'r') as zip_file:
-            for info in zip_file.filelist:
-                assert info.filename.startswith('different')
-
-
-@pytest.fixture(scope='function', params=[False, True])
-def archive_config(monkeypatch, request):
-    new_config = config.FilesConfig(
-        config.files.uploads_bucket,
-        config.files.raw_bucket,
-        config.files.archive_bucket,
-        request.param)
-    monkeypatch.setattr(config, 'files', new_config)
-    yield
-
-
-@pytest.fixture(scope='function')
-def archive(clear_files, archive_config):
-    archive = ArchiveFile('__test_upload_hash/__test_calc_hash')
-    with archive.write_archive_json() as out:
-        json.dump(example_data, out)
-    yield archive
-
-
-class TestArchiveFile:
-
-    def test_archive(self, archive: ArchiveFile, no_warn):
-        assert archive.exists()
-
-        with archive.read_archive_json() as file:
-            result = json.load(file)
-
-        assert 'test_key' in result
-        assert result['test_key'] == 'test_value'
+    def test_dir(self):
+        path = os.path.join(config.fs.tmp, 'test_dir')
+        os.makedirs(path)
+        yield path
+        shutil.rmtree(path)
 
-    def test_delete_archive(self, archive: ArchiveFile, no_warn):
-        archive.delete()
-        assert not archive.exists()
-
-    def test_delete_archives(self, archive: ArchiveFile, no_warn):
-        ArchiveFile.delete_archives(archive.object_id.split('/')[0])
-        assert not archive.exists()
-
-
-class TestUploadFile:
+    @pytest.fixture(scope='function')
+    def md(self, test_dir):
+        raise NotImplementedError()
 
-    @pytest.fixture()
-    def upload_same_file(self, clear_files):
-        upload = UploadFile('__test_upload_id2')
-        shutil.copyfile(example_file, upload.os_path)
-        yield upload
+    def test_get(self, md: Metadata):
+        assert_example_calc(md.get(example_calc_id))
 
-    @pytest.fixture()
-    def upload(self, clear_files):
-        upload = UploadFile('__test_upload_id')
-        upload.create_dirs()
-        shutil.copyfile(example_file, upload.os_path)
-        yield upload
+    def test_get_fail(self, md: Metadata):
+        with pytest.raises(KeyError):
+            md.get('unknown')
 
-    def assert_upload(self, upload: UploadFile):
-        assert upload.exists()
 
-        assert len(upload.filelist) == 5
-        has_json = False
-        for filename in upload.filelist:
-            the_file = upload.get_file(filename)
-            assert the_file.exists()
-            assert the_file.size >= 0
-            if the_file.path.endswith('.json'):
-                has_json = True
-                assert the_file.size > 0
-                with the_file.open() as f:
-                    f.read()
-                break
-        assert has_json
+class TestStagingMetadata(MetadataContract):
+    @pytest.fixture(scope='function')
+    def md(self, test_dir):
+        md = StagingMetadata(DirectoryObject(None, None, os_path=test_dir))
+        md.insert(example_calc)
+        return md
+
+    def test_remove(self, md: StagingMetadata):
+        md.remove(example_calc)
+        with pytest.raises(KeyError):
+            md.get(example_calc['calc_id'])
+
+    def test_insert(self, md: StagingMetadata):
+        md.remove(example_calc)
+        md.insert(example_calc)
+        assert len(md) == 1
+        assert_example_calc(md.get(example_calc_id))
+
+    def test_insert_fail(self, md: StagingMetadata):
+        with pytest.raises(Exception):
+            md.insert(example_calc)
 
-    def test_upload_extracted(self, upload: UploadFile):
-        with upload:
-            self.assert_upload(upload)
+        assert len(md) == 1
 
-    def test_persist(self, upload: UploadFile):
-        with upload:
-            zipped_container = upload.persist()
+    def test_update(self, md: StagingMetadata):
+        md.update(example_calc_id, dict(data='updated'))
+        assert len(md) == 1
+        assert md.get(example_calc_id)['data'] == 'updated'
 
-        assert zipped_container.exists()
-        assert zipped_container.os_path.endswith('%s.zip' % upload.upload_hash())
+    def test_update_fail(self, md: StagingMetadata):
+        with pytest.raises(KeyError):
+            md.update('unknown', dict(data='updated'))
+        assert len(md) == 1
 
-    def test_delete_upload(self, upload: UploadFile):
-        upload.delete()
-        assert not upload.exists()
 
-    def test_hash(self, upload: UploadFile, upload_same_file: UploadFile, no_warn):
-        with upload:
-            hash = upload.upload_hash()
-            assert hash is not None
-            assert isinstance(hash, str)
+class TestPublicMetadata(MetadataContract):
 
-        with upload_same_file:
-            assert hash == upload_same_file.upload_hash()
+    @pytest.fixture(scope='function')
+    def md(self, test_dir):
+        md = PublicMetadata(test_dir)
+        md._create([example_calc])
+        return md
 
-    def test_siblings(self, upload: UploadFile, no_warn):
-        with upload:
-            siblings = list(upload.get_siblings('examples_template/template.json'))
-            assert len(siblings) == 4
-            assert all(sibling.endswith('.aux') for sibling in siblings)
 
+class UploadFilesFixtures:
 
-class TestLocalUploadFile(TestUploadFile):
-    @pytest.fixture()
-    def upload_same_file(self, clear_files):
-        upload = UploadFile('__test_upload_id2', local_path=example_file)
-        yield upload
+    @pytest.fixture(scope='function')
+    def test_upload_id(self) -> Generator[str, None, None]:
+        for bucket in [config.files.staging_bucket, config.files.public_bucket]:
+            directory = DirectoryObject(bucket, 'test_upload', prefix=True)
+            if directory.exists():
+                directory.delete()
+        yield 'test_upload'
+        for bucket in [config.files.staging_bucket, config.files.public_bucket]:
+            directory = DirectoryObject(bucket, 'test_upload', prefix=True)
+            if directory.exists():
+                directory.delete()
 
-    @pytest.fixture()
-    def upload(self, clear_files):
-        upload = UploadFile('__test_upload_id', local_path=example_file)
-        yield upload
 
-    def test_delete_upload(self, upload: UploadFile):
-        upload.delete()
-        assert upload.exists()
+class UploadFilesContract(UploadFilesFixtures):
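+    """ Test contract shared by all UploadFiles implementations; subclasses provide the upload fixtures. """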
 
+    @pytest.fixture(scope='function', params=['r'])
+    def test_upload(self, request, test_upload_id) -> UploadFiles:
+        raise NotImplementedError()
 
-@pytest.fixture(scope='function')
-def archive_log(clear_files, archive_config):
-    archive_log = ArchiveLogFile('__test_upload_hash/__test_calc_hash')
-    with archive_log.open('wt') as f:
-        f.write('This is a test')
+    @pytest.fixture(scope='function')
+    def empty_test_upload(self, test_upload_id) -> Generator[UploadFiles, None, None]:
+        raise NotImplementedError()
 
-    yield archive_log
+    def test_create(self, empty_test_upload):
+        assert UploadFiles.get(empty_test_upload.upload_id).__class__ == empty_test_upload.__class__
 
+    def test_rawfile(self, test_upload):
+        try:
+            with test_upload.raw_file(example_file_mainfile) as f:
+                assert len(f.read()) > 0
+            if not test_upload._is_authorized():
+                assert not test_upload.metadata.get(example_calc_id).get('restricted', False)
+        except Restricted:
+            assert not test_upload._is_authorized()
+            assert test_upload.metadata.get(example_calc_id).get('restricted', False)
+
+    @pytest.mark.parametrize('prefix', [None, 'examples'])
+    def test_raw_file_manifest(self, test_upload: StagingUploadFiles, prefix: str):
+        raw_files = list(test_upload.raw_file_manifest(path_prefix=prefix))
+        assert sorted(file for file in raw_files if file.startswith('examples')) == sorted(example_file_contents)
+
+    @pytest.mark.parametrize('test_logs', [True, False])
+    def test_archive(self, test_upload, test_logs: bool):
+        try:
+            if test_logs:
+                with test_upload.archive_log_file(example_calc_id, 'rt') as f:
+                    assert f.read() == 'archive'
+            else:
+                f = test_upload.archive_file(example_calc_id, 'rt')
+                assert json.load(f) == 'archive'
+
+            if not test_upload._is_authorized():
+                assert not test_upload.metadata.get(example_calc_id).get('restricted', False)
+        except Restricted:
+            assert not test_upload._is_authorized()
+            assert test_upload.metadata.get(example_calc_id).get('restricted', False)
+
+    def test_metadata(self, test_upload):
+        assert_example_calc(test_upload.metadata.get(example_calc_id))
+
+
+def create_staging_upload(upload_id: str, calc_specs: str) -> StagingUploadFiles:
+    """
+    Create an upload according to given spec. Additional arguments are given to
+    the StagingUploadFiles contstructor.
+
+    Arguments:
+        upload_id: The id that should be given to this test upload.
+        calc_specs: A string that determines the properties of the given upload.
+            With letters determining example calcs being public `p` or restricted `p`.
+            The calcs will be copies of calcs in `example_file`.
+            First calc is at top level, following calcs will be put under 1/, 2/, etc.
+    """
+    # strip the public-only marker before iterating, otherwise `P` would be
+    # treated as a calc spec and the final length assert would fail
+    if calc_specs.startswith('P'):
+        public_only = True
+        calc_specs = calc_specs[1:]
+    else:
+        public_only = False
+
+    upload = StagingUploadFiles(upload_id, create=True, is_authorized=lambda: True)
+
+    prefix = 0
+    for calc_spec in calc_specs:
+        upload.add_rawfiles(example_file, prefix=None if prefix == 0 else str(prefix))
+        calc_id = str(int(example_calc_id) + prefix)
+        with upload.archive_file(calc_id, 'wt') as f:
+            f.write('"archive"')
+        with upload.archive_log_file(calc_id, 'wt') as f:
+            f.write('archive')
+        calc = dict(**example_calc)
+        calc['calc_id'] = calc_id
+        if prefix > 0:
+            calc['mainfile'] = os.path.join(str(prefix), calc['mainfile'])
+        if calc_spec == 'r':
+            calc['restricted'] = True
+        elif calc_spec == 'p':
+            calc['restricted'] = False
+        upload.metadata.insert(calc)
+        prefix += 1
+
+    upload._is_authorized = lambda: not public_only
+
+    assert len(upload.metadata) == len(calc_specs)
+    return upload
+
+
+class TestStagingUploadFiles(UploadFilesContract):
+
+    @pytest.fixture(scope='function', params=['r', 'rr', 'pr', 'rp', 'p', 'pp'])
+    def test_upload(self, request, test_upload_id: str) -> StagingUploadFiles:
+        return create_staging_upload(test_upload_id, calc_specs=request.param)
 
-class TestArchiveLogFile:
+    @pytest.fixture(scope='function')
+    def empty_test_upload(self, test_upload_id) -> Generator[UploadFiles, None, None]:
+        yield StagingUploadFiles(test_upload_id, create=True, is_authorized=lambda: True)
+
+    @pytest.mark.parametrize('prefix', [None, 'prefix'])
+    def test_add_rawfiles_zip(self, test_upload_id, prefix):
+        test_upload = StagingUploadFiles(test_upload_id, create=True, is_authorized=lambda: True)
+        test_upload.add_rawfiles(example_file, prefix=prefix)
+        for filepath in example_file_contents:
+            filepath = os.path.join(prefix, filepath) if prefix else filepath
+            with test_upload.raw_file(filepath) as f:
+                content = f.read()
+                if filepath == example_file_mainfile:
+                    assert len(content) > 0
+
+    def test_write_archive(self, test_upload):
+        assert json.load(test_upload.archive_file(example_calc_id, 'rt')) == 'archive'
+
+    def test_calc_id(self, test_upload):
+        assert test_upload.calc_id(example_file_mainfile) is not None
+
+    def test_pack(self, test_upload):
+        test_upload.pack()
+
+    @pytest.mark.parametrize('with_mainfile', [True, False])
+    def test_calc_files(self, test_upload: StagingUploadFiles, with_mainfile):
+        for calc in test_upload.metadata:
+            mainfile = calc['mainfile']
+            calc_files = list(test_upload.calc_files(mainfile, with_mainfile=with_mainfile))
+            assert len(calc_files) == len(example_file_contents) - (0 if with_mainfile else 1)
+            if with_mainfile:
+                for one, two in zip(calc_files, [mainfile] + sorted(example_file_contents[1:])):
+                    assert one.endswith(two)
+                    assert one.startswith(mainfile[:3])
+
+    def test_delete(self, test_upload: StagingUploadFiles):
+        test_upload.delete()
+        assert not test_upload.exists()
+
+    def test_update_metadata(self, test_upload):
+        test_upload.metadata.update(example_calc_id, dict(data='updated'))
+        assert test_upload.metadata.get(example_calc_id)['data'] == 'updated'
+
+
+class TestArchiveBasedStagingUploadFiles(UploadFilesFixtures):
+    def test_create(self, test_upload_id):
+        test_upload = ArchiveBasedStagingUploadFiles(test_upload_id, create=True)
+        shutil.copy(example_file, test_upload.upload_file_os_path)
+        test_upload.extract()
+        assert sorted(list(test_upload.raw_file_manifest())) == sorted(example_file_contents)
+        assert os.path.exists(test_upload.upload_file_os_path)
+
+    def test_local_path(self, test_upload_id):
+        test_upload = ArchiveBasedStagingUploadFiles(test_upload_id, create=True, local_path=example_file)
+        test_upload.extract()
+        assert sorted(list(test_upload.raw_file_manifest())) == sorted(example_file_contents)
+        assert os.path.exists(test_upload.upload_file_os_path)
+
+    def test_invalid(self, test_upload_id):
+        assert ArchiveBasedStagingUploadFiles(test_upload_id, create=True, local_path=example_file).is_valid
+        assert not ArchiveBasedStagingUploadFiles(test_upload_id, create=True).is_valid
+
+
+def create_public_upload(upload_id: str, calc_specs: str, **kwargs):
+    staging_upload = create_staging_upload(upload_id, calc_specs)
+    staging_upload.pack()
+    staging_upload.delete()
+    return PublicUploadFiles(upload_id, **kwargs)
+
+
+class TestPublicUploadFiles(UploadFilesContract):
 
-    def test_archive_log_file(self, archive_log):
-        assert archive_log.exists()
-        with archive_log.open('rt') as f:
-            assert 'test' in f.read()
+    @pytest.fixture(scope='function')
+    def empty_test_upload(self, test_upload_id: str) -> Generator[UploadFiles, None, None]:
+        yield create_public_upload(test_upload_id, calc_specs='', is_authorized=lambda: True)
+
+    @pytest.fixture(scope='function', params=['r', 'rr', 'pr', 'rp', 'p', 'pp', 'Ppr', 'Prp'])
+    def test_upload(self, request, test_upload_id: str) -> PublicUploadFiles:
+        calc_specs = request.param
+        if calc_specs.startswith('P'):
+            public_only = True
+            calc_specs = calc_specs[1:]
+        else:
+            public_only = False
+
+        staging_upload = create_staging_upload(test_upload_id, calc_specs=calc_specs)
+        staging_upload.pack()
+        return PublicUploadFiles(test_upload_id, is_authorized=lambda: not public_only)
diff --git a/tests/test_migration.py b/tests/test_migration.py
new file mode 100644
index 0000000000000000000000000000000000000000..fd4503f2a2901ba08cd852ed1aab045fdd5ae565
--- /dev/null
+++ b/tests/test_migration.py
@@ -0,0 +1,193 @@
+# Copyright 2018 Markus Scheidgen
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#   http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+import pytest
+import os
+import os.path
+from bravado.client import SwaggerClient
+import json
+
+from nomad import infrastructure, coe_repo
+
+from nomad.migration import NomadCOEMigration, SourceCalc
+from nomad.infrastructure import repository_db_connection
+
+from .bravado_flaks import FlaskTestHttpClient
+from tests.conftest import create_repository_db
+from tests.test_api import client as flask_client, create_auth_headers  # noqa pylint: disable=unused-import
+from tests.test_client import client as bravado_client  # noqa pylint: disable=unused-import
+
+test_source_db_name = 'test_nomad_fair_migration_source'
+test_target_db_name = 'test_nomad_fair_migration_target'
+
+
+@pytest.fixture(scope='module')
+def source_repo(monkeysession, repository_db):
+    """
+    Fixture for an example migration source db with:
+    - two users
+    - two calculations (1 per user)
+    - one calculation with all metadata (dataset, ref, comment, coauthor, shared-with)
+    """
+    try:
+        with repository_db_connection(dbname='postgres', with_trans=False) as con:
+            with con.cursor() as cursor:
+                cursor.execute("CREATE DATABASE %s ;" % test_source_db_name)
+    except Exception:
+        pass
+
+    with repository_db_connection(dbname=test_source_db_name, with_trans=False) as con:
+        with con.cursor() as cur:
+            cur.execute(
+                'DROP SCHEMA IF EXISTS public CASCADE;'
+                'CREATE SCHEMA IF NOT EXISTS public;'
+                'GRANT ALL ON SCHEMA public TO postgres;'
+                'GRANT ALL ON SCHEMA public TO public;')
+
+            schema_sql_file, example_data_sql_file = (
+                os.path.join(os.path.dirname(infrastructure.__file__), 'empty_repository_db.sql'),
+                os.path.join('tests', 'data', 'migration', 'example_source_db.sql'))
+
+            for sql_file in [schema_sql_file, example_data_sql_file]:
+                with open(sql_file, 'r') as f:
+                    cur.execute(f.read())
+
+    with create_repository_db(monkeysession, exists=True, readonly=True, dbname=test_source_db_name) as db:
+        yield db
+
+
+@pytest.fixture(scope='function')
+def target_repo(repository_db):
+    with create_repository_db(readonly=False, exists=False, dbname=test_target_db_name) as db:
+        db.execute('TRUNCATE users CASCADE;')
+        yield db
+        db.execute('TRUNCATE uploads CASCADE;')
+
+
+@pytest.fixture(scope='function')
+def migration(source_repo, target_repo):
+    migration = NomadCOEMigration(sites=['tests/data/migration'])
+    yield migration
+
+
+def test_copy_users(migration, target_repo):
+    migration.copy_users(target_repo)
+    assert target_repo.query(coe_repo.User).count() == 3
+    assert target_repo.query(coe_repo.User).filter_by(user_id=1).first().email == 'one'
+    assert target_repo.query(coe_repo.User).filter_by(user_id=2).first().email == 'two'
+
+
+def perform_index(migration, has_indexed, with_metadata, **kwargs):
+    has_source_calc = False
+    for source_calc, total in SourceCalc.index(migration.source, with_metadata=with_metadata, **kwargs):
+        assert source_calc.pid is not None
+        assert source_calc.mainfile in ['1/template.json', '2/template.json']
+        assert source_calc.upload == 'upload'
+        has_source_calc = True
+        assert total == 2
+
+    assert has_source_calc == has_indexed
+
+    test_calc = SourceCalc.objects(mainfile='1/template.json', upload='upload').first()
+    assert test_calc is not None
+
+    if with_metadata:
+        assert test_calc.metadata['uploader'] == 1
+        assert test_calc.metadata['comment'] == 'label1'
+
+
+@pytest.mark.parametrize('with_metadata', [False, True])
+def test_create_index(migration, mockmongo, with_metadata: bool):
+    perform_index(migration, has_indexed=True, drop=True, with_metadata=with_metadata)
+
+
+@pytest.mark.parametrize('with_metadata', [True, False])
+def test_update_index(migration, mockmongo, with_metadata: bool):
+    perform_index(migration, has_indexed=True, drop=True, with_metadata=with_metadata)
+    perform_index(migration, has_indexed=False, drop=False, with_metadata=with_metadata)
+
+
+@pytest.fixture(scope='function')
+def migrate_infra(migration, target_repo, flask_client, worker, monkeysession):
+    """
+    Scenarios to test:
+    - missing upload, extracted, archive, broken archive
+    - upload process failure
+    - upload with no parsable files
+    - calculations with process errors
+    - matching, non matching calculations
+    - too few calculations
+    - too many calculations
+    - not in the index
+
+    All with two calcs, two users (for coauthors)
+    """
+    # at this point, the infrastructure repo still is the source repo
+    indexed = list(migration.index(drop=True, with_metadata=True))
+    assert len(indexed) == 2
+    migration.copy_users(target_repo)
+
+    # once patched below, the infrastructure repo is the target repo
+    def create_client():
+        admin = target_repo.query(coe_repo.User).filter_by(email='admin').first()
+        http_client = FlaskTestHttpClient(flask_client, headers=create_auth_headers(admin))
+        return SwaggerClient.from_url('/swagger.json', http_client=http_client)
+
+    old_repo = infrastructure.repository_db
+    monkeysession.setattr('nomad.infrastructure.repository_db', target_repo)
+    monkeysession.setattr('nomad.client.create_client', create_client)
+
+    yield migration
+
+    monkeysession.setattr('nomad.infrastructure.repository_db', old_repo)
+
+
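+# Each spec names a test data directory under tests/data/migration and the
+# numbers the resulting migration report is expected to contain.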
+migration_test_specs = [
+    ('baseline', dict(migrated=2, source=2)),
+    ('archive', dict(migrated=2, source=2)),
+    ('new_upload', dict(new=2)),
+    ('new_calc', dict(migrated=2, source=2, new=1)),
+    ('missing_calc', dict(migrated=1, source=2, missing=1)),
+    ('missmatch', dict(migrated=2, source=2, diffs=1)),
+    ('failed_calc', dict(migrated=1, source=2, diffs=0, missing=1, failed=1, errors=1)),
+    ('failed_upload', dict(migrated=0, source=2, missing=2, errors=1))
+]
+
+
+@pytest.mark.parametrize('test, assertions', migration_test_specs)
+@pytest.mark.timeout(30)
+def test_migrate(migrate_infra, test, assertions, caplog):
+    uploads_path = os.path.join('tests', 'data', 'migration', test)
+    reports = list(migrate_infra.migrate(
+        *[os.path.join(uploads_path, dir) for dir in os.listdir(uploads_path)]))
+
+    assert len(reports) == 1
+    report = reports[0]
+    assert report['total_calcs'] == assertions.get('migrated', 0) + assertions.get('new', 0) + assertions.get('failed', 0)
+
+    assert report['total_source_calcs'] == assertions.get('source', 0)
+    assert report['migrated_calcs'] == assertions.get('migrated', 0)
+    assert report['calcs_with_diffs'] == assertions.get('diffs', 0)
+    assert report['new_calcs'] == assertions.get('new', 0)
+    assert report['missing_calcs'] == assertions.get('missing', 0)
+
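+    # only count structured ERROR/CRITICAL records that carry a source_upload_id,
+    # i.e. errors attributable to the migration itself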
+    errors = 0
+    for record in caplog.get_records(when='call'):
+        if record.levelname in ['ERROR', 'CRITICAL']:
+            record_data = json.loads(record.getMessage())
+            if 'source_upload_id' in record_data:
+                errors += 1
+
+    assert errors == assertions.get('errors', 0)
diff --git a/tests/test_normalizing.py b/tests/test_normalizing.py
index ac3d621e6af0bc538230d21bfb0a3a391fe97941..95bad958875c57a6036c489c6d7682e499bdc6d9 100644
--- a/tests/test_normalizing.py
+++ b/tests/test_normalizing.py
@@ -51,20 +51,20 @@ def normalized_template_example(parsed_template_example) -> LocalBackend:
     return run_normalize(parsed_template_example)
 
 
+def test_template_example_normalizer(parsed_template_example, no_warn):
+    run_normalize(parsed_template_example)
+
+
 def assert_normalized(backend):
-    # The assertions are based on the quanitites need for the repository.
-    assert backend.get_value('atom_species', 0) is not None
-    assert backend.get_value('system_type', 0) is not None
-    assert backend.get_value('chemical_composition', 0) is not None
-    assert backend.get_value('chemical_composition_bulk_reduced', 0) is not None
-    # The below tests are not always present for non-periodic
-    # cells that don't have a simulation_cell or lattice_vectors.
-    if backend.get_value('system_type', 0) not in ['Atom', 'Molecule / Cluster']:
-        assert backend.get_value('crystal_system', 0) is not None
-        assert backend.get_value('space_group_number', 0) is not None
-    # The NWChem example for MD does not have functional information in its output.
-    if backend.get_value('program_name', 0) != 'NWChem':
-        assert backend.get_value('XC_functional_name', 0) is not None
+    metadata = backend.metadata()['section_repository_info']['section_repository_parserdata']
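+    # every quantity defined under section_repository_parserdata must be set,
+    # both as a backend value and in the computed metadata dict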
+    count = 0
+    for metainfo in backend.metaInfoEnv().infoKindEls():
+        if 'section_repository_parserdata' in metainfo.superNames:
+            count += 1
+            assert backend.get_value(metainfo.name, 0) is not None
+            assert metadata.get(metainfo.name, None) is not None
+    assert count > 0
 
 
 def test_normalizer(normalized_example: LocalBackend, no_warn):
@@ -83,4 +83,3 @@ def test_normalizer_faulty_matid(
 
     assert_log(caplog, 'ERROR', unknown_class_error)
     assert_log(caplog, 'ERROR', wrong_class_for_no_sim_cell)
-
diff --git a/tests/test_parsing.py b/tests/test_parsing.py
index 7277308df193f1936ed87fb49d0d93ddf634292e..96a5ded65410e8d8a4e7b9aa659f802f165be7a5 100644
--- a/tests/test_parsing.py
+++ b/tests/test_parsing.py
@@ -59,6 +59,16 @@ class TestLocalBackend(object):
     def test_meta_info(self, meta_info, no_warn):
         assert 'section_topology' in meta_info
 
+    def test_metadata(self, backend, no_warn):
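+        # provide the minimal calc and repository info that backend.metadata()
+        # serializes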
+        g_index = backend.openSection('section_calculation_info')
+        assert g_index == 0
+        backend.addValue('calc_id', 't0')
+        backend.closeSection('section_calculation_info', 0)
+        g_index = backend.openSection('section_repository_info')
+        backend.addValue('repository_calc_id', 1)
+        backend.closeSection('section_repository_info', 0)
+        assert json.dumps(backend.metadata()) is not None
+
     def test_section(self, backend, no_warn):
         g_index = backend.openSection('section_run')
         assert g_index == 0
@@ -226,7 +236,8 @@ def assert_parser_result(backend):
 
 def run_parser(parser_name, mainfile):
     parser = parser_dict[parser_name]
-    return parser.run(mainfile, logger=utils.get_logger(__name__))
+    result = parser.run(mainfile, logger=utils.get_logger(__name__))
+    return add_calculation_info(result)
 
 
 @pytest.fixture
@@ -249,9 +260,21 @@ def parsed_faulty_unknown_matid_example(caplog, request) -> LocalBackend:
 
 
 @pytest.fixture(params=parser_examples, ids=lambda spec: '%s-%s' % spec)
-def parsed_example(caplog, request) -> LocalBackend:
+def parsed_example(request) -> LocalBackend:
     parser_name, mainfile = request.param
-    return run_parser(parser_name, mainfile)
+    result = run_parser(parser_name, mainfile)
+    return result
+
+
+def add_calculation_info(backend: LocalBackend) -> LocalBackend:
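+    # stamp the backend with the identification data that the processing
+    # pipeline would normally add before the backend is used further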
+    backend.openNonOverlappingSection('section_calculation_info')
+    backend.addValue('upload_id', 'test_upload_id')
+    backend.addValue('calc_id', 'test_calc_id')
+    backend.addValue('calc_hash', 'test_calc_hash')
+    backend.addValue('main_file', 'test/mainfile.txt')
+    backend.addValue('parser_name', 'testParser')
+    backend.closeNonOverlappingSection('section_calculation_info')
+    return backend
 
 
 @pytest.mark.parametrize('parser_name, mainfile', parser_examples)
diff --git a/tests/test_repo.py b/tests/test_repo.py
deleted file mode 100644
index 7731b0355b9ebe5824a601a848b71ed9ab752109..0000000000000000000000000000000000000000
--- a/tests/test_repo.py
+++ /dev/null
@@ -1,126 +0,0 @@
-# Copyright 2018 Markus Scheidgen
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-#   http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an"AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-
-import pytest
-from typing import Generator
-from datetime import datetime
-from elasticsearch import NotFoundError
-
-from nomad.files import ArchiveFile, UploadFile
-from nomad.parsing import LocalBackend
-from nomad.repo import AlreadyExists, RepoCalc
-
-from tests.test_files import example_file  # noqa
-from tests.test_normalizing import normalized_template_example  # pylint: disable=unused-import
-from tests.test_parsing import parsed_template_example  # pylint: disable=unused-import
-
-
-@pytest.fixture(scope='function')
-def example_elastic_calc(normalized_template_example: LocalBackend, elastic, test_user) \
-        -> Generator[RepoCalc, None, None]:
-
-    upload_file = UploadFile('test_upload_id', local_path=example_file)
-    mainfile = next(filename for filename in upload_file.filelist if 'template.json' in filename)
-    auxfiles = list(upload_file.get_siblings(mainfile))
-
-    try:
-        calc = RepoCalc.get(id='test_upload_hash/test_calc_hash')
-    except NotFoundError:
-        pass
-    else:
-        calc.delete()
-
-    entry = RepoCalc.create_from_backend(
-        normalized_template_example,
-        upload_hash='test_upload_hash',
-        calc_hash='test_calc_hash',
-        upload_id='test_upload_id',
-        additional=dict(
-            mainfile=mainfile,
-            upload_time=datetime.now(),
-            staging=True, restricted=False, user_id=str(test_user.user_id),
-            aux_files=auxfiles))
-
-    entry.persist(refresh='true')
-
-    yield entry
-
-    try:
-        calc = RepoCalc.get(id='test_upload_hash/test_calc_hash')
-    except NotFoundError:
-        pass
-    else:
-        calc.delete()
-
-
-def assert_elastic_calc(calc: RepoCalc):
-    assert calc is not None
-    for property in RepoCalc._doc_type.mapping:
-        assert getattr(calc, property) is not None
-
-    assert len(getattr(calc, 'aux_files')) > 0
-
-
-def test_create_elastic_calc(example_elastic_calc: RepoCalc, no_warn):
-    assert_elastic_calc(example_elastic_calc)
-    assert RepoCalc.upload_exists(example_elastic_calc.upload_hash)
-
-    get_result: RepoCalc = RepoCalc.get(
-        id='%s/%s' % (example_elastic_calc.upload_hash, example_elastic_calc.calc_hash))
-    assert_elastic_calc(get_result)
-
-
-def test_create_existing_elastic_calc(
-        example_elastic_calc: RepoCalc, normalized_template_example, test_user):
-
-    calc = RepoCalc.create_from_backend(
-        normalized_template_example,
-        upload_hash='test_upload_hash',
-        calc_hash='test_calc_hash',
-        upload_id='test_upload_id',
-        additional=dict(
-            mainfile='/test/mainfile',
-            upload_time=datetime.now(),
-            staging=True, restricted=False, user_id=str(test_user.user_id)))
-
-    try:
-        calc.persist(refresh='true')
-        assert False
-    except AlreadyExists:
-        pass
-    else:
-        assert False
-
-
-def test_delete_elastic_calc(example_elastic_calc: RepoCalc):
-    example_elastic_calc.delete()
-
-    assert not ArchiveFile('test_upload_hash/test_calc_hash').exists()
-    try:
-        RepoCalc.get(id='test_upload_hash/test_calc_hash')
-        assert False
-    except NotFoundError:
-        pass
-    else:
-        assert False
-
-
-def test_staging_elastic_calc(example_elastic_calc: RepoCalc, no_warn):
-    assert RepoCalc.get(id='test_upload_hash/test_calc_hash').staging
-
-
-def test_unstage_elastic_calc(example_elastic_calc: RepoCalc, no_warn):
-    RepoCalc.unstage(upload_id='test_upload_id', staging=False)
-
-    assert not RepoCalc.get(id='test_upload_hash/test_calc_hash').staging
diff --git a/tests/test_search.py b/tests/test_search.py
new file mode 100644
index 0000000000000000000000000000000000000000..7ad2d8301b933f14306229d7d2790ab42fc79b0f
--- /dev/null
+++ b/tests/test_search.py
@@ -0,0 +1,19 @@
+# Copyright 2018 Markus Scheidgen
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#   http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+import nomad.search  # pylint: disable=W0611
+
+
+def test_helloworld(elastic):
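+    # smoke test: importing nomad.search and creating the elastic fixture must succeed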
+    pass
diff --git a/tests_integration/test_client.py b/tests_integration/test_client.py
new file mode 100644
index 0000000000000000000000000000000000000000..da37982081adb5411b860f1c6d770c3f17be1e64
--- /dev/null
+++ b/tests_integration/test_client.py
@@ -0,0 +1,32 @@
+# Copyright 2018 Markus Scheidgen
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#   http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+import pytest
+
+from nomad.client import create_client, upload_file
+
+from tests.test_files import example_file
+
+
+@pytest.fixture(scope='session')
+def client():
+    return create_client()
+
+
+def test_client(client):
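+    # a simple read against the live API must complete without raising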
+    client.repo.get_calcs().response()
+
+
+def test_upload(client):
+    upload_file(example_file, client=client)