diff --git a/.gitlab-ci.yml b/.gitlab-ci.yml
index 1f082562b9b7b9edea0684a7e7a33257b037aa8d..32d9307cda156265ea6b390a084141d2a6f96bde 100644
--- a/.gitlab-ci.yml
+++ b/.gitlab-ci.yml
@@ -434,4 +434,4 @@ push to github:
     - git checkout ${CI_COMMIT_REF_NAME}
     - git push "https://${CI_GITHUB_ACCESS_TOKEN}@github.com/nomad-coe/nomad.git" ${CI_COMMIT_REF_NAME}
   rules:
-    - if: $CI_COMMIT_BRANCH == "develop" || $CI_COMMIT_TAG
\ No newline at end of file
+    - if: $CI_COMMIT_BRANCH == "develop" || $CI_COMMIT_TAG
diff --git a/Dockerfile b/Dockerfile
index 30a81ef4cc87415ad2f4d77c58492303bf200487..a4f4e6bf796a198bfd6b7505bcbd2070985a38c9 100644
--- a/Dockerfile
+++ b/Dockerfile
@@ -187,16 +187,16 @@ RUN find /usr/local/lib/python3.7/ -type d -name 'tests' ! -path '*/networkx/*'
 
 FROM base_python AS final
 
-RUN curl -fsSL https://deb.nodesource.com/setup_16.x | bash - \
- && apt-get update \
+RUN apt-get update \
  && apt-get install --yes --quiet --no-install-recommends \
-      nodejs \
-      npm \
-      libgomp1 \
-      libmagic1 \
-      curl \
-      zip \
-      unzip \
+       libgomp1 \
+       libmagic1 \
+       curl \
+       zip \
+       unzip \
+ && curl -fsSL https://deb.nodesource.com/setup_16.x | bash - \
+ && apt-get install --yes --quiet --no-install-recommends \
+       nodejs \
  && rm -rf /var/lib/apt/lists/* \
  && npm install -g configurable-http-proxy \
  && npm uninstall -g npm
diff --git a/dependencies/nomad-remote-tools-hub b/dependencies/nomad-remote-tools-hub
index 9aec5e9a4eeebbdf42c9b19bf83753fd4ab61fb4..816bc8d73ea0a59919fffbc4040112e3f326cdbb 160000
--- a/dependencies/nomad-remote-tools-hub
+++ b/dependencies/nomad-remote-tools-hub
@@ -1 +1 @@
-Subproject commit 9aec5e9a4eeebbdf42c9b19bf83753fd4ab61fb4
+Subproject commit 816bc8d73ea0a59919fffbc4040112e3f326cdbb
diff --git a/gui/src/components/api.js b/gui/src/components/api.js
index a269ab1109a1b935a837ff884002c673d86cd891..414232b8d4cfaaee2527ef1231b4e46aade7df56 100644
--- a/gui/src/components/api.js
+++ b/gui/src/components/api.js
@@ -31,7 +31,6 @@ import { useKeycloak } from '@react-keycloak/web'
 import axios from 'axios'
 import { useErrors } from './errors'
 import * as searchQuantities from '../searchQuantities.json'
-import { NorthApi } from './north/northApi'
 
 export class DoesNotExist extends Error {
   constructor(msg) {
@@ -397,7 +396,6 @@ export const APIProvider = React.memo(({
 
   const value = useMemo(() => ({
     api: api,
-    northApi: user ? new NorthApi(api, `users/${user.preferred_username}`) : null,
     resourcesApi: new ResourcesApi(api),
     user: user
   }), [api, user])
diff --git a/gui/src/components/north/NorthTool.js b/gui/src/components/north/NorthTool.js
index f1738aaabdebdf64394ad787d0bb03ab6f9167cb..66e346cfd72ec95aee6e4fd642d8f91b7da481cf 100644
--- a/gui/src/components/north/NorthTool.js
+++ b/gui/src/components/north/NorthTool.js
@@ -36,8 +36,8 @@ export function useNorthTool() {
 }
 
 const launchButtonLabels = {
-  'idle': 'Launch',
-  'launching': 'Launching...',
+  'stopped': 'Launch',
+  'starting': 'Launching...',
   'running': 'Open',
   'stopping': 'Launch'
 }
@@ -55,7 +55,7 @@ export const NorthToolButtons = React.memo(function NorthToolButton() {
   const {name, launch, stop, state} = useNorthTool()
   return (
     <Box display="flex" flexDirection="row">
-      <LaunchButton fullWidth name={name} onClick={launch} disabled={state === 'stopping' || state === 'launching' || !state}>
+      <LaunchButton fullWidth name={name} onClick={launch} disabled={state === 'stopping' || state === 'starting' || !state}>
         {launchButtonLabels[state] || 'not available'}
       </LaunchButton>
       {(state === 'running' || state === 'stopping') && (
@@ -90,55 +90,19 @@ const useStyles = makeStyles(theme => ({
 }))
 
 const NorthTool = React.memo(function NorthTool({tool, uploadId, path, children}) {
-  const {name, title, version, description, short_description, path_prefix, icon} = tool
+  const {name, title, version, description, short_description, icon} = tool
   const styles = useStyles()
-  const {northApi, user} = useApi()
+  const {api} = useApi()
   const {raiseError} = useErrors()
 
-  const [state, setState] = useState()
-
-  const toolUrl = useMemo(() => {
-    if (!user) {
-      return null
-    }
-    let toolPath = ''
-    if (path_prefix) {
-      toolPath += `/${path_prefix}`
-    }
-    if (uploadId) {
-      toolPath += `/uploads/${uploadId}`
-    }
-    if (path) {
-      toolPath += `/${path}`
-    }
-    const toolUrl = `${northBase}/user/${user.preferred_username}/${name}${toolPath}`
-    return toolUrl
-  }, [user, name, path_prefix, uploadId, path])
+  const [state, setState] = useState('stopped')
 
   const getToolStatus = useCallback(() => {
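+    // Gets the tool's current state (stopped, starting, running, stopping) from the NOMAD API.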
-    if (northApi === null) {
-      return
-    }
-    return northApi.get(`servers/${name}/progress`)
-      .then((response) => {
-        const data = JSON.parse(response.data.substr(6))
-        if (data.ready) {
-          return 'running'
-        } else {
-          return 'launching'
-        }
-      })
-      .catch(error => {
-        if (error?.response?.status === 404 || error?.response?.status === 400) {
-          return 'idle'
-        } else if (error.code === 'ERR_NETWORK') {
-          // north is unavailable
-          return undefined
-        } else {
-          raiseError(error)
-        }
-      })
-  }, [northApi, raiseError, name])
+    return api.get(`north/${name}`)
+      .then(response => {
+        return response.data.state
+      }).catch(raiseError)
+  }, [api, raiseError, name])
 
   useEffect(() => {
     const toolStatus = getToolStatus()
@@ -151,34 +115,32 @@ const NorthTool = React.memo(function NorthTool({tool, uploadId, path, children}
 
   const launch = useCallback(() => {
     // We get the current actual tools status and do not use the one used to display the status!
-    getToolStatus().then((toolStatus) => {
-      if (toolStatus === 'running') {
+    setState('starting')
+    api.post(`north/${name}`)
+      .then((response) => {
+        // const toolUrl = `${northBase}/${response.data.upload_urls[uploadId]}/${path}`
+        // name == response.tool == response.data.name
+        const toolUrl = `${northBase}/user/${response.username}/${response.tool}`
         window.open(toolUrl, name)
-        setState(toolStatus)
-      } else {
-        setState('launching')
-        northApi.post(`servers/${name}`)
-          .then((response) => {
-            window.open(toolUrl, name)
-            setState('running')
-          })
-          .catch(errors => {
-            raiseError(errors)
-            setState('idle')
-          })
-      }
-    })
-  }, [setState, northApi, raiseError, name, toolUrl, getToolStatus])
+        setState(response.data.state)
+      })
+      .catch(errors => {
+        raiseError(errors)
+        setState('stopped')
+      })
+  }, [setState, api, raiseError, name])
 
   const stop = useCallback(() => {
     setState('stopping')
-    northApi.delete(`servers/${name}`)
+    api.delete(`north/${name}`)
       .then((response) => {
         console.log(response)
-        setState('idle')
+        setState('stopped')
       })
       .catch(raiseError)
-  }, [northApi, raiseError, setState, name])
+  }, [api, raiseError, setState, name])
 
   const value = useMemo(() => ({
     state: state,
diff --git a/gui/src/components/north/northApi.js b/gui/src/components/north/northApi.js
deleted file mode 100644
index eb837c8c2b5a24cd784538461e1086dc6c97485f..0000000000000000000000000000000000000000
--- a/gui/src/components/north/northApi.js
+++ /dev/null
@@ -1,89 +0,0 @@
-/*
- * Copyright The NOMAD Authors.
- *
- * This file is part of NOMAD. See https://nomad-lab.eu for further info.
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- *     http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-import axios from 'axios'
-import { northBase } from '../../config'
-
-export class NorthApi {
-  constructor(api, base) {
-    this.api = api
-    this.apiKey = null
-    this.axios = axios.create({
-      baseURL: `${northBase}/hub/api${base ? '/' + base : ''}`
-    })
-  }
-
-  async getApiKey() {
-    if (this.apiKey) {
-      return this.apiKey
-    }
-
-    if (!this.api.keycloak.token) {
-      throw Error('User does not have a token. This should not happen')
-    }
-
-    const config = {
-      headers: {
-        accept: 'application/json',
-        Authorization: `Bearer ${this.api.keycloak.token}`
-      }
-    }
-
-    const response = await this.axios.post('tokens', null, config)
-    this.apiKey = response.data.token
-
-    return this.apiKey
-  }
-
-  async get(path, query, config) {
-    const method = (path, body, config) => this.axios.get(path, config)
-    return this.doHttpRequest(method, path, null, {params: query, ...config})
-  }
-
-  async post(path, body, config) {
-    const method = (path, body, config) => this.axios.post(path, body, config)
-    return this.doHttpRequest(method, path, body, config)
-  }
-
-  async put(path, body, config) {
-    const method = (path, body, config) => this.axios.put(path, body, config)
-    return this.doHttpRequest(method, path, body, config)
-  }
-
-  async delete(path, config) {
-    const method = (path, body, config) => this.axios.delete(path, config)
-    return this.doHttpRequest(method, path, null, config)
-  }
-
-  async doHttpRequest(method, path, body, config) {
-    const apiKey = await this.getApiKey()
-    config = config || {}
-    config.params = config.params || {}
-    config.headers = config.headers || {
-      accept: 'application/json',
-      Authorization: `token ${apiKey}`
-    }
-    try {
-      return method(path, body, config)
-    } catch (errors) {
-      if (config.noHandleErrors) {
-        throw errors
-      }
-    }
-  }
-}
diff --git a/gui/src/northTools.json b/gui/src/northTools.json
index 2266ada97d9dca842283ce1ae6ea755b451cbd96..dfdee8a35d498f39064dd29c40edfd4eec6db226 100644
--- a/gui/src/northTools.json
+++ b/gui/src/northTools.json
@@ -24,6 +24,7 @@
         "short_description": "Run NionSwift to analyze data as well as prepare focus series reconstructions",
         "description": "Run Nion Swift to analyze data.",
         "image": "gitlab-registry.mpcdf.mpg.de/nomad-lab/nomad-remote-tools-hub/nionswift-webtop:latest",
+        "privileged": true,
         "mount_path": "/config",
         "maintainer": [
             {
@@ -35,6 +36,7 @@
     "nexustools": {
         "description": "Includes multiple NeXus tools for visualization and analysis.",
         "image": "gitlab-registry.mpcdf.mpg.de/nomad-lab/nomad-remote-tools-hub/nexus-webtop:latest",
+        "privileged": true,
         "mount_path": "/config",
         "file_extensions": [
             "nxs",
@@ -79,6 +81,7 @@
         "short_description": "An example for analyzing mpes data.",
         "description": "This example presents the capabilities of the NOMAD platform to store and standardize multi photoemission spectroscopy (MPES) experimental data. It contains three major examples:\n\n- Taking a pre-binned file, here stored in a h5 file, and converting it into the standardized MPES NeXus format. There exists a [NeXus application definition for MPES](https://manual.nexusformat.org/classes/contributed_definitions/NXmpes.html#nxmpes) which details the internal structure of such a file.\n- Binning of raw data (see [here](https://www.nature.com/articles/s41597-020-00769-8) for additional resources) into a h5 file and consecutively generating a NeXus file from it.\n- An analysis example using data in the NeXus format and employing the [pyARPES](https://github.com/chstan/arpes) analysis tool to reproduce the main findings of [this paper](https://arxiv.org/pdf/2107.07158.pdf).",
         "image": "gitlab-registry.mpcdf.mpg.de/nomad-lab/nomad-remote-tools-hub/mpes-webtop",
+        "privileged": true,
         "mount_path": "/config",
         "file_extensions": [
             "ipynb",
@@ -120,6 +123,7 @@
     "webtop": {
         "description": "Baseline webtop image for test",
         "image": "gitlab-registry.mpcdf.mpg.de/nomad-lab/nomad-remote-tools-hub/webtop",
+        "privileged": true,
         "mount_path": "/config",
         "maintainer": [
             {
@@ -132,6 +136,7 @@
         "short_description": "An example for analyzing atom probe data.",
         "description": "Miscellaneous tools from the atom probe community:\nCurrently the Leoben APT_analyzer and the paraprobe-toolbox.",
         "image": "gitlab-registry.mpcdf.mpg.de/nomad-lab/nomad-remote-tools-hub/apmtools-webtop",
+        "privileged": true,
         "icon": "jupyter_logo.svg",
         "mount_path": "/config",
         "maintainer": [
@@ -158,6 +163,7 @@
         "short_description": "Inline electron holography by C. Koch",
         "description": "FRWR3 in-line holography/focus series reconstruction code",
         "image": "gitlab-registry.mpcdf.mpg.de/nomad-lab/nomad-remote-tools-hub/frwr-webtop",
+        "privileged": true,
         "icon": "jupyter_logo.svg",
         "mount_path": "/config",
         "maintainer": [
@@ -171,6 +177,7 @@
         "short_description": "Electronic structure supported image simulation for transmission electron microscopy.",
         "description": "VESTA, GPAW, and abTEM configured in one container for simulating images and diffraction patterns in transmission electron microscopy",
         "image": "gitlab-registry.mpcdf.mpg.de/nomad-lab/nomad-remote-tools-hub/abtem-webtop",
+        "privileged": true,
         "icon": "jupyter_logo.svg",
         "mount_path": "/config",
         "maintainer": [
diff --git a/nomad/app/v1/main.py b/nomad/app/v1/main.py
index ac3aa63ced6029b84778c22b325d42182f7e4ec6..b5b4392033f3e95c4b51cfa0764311b037e199ad 100644
--- a/nomad/app/v1/main.py
+++ b/nomad/app/v1/main.py
@@ -28,7 +28,7 @@ from nomad import config, utils
 from .common import root_path
 from .routers import (
     users, entries, materials, auth, info, datasets, uploads, suggestions, metainfo,
-    systems
+    north, systems
 )
 
 logger = utils.get_logger(__name__)
@@ -96,4 +96,5 @@ app.include_router(uploads.router, prefix='/uploads')
 app.include_router(metainfo.router, prefix='/metainfo')
 app.include_router(users.router, prefix='/users')
 app.include_router(suggestions.router, prefix='/suggestions')
+app.include_router(north.router, prefix='/north')
 app.include_router(systems.router, prefix='/systems')
diff --git a/nomad/app/v1/routers/north.py b/nomad/app/v1/routers/north.py
new file mode 100644
index 0000000000000000000000000000000000000000..80217e0254b5957854c2b3d2cabe6b58b6cb3d37
--- /dev/null
+++ b/nomad/app/v1/routers/north.py
@@ -0,0 +1,275 @@
+#
+# Copyright The NOMAD Authors.
+#
+# This file is part of NOMAD. See https://nomad-lab.eu for further info.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+#
+
+import os
+import requests
+
+from typing import List, Dict, cast, Optional
+from enum import Enum
+from pydantic import BaseModel
+from fastapi import APIRouter, Depends, status, HTTPException
+from mongoengine.queryset.visitor import Q
+
+from nomad import config
+from nomad.utils import strip, get_logger, slugify
+from nomad.processing import Upload
+from .auth import create_user_dependency, oauth2_scheme
+from ..models import User, HTTPExceptionModel
+from ..utils import create_responses
+
+
+default_tag = 'north'
+router = APIRouter()
+
+hub_api_headers = {'Authorization': f'Bearer {config.north.hub_service_api_token}'}
+logger = get_logger(__name__)
+
+
+class ToolStateEnum(str, Enum):
+    running = 'running'
+    starting = 'starting'
+    stopping = 'stopping'
+    stopped = 'stopped'
+
+
+class ToolModel(config.NorthTool):
+    name: str
+    state: Optional[ToolStateEnum]
+
+
+class ToolResponseModel(BaseModel):
+    tool: str
+    username: str
+    data: ToolModel
+
+
+class ToolsResponseModel(BaseModel):
+    data: List[ToolModel] = []
+
+
+_bad_tool_response = status.HTTP_404_NOT_FOUND, {
+    'model': HTTPExceptionModel,
+    'description': strip('''
+        The tool does not exist.''')}
+
+
+def _get_status(tool: ToolModel, user: User) -> ToolModel:
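+    '''
+    Determines the tool state for the given user by querying the JupyterHub progress
+    endpoint of the user's named server and sets `tool.state` accordingly.
+    '''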
+    if not user:
+        return tool
+
+    url = f'{config.north.hub_url()}/api/users/{user.username}/servers/{tool.name}/progress'
+    response = requests.get(url, headers=hub_api_headers)
+
+    if response.status_code == 404:
+        # The user or the tool does not yet exist
+        tool.state = ToolStateEnum.stopped
+    elif response.status_code == 200:
+        if '"ready": true' in response.text:
+            tool.state = ToolStateEnum.running
+        else:
+            tool.state = ToolStateEnum.starting
+    else:
+        logger.error(
+            'unexpected jupyterhub response', data=dict(status_code=response.status_code),
+            text=response.text)
+        tool.state = ToolStateEnum.stopped
+
+    return tool
+
+
+@router.get(
+    '/', tags=[default_tag],
+    response_model=ToolsResponseModel,
+    summary='Get a list of all configured tools and their current state.',
+    response_model_exclude_unset=True,
+    response_model_exclude_none=True
+)
+async def get_tools(user: User = Depends(create_user_dependency())):
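+    ''' Returns all configured tools together with their current state for the given user. '''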
+    return ToolsResponseModel(
+        data=[
+            _get_status(ToolModel(name=name, **tool.dict()), user)
+            for name, tool in cast(Dict[str, config.NorthTool], config.north.tools).items()
+        ]
+    )
+
+
+async def tool(name: str) -> ToolModel:
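+    ''' Dependency that resolves the tool name from the path into its configured ToolModel or raises 404. '''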
+    if name not in config.north.tools:
+        raise HTTPException(
+            status_code=status.HTTP_404_NOT_FOUND,
+            detail='The tool does not exist.')
+
+    tool = cast(Dict[str, config.NorthTool], config.north.tools)[name]
+    return ToolModel(name=name, **tool.dict())
+
+
+@router.get(
+    '/{name}', tags=[default_tag],
+    summary='Get information for a specific tool.',
+    response_model=ToolResponseModel,
+    responses=create_responses(_bad_tool_response),
+    response_model_exclude_unset=True,
+    response_model_exclude_none=True
+)
+async def get_tool(
+    tool: ToolModel = Depends(tool),
+    user: User = Depends(create_user_dependency(required=True))
+):
+    return ToolResponseModel(
+        tool=tool.name,
+        username=user.username,
+        data=_get_status(tool, user))
+
+
+@router.post(
+    '/{name}', tags=[default_tag],
+    response_model=ToolResponseModel,
+    summary='Start a tool.',
+    response_model_exclude_unset=True,
+    response_model_exclude_none=True
+)
+async def start_tool(
+    tool: ToolModel = Depends(tool),
+    access_token: str = Depends(oauth2_scheme),
+    user: User = Depends(create_user_dependency(required=True))
+):
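+    '''
+    Launches the tool as a JupyterHub named server for the given user. Ensures that the
+    hub user and home folder exist and mounts the user's unpublished uploads.
+    '''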
+    tool.state = ToolStateEnum.stopped
+
+    # Make sure the user exists
+    url = f'{config.north.hub_url()}/api/users/{user.username}'
+    response = requests.get(url, headers=hub_api_headers)
+    if response.status_code == 404:
+        response = requests.post(url, headers=hub_api_headers)
+        if response.status_code == 200:
+            logger.info('created north user', user_id=user.user_id)
+        else:
+            # TODO
+            logger.error('could not create north user', user_id=user.user_id)
+
+    # Make sure that the home folder of the user exists
+    user_home = os.path.join(config.fs.north_home, user.user_id)
+    if not os.path.exists(user_home):
+        os.makedirs(user_home)
+
+    def truncate(path_name):
+        # On Linux: The maximum length for a file name is 255 bytes
+        return path_name[:230]
+
+    user_id = str(user.user_id)
+    upload_query = Q()
+    upload_query &= Q(main_author=user_id) | Q(coauthors=user_id)
+    upload_query &= Q(publish_time=None)
+
+    uploads: List[Dict] = []
+    for upload in Upload.objects.filter(upload_query):
+
+        if not hasattr(upload.upload_files, 'external_os_path'):
+            # Skip uploads whose files do not provide an external path (e.g. missing files)
+            logger.info('skipping upload without an external path', upload_id=upload.upload_id)
+            continue
+
+        if upload.upload_name:
+            upload_dir = f'uploads/{truncate(slugify(upload.upload_name))}-{upload.upload_id}'
+        else:
+            upload_dir = f'uploads/{upload.upload_id}'
+
+        uploads.append(
+            {
+                'host_path': os.path.join(upload.upload_files.external_os_path, 'raw'),
+                'mount_path': os.path.join(tool.mount_path, upload_dir)
+            }
+        )
+
+    # Check if the tool/named server already exists
+    _get_status(tool, user)
+    if tool.state != ToolStateEnum.stopped:
+        return ToolResponseModel(
+            tool=tool.name,
+            username=user.username,
+            data=_get_status(tool, user))
+
+    url = f'{config.north.hub_url()}/api/users/{user.username}/servers/{tool.name}'
+    body = {
+        'tool': {
+            'image': tool.image,
+            'cmd': tool.cmd,
+            'privileged': tool.privileged
+        },
+        'environment': {
+            'SUBFOLDER': f'{config.services.api_base_path.rstrip("/")}/north/user/{user.username}/',
+            'JUPYTERHUB_CLIENT_API_URL': f'{config.north_url()}/hub/api'
+        },
+        'user_home': {
+            'host_path': os.path.join(config.fs.north_home_external, user.user_id),
+            'mount_path': os.path.join(tool.mount_path, 'work')
+        },
+        'uploads': uploads
+    }
+
+    logger.info('body of the post call', body=body)
+
+    response = requests.post(url, json=body, headers=hub_api_headers)
+
+    if response.status_code == 400 and 'is already running' in response.json()['message']:
+        tool.state = ToolStateEnum.running
+    elif response.status_code == 201:
+        tool.state = ToolStateEnum.running
+    elif response.status_code == 202:
+        tool.state = ToolStateEnum.starting
+    else:
+        logger.error(
+            'unexpected jupyterhub response', data=dict(status_code=response.status_code),
+            text=response.text)
+        tool.state = ToolStateEnum.stopped
+
+    return ToolResponseModel(
+        tool=tool.name,
+        username=user.username,
+        data=_get_status(tool, user))
+
+
+@router.delete(
+    '/{name}', tags=[default_tag],
+    response_model=ToolResponseModel,
+    summary='Stop a tool.',
+    response_model_exclude_unset=True,
+    response_model_exclude_none=True
+)
+async def stop_tool(
+    tool: ToolModel = Depends(tool),
+    user: User = Depends(create_user_dependency(required=True))
+):
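+    ''' Stops and removes the user's named server for this tool via the JupyterHub API. '''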
+    url = f'{config.north.hub_url()}/api/users/{user.username}/servers/{tool.name}'
+    response = requests.delete(url, json={'remove': True}, headers=hub_api_headers)
+
+    if response.status_code == 404:
+        tool.state = ToolStateEnum.stopped
+    elif response.status_code == 204:
+        tool.state = ToolStateEnum.stopped
+    elif response.status_code == 202:
+        tool.state = ToolStateEnum.stopping
+    else:
+        logger.error(
+            'unexpected jupyterhub response', data=dict(status_code=response.status_code),
+            text=response.text)
+        tool.state = ToolStateEnum.stopped
+
+    return ToolResponseModel(
+        tool=tool.name,
+        username=user.username,
+        data=_get_status(tool, user))
diff --git a/nomad/config.py b/nomad/config.py
index 1303c4d346675aef783d2a2eed2924524d5a34b6..13dfb7c776c9be6c9bf86ceeb38b23e515aa0255 100644
--- a/nomad/config.py
+++ b/nomad/config.py
@@ -38,9 +38,10 @@ import inspect
 import os.path
 import yaml
 import warnings
+import json
 from typing import TypeVar, List, Dict, Any, Union, cast
 from pkg_resources import get_distribution, DistributionNotFound
-from pydantic import BaseModel, Field
+from pydantic import BaseModel, Field, validator
 
 try:
     __version__ = get_distribution("nomad-lab").version
@@ -260,6 +261,24 @@ _jupyterhub_config_description = '''
 '''
 
 
+class NorthToolMaintainer(BaseModel):
+    name: str
+    email: str
+
+
+class NorthTool(BaseModel):
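+    ''' Definition of a NORTH tool, e.g. as read from the configured tools .json file. '''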
+    image: str
+    description: str = None
+    short_description: str = None
+    cmd: str = None
+    privileged: bool = None
+    path_prefix: str = None
+    mount_path: str = None
+    icon: str = None
+    file_extensions: List[str] = []
+    maintainer: List[NorthToolMaintainer] = []
+
+
 class North(NomadSettings):
     '''
     Settings related to the operation of the NOMAD remote tools hub service *north*.
@@ -282,19 +301,31 @@ class North(NomadSettings):
     ''')
     jupyterhub_crypt_key: str = Field(None, description=_jupyterhub_config_description)
 
-    shared_fs = Field('.volumes/fs/north/shared', description='''
-        Path to the shared folder on the host machine. This is mounted into spawned
-        containers to be shared by all users.
-    ''')
-    users_fs = Field('.volumes/fs/north/users', description='''
-        Path to a folder on the host machine. Sub-directories with the username are mounted
-        into spawned containers to persist files per user.
-    ''')
     nomad_host: str = Field(
         None, description='The NOMAD app host name that spawned containers use.')
     windows = Field(
         True, description='Enable windows OS hacks.')
 
+    tools: Union[str, Dict[str, NorthTool]] = Field(
+        'dependencies/nomad-remote-tools-hub/tools.json',
+        description='The available north tools. Either the tool definitions as a dict or a path to a .json file.')
+
+    hub_service_api_token: str = Field('secret-token', description='''
+        A secret token shared between NOMAD and the NORTH jupyterhub.
+        This needs to be the token of an admin service.''')
+
+    def hub_url(self):
+        return f'http://{self.hub_host}:{self.hub_port}{services.api_base_path}/north/hub'
+
+    @validator('tools', pre=True, always=True)
+    def load_tools(cls, v):  # pylint: disable=no-self-argument
+        if isinstance(v, str):
+            # interpret as file
+            with open(v, 'rt') as f:
+                v = json.load(f)
+
+        return v
+
 
 north = North()
 
@@ -340,6 +371,8 @@ class FS(NomadSettings):
     staging_external: str = None
     public = '.volumes/fs/public'
     public_external: str = None
+    north_home = '.volumes/fs/north/users'
+    north_home_external: str = None
     local_tmp = '/tmp'
     prefix_size = 2
     archive_version_suffix = 'v1'
@@ -1426,6 +1459,8 @@ def _check_config():
         AssertionError: if there is a contradiction or invalid values in the
             config file settings.
     """
+    # TODO more of this should be translated into pydantic validations.
+
     # The AFLOW symmetry information is checked once on import
     proto_symmetry_tolerance = normalize.prototype_symmetry_tolerance
     symmetry_tolerance = normalize.symmetry_tolerance
@@ -1443,27 +1478,19 @@ def _check_config():
     if keycloak.public_server_url is None:
         keycloak.public_server_url = keycloak.server_url
 
-    def set_external_path(source_obj, source_key, target_obj, target_key, overwrite=False):
-        source_value = getattr(source_obj, source_key)
-        target_value = getattr(target_obj, target_key)
-
-        if target_value and not overwrite:
-            return
-
-        if not source_value:
-            return
+    def get_external_path(path):
+        if fs.external_working_directory and not os.path.isabs(path):
+            return os.path.join(fs.external_working_directory, path)
+        return path
 
-        if fs.external_working_directory and not os.path.isabs(source_value):
-            target_value = os.path.join(fs.external_working_directory, source_value)
-        else:
-            target_value = source_value
+    if fs.staging_external is None:
+        fs.staging_external = get_external_path(fs.staging)
 
-        setattr(target_obj, target_key, target_value)
+    if fs.public_external is None:
+        fs.public_external = get_external_path(fs.public)
 
-    set_external_path(fs, 'staging', fs, 'staging_external')
-    set_external_path(fs, 'public', fs, 'public_external')
-    set_external_path(north, 'users_fs', north, 'users_fs', overwrite=True)
-    set_external_path(north, 'shared_fs', north, 'shared_fs', overwrite=True)
+    if fs.north_home_external is None:
+        fs.north_home_external = get_external_path(fs.north_home)
 
 
 def _merge(a: dict, b: dict, path: List[str] = None) -> dict:
diff --git a/nomad/jupyterhub_config.py b/nomad/jupyterhub_config.py
index a55ef1fa72576a88bce85aa974fdd43b86b1a8d4..b904c28109a0aafb7b5ad2d0f9347319e898495d 100644
--- a/nomad/jupyterhub_config.py
+++ b/nomad/jupyterhub_config.py
@@ -22,137 +22,57 @@ NOMAD servers.
 '''
 
 from dockerspawner.dockerspawner import DockerSpawner
-from jupyterhub.handlers.base import BaseHandler
 from oauthenticator.generic import GenericOAuthenticator
-import os
-import os.path
-import requests
-import json
 
-from nomad import config, infrastructure
+from nomad import config
 
-# TODO The AnonymousLogin kinda works, but it won't logout or remove old containers,
-# if the user is still logged in non anonymously.
-
-
-class AnonymousLoginHandler(BaseHandler):
-    async def get(self):
-        # TODO somehow set a fake user to avoid login
-        # - read cookie and use cookie, or create, set, and use cookie
-        await self.login_user(data='anonymous')
-        self.redirect(self.get_next_url())
-
-
-class NomadAuthenticator(GenericOAuthenticator):
-    '''
-    A custom OAuth authenticator. It can be used with NOMAD's keycloak. It imeplement
-    a `pre_spawn_start` hook that passes user upload mounts to the spawner.
-    '''
-
-    def get_handlers(self, app):
-        ''' Add an additional handler for anonymous logins. '''
-        handlers = super().get_handlers(app)
-        return handlers + [(r'/anonymous_login', AnonymousLoginHandler)]
-
-    async def authenticate(self, handler, data=None):
-        if data == 'anonymous':
-            return 'anonymous'
-
-        try:
-            return await super().authenticate(handler, data=data)
-        except Exception as e:
-            # The default authenticate has failed, e.g. due to missing credentials.
-            # Check if there is bearer authorization and try to identify the user with
-            # this. Otherwise, propagate the exception.
-            authorization = handler.request.headers.get('Authorization', None)
-            if not (authorization and authorization.lower().startswith('bearer ')):
-                raise e
-
-        # Use the access token to authenticate the user
-        access_token = authorization[7:]
-        payload = infrastructure.keycloak.decode_access_token(access_token)
-
-        authenticated = {
-            'name': payload['preferred_username'],
-            'auth_state': {
-                'access_token': access_token,
-                'refresh_token': None,  # It is unclear how we can get the refresh
-                                        # token. The only way is through a proper oauth
-                                        # flow. But we can't do that if the hub is only
-                                        # used via API and not the hub pages.
-                                        # For now the assumption is that
-                                        # - we implicitly "refresh" the access token with
-                                        #   each API call that causes this to run
-                                        # - we assume that jhub we automatically go through
-                                        #   a oauth flow, once the access token is expired
-                                        #   and it does not find a stored refresh token.
-                'oauth_user': payload,
-                'scope': []}}
-        # This will save the users authstate
-        await handler.auth_to_user(authenticated)
-
-        return authenticated
-
-    async def pre_spawn_start(self, user, spawner):
-        '''
-        Uses the user credentials to request all staging uploads and pass the
-        respective path as volume host mounts to the spawner.
-        '''
-        # This is guacamole specific
-        # linuxserver/webtop guacamole-lite based guacamole client use SUBFOLDER to
-        # confiugure base path
-        if not spawner.environment:
-            spawner.environment = {}
-        spawner.environment['SUBFOLDER'] = f'{config.services.api_base_path.rstrip("/")}/north/user/{user.name}/'
-        spawner.environment['JUPYTERHUB_CLIENT_API_URL'] = f'{config.north_url()}/hub/api'
-        spawner.environment['NOMAD_KEYCLOAK_REALM_NAME'] = config.keycloak.realm_name
-        spawner.environment['NOMAD_CLIENT_URL'] = config.api_url(api_host=config.north.nomad_host)
+c = get_config()  # type: ignore  # pylint: disable=undefined-variable
 
-        if user.name == 'anonymous':
-            return
 
-        try:
-            auth_state = await user.get_auth_state()
-            if not auth_state:
-                self.log.warn('Authentication state is not configured!')
-                return
-            access_token = auth_state['access_token']
-            api_headers = {'Authorization': f'Bearer {access_token}'}
-            spawner.environment['NOMAD_CLIENT_ACCESS_TOKEN'] = access_token
+def pre_spawn(spawner):
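+    '''
+    Configures the spawner from the user_options passed by the nomad-service when it
+    requests a named server: user home and upload volumes, environment, and tool image/cmd.
+    '''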
+    if spawner.handler.current_user.name != 'nomad-service':
+        # Do nothing, will only launch the default image with no volumes.
 
-            uploads_response = requests.get(
-                f'{config.api_url().rstrip("/")}/v1/uploads?is_published=false&page_size=100',
-                headers=api_headers)
-        except Exception as e:
-            self.log.error('Cannot access Nomad API: %s', e)
-            return
+        # Only the nomad-service can launch specialized tools with mounted volumes
+        if spawner.name:
+            spawner.log.error(f'The named server {spawner.name} is not allowed to start this way.')
+            raise NotImplementedError('Only the nomad-service can launch specialized tools.')
 
-        if uploads_response.status_code != 200:
-            self.log.error('Cannot get user uploads: %s', uploads_response.text)
-            return
+        return
 
-        volumes = {}
+    user_home = spawner.user_options.get('user_home')
+    if user_home:
+        spawner.volumes[user_home['host_path']] = {
+            'mode': 'rw',
+            'bind': user_home['mount_path']
+        }
 
-        def add_volume(host_path, mount_path):
-            host_path = os.path.abspath(host_path)
-            volumes[host_path] = mount_path
+    uploads = spawner.user_options.get('uploads', [])
+    for upload in uploads:
+        spawner.volumes[upload['host_path']] = {
+            'mode': 'rw',
+            'bind': upload['mount_path']
+        }
 
-        add_volume(os.path.join(config.north.users_fs, user.name), f'/prefix/work')
-        add_volume(os.path.join(config.north.shared_fs), f'/prefix/shared')
+    environment = spawner.user_options.get('environment', {})
+    spawner.environment.update(environment)
 
-        for upload in uploads_response.json()['data']:
-            if 'upload_files_server_path' in upload:
-                upload_id = upload['upload_id']
-                upload_server_path = upload['upload_files_server_path']
-                add_volume(f'{upload_server_path}/raw', f'/prefix/uploads/{upload_id}')
+    tool = spawner.user_options.get('tool')
+    if tool:
+        spawner.image = tool.get('image')
+        spawner.cmd = tool.get('cmd')
 
-        self.log.debug('Configure spawner with nomad volumes: %s', volumes)
 
-        spawner.volumes = volumes
-        spawner.nomad_username = user.name
+c.Spawner.pre_spawn_hook = pre_spawn
 
-
-c = get_config()  # type: ignore  # pylint: disable=undefined-variable
+# configure nomad service
+c.JupyterHub.services = [
+    {
+        "name": "nomad-service",
+        "admin": True,
+        "api_token": config.north.hub_service_api_token,
+    }
+]
 
 # Allow named single-user servers per user (Default: False)
 c.JupyterHub.allow_named_servers = True
@@ -161,24 +81,15 @@ c.JupyterHub.allow_named_servers = True
 # user-redirect. (Default: '')
 c.JupyterHub.default_server_name = 'jupyter'
 
-# TODO: This is temporary.  Place everything behind a single origin aka nginx proxy
-c.JupyterHub.tornado_settings = {
-    'headers': {
-        'Access-Control-Allow-Origin': '*',
-        'Access-Control-Allow-Headers': '*',
-        'Access-Control-Allow-Methods': '*'
-    },
-}
 
 # The public facing URL of the whole JupyterHub application. This is the address on which
 # the proxy will bind. (Default: 'http://:8000')
 c.JupyterHub.bind_url = f'http://:9000/{config.services.api_base_path.strip("/")}/north'
 
+# configure authenticator
 nomad_public_keycloak = f'{config.keycloak.public_server_url.rstrip("/")}/realms/{config.keycloak.realm_name}'
 nomad_keycloak = f'{config.keycloak.server_url.rstrip("/")}/realms/{config.keycloak.realm_name}'
-
-c.JupyterHub.authenticator_class = NomadAuthenticator
-
+c.JupyterHub.authenticator_class = GenericOAuthenticator
 c.GenericOAuthenticator.login_service = 'keycloak'
 c.GenericOAuthenticator.client_id = 'nomad_public'
 c.GenericOAuthenticator.authorize_url = f'{nomad_public_keycloak}/protocol/openid-connect/auth'
@@ -186,51 +97,32 @@ c.GenericOAuthenticator.token_url = f'{nomad_keycloak}/protocol/openid-connect/t
 c.GenericOAuthenticator.userdata_url = f'{nomad_keycloak}/protocol/openid-connect/userinfo'
 c.GenericOAuthenticator.userdata_params = {'state': 'state'}
 c.GenericOAuthenticator.username_key = 'preferred_username'
-# c.GenericOAuthenticator.userdata_method = 'GET'
 c.GenericOAuthenticator.scope = ['openid', 'profile']
-
 c.Authenticator.auto_login = True
 c.Authenticator.enable_auth_state = True
 
 
-class NomadDockerSpawner(DockerSpawner):
-
-    async def start(self, image=None, extra_create_kwargs=None, extra_host_config=None):
-        self.log.debug(f'Configuring spawner for container {self.container_name}')
-        tool = self.container_name.split('-')[-1]
-        if tool in tools:
-            tools[tool](self)
-            self.log.debug(f'Configured spawner for {tool}')
-        else:
-            self.log.error(f'{tool} is not a tool, raise an error')
-            raise NotImplementedError('You cannot launch non tool containers.')
-
-        return await super().start(image, extra_create_kwargs, extra_host_config)
-
+# configure docker spawner
+class DockerSpawnerWithWindowsFixes(DockerSpawner):
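+    '''
+    DockerSpawner that retries docker calls (e.g. 'port') that return no result,
+    as a workaround for running docker on Windows.
+    '''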
     def _docker(self, method, *args, **kwargs):
-        if config.north.windows:
-            tries = 0
-            max_tries = 1
-            if method == 'port':
-                max_tries = 3
-            while tries < max_tries:
-                result = super()._docker(method, *args, **kwargs)
-                if result is not None:
-                    break
-                import time
-                time.sleep(3)
-                tries += 1
-
-            return result
-        else:
-            return super()._docker(method, *args, **kwargs)
-
-
-# launch with docker
-c.JupyterHub.spawner_class = NomadDockerSpawner
-c.DockerSpawner.image = 'jupyter/base-notebook'
+        tries = 0
+        max_tries = 1
+        if method == 'port':
+            max_tries = 3
+        while tries < max_tries:
+            result = super()._docker(method, *args, **kwargs)
+            if result is not None:
+                break
+            import time
+            time.sleep(3)
+            tries += 1
+
+        return result
+
+
+c.JupyterHub.spawner_class = DockerSpawner if not config.north.windows else DockerSpawnerWithWindowsFixes
+c.DockerSpawner.image = 'jupyter/datascience-notebook'
 c.DockerSpawner.remove = True
-
 # Prefix for container names. See name_template for full container name for a particular
 # user's server. (Default: 'jupyter')
 c.DockerSpawner.prefix = 'noamd_oasis_north'
@@ -247,60 +139,6 @@ if config.north.hub_connect_ip:
 if config.north.hub_connect_url:
     c.DockerSpawner.hub_connect_url = config.north.hub_connect_url
 
-
-def create_configure_from_tool_json(tool_json):
-    def configure(spawner: DockerSpawner):
-        spawner.image = tool_json['image']
-        if 'cmd' in tools_json:
-            spawner.cmd = tools_json['cmd']
-
-        if 'mount_path' not in tool_json:
-            err_msg = 'mount_path missing in tools.json'
-            spawner.log.error(err_msg)
-            raise KeyError(err_msg)
-
-        for key, value in spawner.volumes.items():
-            if isinstance(value, str) \
-               and value.startswith('/prefix'):
-                spawner.volumes[key] = value.replace('/prefix', tool_json['mount_path'])
-
-        hosts = tool_json.get('extra_hosts', {})
-        spawner.extra_host_config.update({
-            "extra_hosts": hosts
-        })
-
-        mounts = tool_json.get('mounts', {})
-        if not isinstance(mounts, dict):
-            spawner.log.error(f'Mounts entry {mounts} in tools.json is not valid.')
-            return configure
-
-        for key, value in mounts.items():
-            if not (isinstance(value, dict) and 'bind' in value):
-                spawner.log.error(f"{key}:{value} is not a valid mounts entry.")
-                continue
-
-            if not value['bind'].startswith('/prefix'):
-                spawner.log.error(
-                    f'The "bind" key in {key} needs to be preceeded by /prefix'
-                )
-                continue
-
-            spawner.volumes[key] = {
-                'bind': value['bind'].replace('/prefix', tool_json['mount_path']),
-                'mode': value.get('mode', 'r')
-            }
-
-    return configure
-
-
-tools_json_path = os.path.join(
-    os.path.dirname(os.path.abspath(__file__)),
-    '../dependencies/nomad-remote-tools-hub/tools.json')
-
-with open(tools_json_path, 'rt') as f:
-    tools_json = json.load(f)
-
-tools = {
-    key: create_configure_from_tool_json(value)
-    for key, value in tools_json.items()
-}
+# Longer timeouts to avoid "Gateway Time-out (504)" errors while containers are still starting.
+c.DockerSpawner.http_timeout = 5 * 60  # in seconds
+c.DockerSpawner.start_timeout = 10 * 60  # in seconds
diff --git a/nomad/utils/__init__.py b/nomad/utils/__init__.py
index b5ee7481ae89cff4696d59a87fc88435dbc99df8..d23b6bb0ef323aaa9c49f524951caa4c48befdea 100644
--- a/nomad/utils/__init__.py
+++ b/nomad/utils/__init__.py
@@ -54,6 +54,8 @@ import logging
 import inspect
 import orjson
 import os
+import unicodedata
+import re
 
 from nomad import config
 
@@ -533,6 +535,21 @@ def deep_get(dictionary, *keys):
     return reduce(lambda d, key: d[key] if isinstance(key, int) else d.get(key) if d else None, keys, dictionary)
 
 
+def slugify(value):
+    '''
+    Adapted from https://github.com/django/django/blob/master/django/utils/text.py
+    Converts to ASCII, converts spaces or repeated dashes to single dashes, removes
+    characters that aren't alphanumerics, underscores, or hyphens, converts to
+    lowercase, and strips leading and trailing whitespace, dashes, and underscores.
+    '''
+
+    value = str(value)
+    value = unicodedata.normalize('NFKD', value).encode('ascii', 'ignore').decode('ascii')
+    value = re.sub(r'[^\w\s-]', '', value.lower())
+    return re.sub(r'[-\s]+', '-', value).strip('-_')
+
+
 def query_list_to_dict(path_list: List[Union[str, int]], value: Any) -> Dict[str, Any]:
     '''Transforms a list of path fragments into a dictionary query. E.g. the list
 
diff --git a/ops/docker-compose/nomad-oasis/docker-compose.yaml b/ops/docker-compose/nomad-oasis/docker-compose.yaml
index f6f170bd9a8581f456519beed32956e8ffc23090..d5e29e05222849c565e6a0962bba1b7c9ddb20b4 100644
--- a/ops/docker-compose/nomad-oasis/docker-compose.yaml
+++ b/ops/docker-compose/nomad-oasis/docker-compose.yaml
@@ -101,6 +101,7 @@ services:
       NOMAD_RABBITMQ_HOST: rabbitmq
       NOMAD_ELASTIC_HOST: elastic
       NOMAD_MONGO_HOST: mongo
+      NOMAD_NORTH_HUB_HOST: north
     depends_on:
       rabbitmq:
         condition: service_healthy
@@ -108,6 +109,8 @@ services:
         condition: service_healthy
       mongo:
         condition: service_healthy
+      north:
+        condition: service_started
     volumes:
       - ./configs/nomad.yaml:/app/nomad.yaml
       - ./.volumes/fs:/app/.volumes/fs
@@ -140,9 +143,6 @@ services:
       NOMAD_RABBITMQ_HOST: rabbitmq
       NOMAD_ELASTIC_HOST: elastic
       NOMAD_MONGO_HOST: mongo
-    depends_on:
-      app:
-        condition: service_started
     volumes:
       - ./configs/nomad.yaml:/app/nomad.yaml
       - ./.volumes/fs:/app/.volumes/fs
diff --git a/ops/kubernetes/deployments/dev-values.yaml b/ops/kubernetes/deployments/dev-values.yaml
index 2d33428f95f1022e20cbcda6c98e5e3507d8df10..64d2aa8437bfe1375f9ccac761c0798a8e2364c7 100644
--- a/ops/kubernetes/deployments/dev-values.yaml
+++ b/ops/kubernetes/deployments/dev-values.yaml
@@ -43,6 +43,7 @@ volumes:
   prefixSize: 1
   public: /nomad/fairdi/dev/v1/fs/public
   staging: /nomad/fairdi/dev/v1/fs/staging
+  north_home: /nomad/fairdi/dev/v1/fs/north/users
   tmp: /nomad/fairdi/dev/v1/fs/tmp
   nomad: /nomad
 
@@ -50,8 +51,10 @@ services:
   jupyterhub:
     enabled: true
 
+north:
+  hubServiceApiTokenSecret: "nomad-hub-service-api-token"
+
 jupyterhub:
-  # enabled: true
   fullnameOverride: null
   nameOverride: "north"
   hub:
diff --git a/ops/kubernetes/deployments/prod-develop-values.yaml b/ops/kubernetes/deployments/prod-develop-values.yaml
index ffd314f758b80ba2bcfe77d7548daf73bc6e7271..6d113ddc311c14101daa9aecd8bf1425fbdd36cf 100644
--- a/ops/kubernetes/deployments/prod-develop-values.yaml
+++ b/ops/kubernetes/deployments/prod-develop-values.yaml
@@ -61,6 +61,7 @@ volumes:
   prefixSize: 1
   public: /nomad/fairdi/develop/fs/public
   staging: /nomad/fairdi/develop/fs/staging
+  north_home: /nomad/fairdi/develop/fs/north/users
   tmp: /nomad/fairdi/develop/fs/tmp
   nomad: /nomad
   archiveVersionSuffix: v1
@@ -75,11 +76,13 @@ datacite:
   secret: "nomad-datacite"
   enabled: false
 
-
 services:
   jupyterhub:
     enabled: true
 
+north:
+  hubServiceApiTokenSecret: "nomad-hub-service-api-token"
+
 jupyterhub:
   fullnameOverride: "nomad-develop-v1-north"
   hub:
@@ -97,20 +100,4 @@ jupyterhub:
         userdata_url: https://nomad-lab.eu/fairdi/keycloak/auth/realms/fairdi_nomad_prod/protocol/openid-connect/userinfo
   singleuser:
     podNameTemplate: "nomad-develop-v1-north-{username}--{servername}"
-    initContainers:
-      - name: fix-permissions
-        image: busybox
-        imagePullPolicy: IfNotPresent
-        securityContext:
-          runAsUser: 0
-        command: ["/bin/sh", "-c", "chown 1000:1000 /home/jovyan"]
-        volumeMounts:
-          - mountPath: /home/jovyan
-            name: home
-            subPath: "{username}"
-    storage:
-      type: static
-      homeMountPath: /home/jovyan/work
-      static:
-        pvcName: nomad-develop-v1-north-home
-        subPath: "{username}"
+
diff --git a/ops/kubernetes/deployments/prod-staging-values.yaml b/ops/kubernetes/deployments/prod-staging-values.yaml
index d812d0604b193d2c299de83eb513d9f820b39aba..250785386f61107678340510c2826838fd591bc3 100644
--- a/ops/kubernetes/deployments/prod-staging-values.yaml
+++ b/ops/kubernetes/deployments/prod-staging-values.yaml
@@ -63,6 +63,7 @@ volumes:
   prefixSize: 1
   public: /nomad/fairdi/prod/fs/public
   staging: /nomad/fairdi/prod/fs/staging
+  north_home: /nomad/fairdi/prod/fs/north/users
   tmp: /nomad/fairdi/prod/fs/tmp
   nomad: /nomad
   archiveVersionSuffix: v1
@@ -81,6 +82,9 @@ services:
   jupyterhub:
     enabled: true
 
+north:
+  hubServiceApiTokenSecret: "nomad-hub-service-api-token"
+
 jupyterhub:
   fullnameOverride: "nomad-staging-v1-north"
   hub:
@@ -98,21 +102,5 @@ jupyterhub:
         userdata_url: https://nomad-lab.eu/fairdi/keycloak/auth/realms/fairdi_nomad_prod/protocol/openid-connect/userinfo
   singleuser:
     podNameTemplate: "nomad-staging-v1-north-{username}--{servername}"
-    initContainers:
-      - name: fix-permissions
-        image: busybox
-        imagePullPolicy: IfNotPresent
-        securityContext:
-          runAsUser: 0
-        command: ["/bin/sh", "-c", "chown 1000:1000 /home/jovyan"]
-        volumeMounts:
-          - mountPath: /home/jovyan
-            name: home
-            subPath: "{username}"
-    storage:
-      type: static
-      homeMountPath: /home/jovyan/work
-      static:
-        pvcName: nomad-staging-v1-north-home
-        subPath: "{username}"
+
 
diff --git a/ops/kubernetes/deployments/prod-test-values.yaml b/ops/kubernetes/deployments/prod-test-values.yaml
index b6e0e36e09be32597e33404f4e78497f9845d93e..bb5dcb75d82f36937070b93e9f32192951ca4617 100644
--- a/ops/kubernetes/deployments/prod-test-values.yaml
+++ b/ops/kubernetes/deployments/prod-test-values.yaml
@@ -55,6 +55,7 @@ volumes:
   prefixSize: 1
   public: /nomad/fairdi/test/fs/public
   staging: /nomad/fairdi/test/fs/staging
+  north_home: /nomad/fairdi/test/fs/north/users
   tmp: /nomad/fairdi/test/fs/tmp
   nomad: /nomad
   archiveVersionSuffix: v1
@@ -69,37 +70,23 @@ services:
   jupyterhub:
     enabled: false
 
-# jupyterhub:
-#   fullnameOverride: "nomad-test-v1-north"
-#   hub:
-#     baseUrl: "/prod/v1/test/"
-#     db:
-#       type: sqlite-pvc
-#       pvc:
-#         storageClassName: nomad-test-v1-north-hub-db
-#     config:
-#       GenericOAuthenticator:
-#         client_id: nomad_public
-#         oauth_callback_url: https://nomad-lab.eu/prod/v1/test/north/hub/oauth_callback
-#         authorize_url: https://nomad-lab.eu/fairdi/keycloak/auth/realms/fairdi_nomad_prod/protocol/openid-connect/auth
-#         token_url: https://nomad-lab.eu/fairdi/keycloak/auth/realms/fairdi_nomad_prod/protocol/openid-connect/token
-#         userdata_url: https://nomad-lab.eu/fairdi/keycloak/auth/realms/fairdi_nomad_prod/protocol/openid-connect/userinfo
-#   singleuser:
-#     podNameTemplate: "nomad-test-v1-north-{username}--{servername}"
-#     initContainers:
-#       - name: fix-permissions
-#         image: busybox
-#         imagePullPolicy: IfNotPresent
-#         securityContext:
-#           runAsUser: 0
-#         command: ["/bin/sh", "-c", "chown 1000:1000 /home/jovyan"]
-#         volumeMounts:
-#           - mountPath: /home/jovyan
-#             name: home
-#             subPath: "{username}"
-#     storage:
-#       type: static
-#       homeMountPath: /home/jovyan/work
-#       static:
-#         pvcName: nomad-test-v1-north-home
-#         subPath: "{username}"
+north:
+  hubServiceApiTokenSecret: "nomad-hub-service-api-token"
+
+jupyterhub:
+  fullnameOverride: "nomad-test-v1-north"
+  hub:
+    baseUrl: "/prod/v1/test/"
+    db:
+      type: sqlite-pvc
+      pvc:
+        storageClassName: nomad-test-v1-north-hub-db
+    config:
+      GenericOAuthenticator:
+        client_id: nomad_public
+        oauth_callback_url: https://nomad-lab.eu/prod/v1/test/north/hub/oauth_callback
+        authorize_url: https://nomad-lab.eu/fairdi/keycloak/auth/realms/fairdi_nomad_prod/protocol/openid-connect/auth
+        token_url: https://nomad-lab.eu/fairdi/keycloak/auth/realms/fairdi_nomad_prod/protocol/openid-connect/token
+        userdata_url: https://nomad-lab.eu/fairdi/keycloak/auth/realms/fairdi_nomad_prod/protocol/openid-connect/userinfo
+  singleuser:
+    podNameTemplate: "nomad-test-v1-north-{username}--{servername}"
diff --git a/ops/kubernetes/deployments/prod-util-values.yaml b/ops/kubernetes/deployments/prod-util-values.yaml
index 7993b9429a7108fbc45e4604cc7f2f9dd90b171b..67b4b9fda1600e9e4ea4e1d0b4d3771ae805f384 100644
--- a/ops/kubernetes/deployments/prod-util-values.yaml
+++ b/ops/kubernetes/deployments/prod-util-values.yaml
@@ -63,6 +63,7 @@ volumes:
   prefixSize: 1
   public: /nomad/fairdi/prod/fs/public
   staging: /nomad/fairdi/prod/fs/staging
+  north_home: /nomad/fairdi/prod/fs/north/users
   tmp: /nomad/fairdi/prod/fs/tmp
   nomad: /nomad
   archiveVersionSuffix: v1
diff --git a/ops/kubernetes/deployments/prod-values.yaml b/ops/kubernetes/deployments/prod-values.yaml
index 71ffc992531f33dc3ef5163fe8c62601203eb804..17db4e76737060c02d098bc745f93e7aba0ca496 100644
--- a/ops/kubernetes/deployments/prod-values.yaml
+++ b/ops/kubernetes/deployments/prod-values.yaml
@@ -60,6 +60,7 @@ volumes:
   prefixSize: 1
   public: /nomad/fairdi/prod/fs/public
   staging: /nomad/fairdi/prod/fs/staging
+  north_home: /nomad/fairdi/prod/fs/north/users
   tmp: /nomad/fairdi/prod/fs/tmp
   nomad: /nomad
   archiveVersionSuffix: v1
@@ -78,6 +79,9 @@ services:
   jupyterhub:
     enabled: false
 
+north:
+  hubServiceApiTokenSecret: "nomad-hub-service-api-token"
+
 jupyterhub:
   fullnameOverride: "nomad-prod-v1-north"
   hub:
@@ -95,20 +99,4 @@ jupyterhub:
         userdata_url: https://nomad-lab.eu/fairdi/keycloak/auth/realms/fairdi_nomad_prod/protocol/openid-connect/userinfo
   singleuser:
     podNameTemplate: "nomad-prod-v1-north-{username}--{servername}"
-    initContainers:
-      - name: fix-permissions
-        image: busybox
-        imagePullPolicy: IfNotPresent
-        securityContext:
-          runAsUser: 0
-        command: ["/bin/sh", "-c", "chown 1000:1000 /home/jovyan"]
-        volumeMounts:
-          - mountPath: /home/jovyan
-            name: home
-            subPath: "{username}"
-    storage:
-      type: static
-      homeMountPath: /home/jovyan/work
-      static:
-        pvcName: nomad-prod-v1-north-home
-        subPath: "{username}"
+
diff --git a/ops/kubernetes/nomad/Chart.yaml b/ops/kubernetes/nomad/Chart.yaml
index 1451bc61843ed1f0798aa4c56bb546a9d3c9210d..5261d0fdba4f80025bf4ce2364f71fe66294b3e3 100644
--- a/ops/kubernetes/nomad/Chart.yaml
+++ b/ops/kubernetes/nomad/Chart.yaml
@@ -2,7 +2,7 @@ apiVersion: v2
 name: nomad
 description: A Helm chart for Kubernetes that only runs nomad services and uses externally hosted databases.
 type: application
-version: 0.0.1
+version: 0.0.2
 dependencies:
   - name: rabbitmq
     version: "11.2.2"
diff --git a/ops/kubernetes/nomad/templates/api-deployment.yaml b/ops/kubernetes/nomad/templates/api-deployment.yaml
index c1861402d381ddd8ab30cf1a18bebe1131e8514f..3504cc803134f7fdfaf17bbabc4b1ad247207c35 100644
--- a/ops/kubernetes/nomad/templates/api-deployment.yaml
+++ b/ops/kubernetes/nomad/templates/api-deployment.yaml
@@ -102,6 +102,8 @@ spec:
           name: public-volume
         - mountPath: /app/.volumes/fs/staging
           name: staging-volume
+        - mountPath: /app/.volumes/fs/north/users
+          name: north-home-volume
         - mountPath: /nomad
           name: nomad-volume
         env:
@@ -115,7 +117,7 @@ spec:
         - name: NOMAD_SERVICES_API_SECRET
           valueFrom:
             secretKeyRef:
-              name: {{ .Values.api.apiSecret}}
+              name: {{ .Values.api.apiSecret }}
               key: password
         {{ end }}
         {{ if .Values.keycloak.clientSecret }}
@@ -151,6 +153,13 @@ spec:
               name: {{ .Values.datacite.secret }}
               key: user
         {{ end }}
+        {{ if .Values.north.hubServiceApiTokenSecret }}
+        - name: NOMAD_NORTH_HUB_SERVICE_API_TOKEN
+          valueFrom:
+            secretKeyRef:
+              name: {{ .Values.north.hubServiceApiTokenSecret }}
+              key: token
+        {{ end }}
         command: ["/bin/sh", "run.sh"]
         livenessProbe:
           httpGet:
@@ -193,6 +202,10 @@ spec:
           path: {{ .Values.volumes.staging}}
           type: Directory
         {{ end }}
+      - name: north-home-volume
+        hostPath:
+          path: {{ .Values.volumes.north_home }}
+          type: Directory
       - name: nomad-volume
         hostPath:
           path: {{ .Values.volumes.nomad }}
diff --git a/ops/kubernetes/nomad/templates/nomad-configmap.yml b/ops/kubernetes/nomad/templates/nomad-configmap.yml
index 817ea8489c5ca9b3f0b6881dea66169c0afd4c8d..d00e2f68b8cd9fa8521c728c065c85ecbcba962b 100644
--- a/ops/kubernetes/nomad/templates/nomad-configmap.yml
+++ b/ops/kubernetes/nomad/templates/nomad-configmap.yml
@@ -33,6 +33,9 @@ data:
       index_individual_entries: {{ .Values.reprocess.indexIndividualEntries }}
     fs:
       tmp: ".volumes/fs/staging/tmp"
+      staging_external: {{ .Values.volumes.staging }}
+      public_external: {{ .Values.volumes.public }}
+      north_home_external: {{ .Values.volumes.north_home }}
       prefix_size: {{ .Values.volumes.prefixSize }}
       working_directory: /app
       {{ if .Values.volumes.archiveVersionSuffix }}
@@ -98,3 +101,8 @@ data:
     {{ if .Values.gui.config }}
     ui: {{ .Values.gui.config | toYaml | nindent 6 }}
     {{ end }}
+    north:
+      hub_host: "{{ .Values.proxy.external.host }}"
+      hub_port: {{ .Values.proxy.external.port }}
+      hub_service_api_token: "{{ .Values.north.hubServiceApiToken }}"
+
diff --git a/ops/kubernetes/nomad/values.yaml b/ops/kubernetes/nomad/values.yaml
index e8c0d3a200e6e0eadcf4364e385a1b5166b1ceb2..ac8bf0ab2c95a0e4de96fee7bcefd9b4a234c84d 100644
--- a/ops/kubernetes/nomad/values.yaml
+++ b/ops/kubernetes/nomad/values.yaml
@@ -163,6 +163,7 @@ volumes:
   prefixSize: 1
   public: /nomad/fairdi/latest/fs/public
   staging: /nomad/fairdi/latest/fs/staging
+  north_home: /nomad/fairdi/latest/fs/north/users
   tmp: /nomad/fairdi/latest/fs/tmp
   nomad: /nomad
 
@@ -191,13 +192,24 @@ services:
     ## enable aitoolkit references
     enabled: false
 
+north:
+  hubServiceApiToken: "secret-token"
+
 jupyterhub:
+  debug:
+    enabled: false
   # fullnameOverride: null
   # nameOverride: "north"
   proxy:
     service:
       type: ClusterIP
   hub:
+    extraEnv:
+      NOMAD_NORTH_HUB_SERVICE_API_TOKEN:
+        valueFrom:
+          secretKeyRef:
+            name: nomad-hub-service-api-token
+            key: token
     allowNamedServers: true
     shutdownOnLogout: true
     config:
@@ -216,6 +228,80 @@ jupyterhub:
         username_key: preferred_username
         userdata_params:
           state: state
+    extraConfig:
+      01-prespawn-hook.py: |
+        import os
+
+        hub_service_api_token = os.getenv('NOMAD_NORTH_HUB_SERVICE_API_TOKEN')
+
+        # configure nomad service
+        c.JupyterHub.services = [
+            {
+                "name": "nomad-service",
+                "admin": True,
+                "api_token": hub_service_api_token,
+            }
+        ]
+
+        async def pre_spawn_hook(spawner):
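+            # Configure the spawner from the user_options passed by the nomad-service:
+            # user home and upload volumes, environment, and the tool image/cmd.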
+            await spawner.load_user_options()
+            username = spawner.user.name
+
+            spawner.log.info(f"username: {username}")
+            spawner.log.debug(f'Configuring spawner for named server {spawner.name}')
+
+            if spawner.handler.current_user.name != 'nomad-service':
+                # Do nothing, will only launch the default image with no volumes.
+                # Only the nomad-service can launch specialized tools with mounted volumes
+                if spawner.name:
+                    spawner.log.error(f'The named server {spawner.name} is not allowed to start this way.')
+                    raise NotImplementedError('Only the nomad-service can launch specialized tools.')
+                return
+
+            user_home = spawner.user_options.get('user_home')
+            spawner.log.info(f"user_home: {user_home}")
+            if user_home:
+                spawner.volumes.append({
+                    'name': 'user-home',
+                    'hostPath': {'path': user_home['host_path']}
+                })
+                spawner.volume_mounts.append({
+                    'name': 'user-home',
+                    'mountPath': user_home['mount_path'],
+                    'readOnly': False
+                })
+
+            uploads = spawner.user_options.get('uploads', [])
+            spawner.log.info(f"uploads: {uploads}")
+            for (i, upload) in enumerate(uploads):
+                spawner.volumes.append({
+                    'name': f"uploads-{i}",
+                    'hostPath': {'path': upload['host_path']}
+                })
+                spawner.volume_mounts.append({
+                    'name': f"uploads-{i}",
+                    'mountPath': upload['mount_path'],
+                    'readOnly': False
+                })
+
+            environment = spawner.user_options.get('environment', {})
+            spawner.environment.update(environment)
+
+            tool = spawner.user_options.get('tool')
+            if tool:
+                spawner.image = tool.get('image')
+                spawner.cmd = tool.get('cmd')
+
+                # Workaround for webtop based images (no connection to jupyterhub itself)
+                if tool.get('privileged'):
+                    spawner.privileged = True
+                    spawner.uid = 0
+
+        c.Spawner.pre_spawn_hook = pre_spawn_hook
+
+
   cull:
     enabled: true
     timeout: 3600