From c5e4d2d050bcd01e787cee8dcff7a2b0d9570bd2 Mon Sep 17 00:00:00 2001
From: David Sikter <david.sikter@physik.hu-berlin.de>
Date: Tue, 22 Nov 2022 09:21:22 +0000
Subject: [PATCH] Refactor: change terminology from installationUrl to
 deploymentUrl

---
 docs/oasis.md                                 |   6 +-
 gui/src/components/DataStore.js               | 146 +++++++++---------
 gui/src/components/archive/ArchiveBrowser.js  |  12 +-
 gui/src/components/archive/FileBrowser.js     |  46 +++---
 gui/src/components/archive/metainfo.js        |   6 +-
 .../editQuantity/ReferenceEditQuantity.js     |   6 +-
 .../components/entry/BrowseEntryFilesView.js  |   2 +-
 gui/src/components/uploads/CreateEntry.js     |   6 +-
 gui/src/components/uploads/UploadFilesView.js |   4 +-
 gui/src/utils.js                              | 112 +++++++-------
 gui/src/utils.spec.js                         |  88 +++++------
 nomad/app/v1/routers/uploads.py               |   2 +-
 nomad/config.py                               |   9 +-
 nomad/datamodel/context.py                    |   2 +-
 nomad/infrastructure.py                       |   2 +-
 nomad/processing/data.py                      |  24 +--
 .../configs/nomad.yaml                        |   2 +-
 .../nomad-oasis/configs/nomad.yaml            |   2 +-
 tests/app/v1/routers/test_uploads.py          |   6 +-
 tests/conftest.py                             |   2 +-
 tests/processing/test_data.py                 |   4 +-
 21 files changed, 245 insertions(+), 244 deletions(-)

diff --git a/docs/oasis.md b/docs/oasis.md
index b54e1c5671..f92e8b0b8f 100644
--- a/docs/oasis.md
+++ b/docs/oasis.md
@@ -164,8 +164,10 @@ NOMAD app and worker read a `nomad.yaml` for configuration.
 
 You should change the following:
 
-- Replace `localhost` with the hostname of your server. I user-management will redirect your users back to this host. Make sure this is the hostname, your users can use.
-- Replace `deployment`, `deployment_id`, and `maintainer_email` with representative values. The `deployment_id` should be the public hostname if you have any of your oasis.
+- Replace `localhost` with the hostname of your server. The user-management will redirect your
+users back to this host. Make sure this is the hostname your users can use.
+- Replace `deployment`, `deployment_url`, and `maintainer_email` with representative values.
+The `deployment_url` should be the URL of the deployment's API (should end with `/api`).
 - You can change `api_base_path` to run NOMAD under a different path prefix.
 - You should generate your own `north.jupyterhub_crypt_key`. You can generate one
 with `openssl rand -hex 32`.
diff --git a/gui/src/components/DataStore.js b/gui/src/components/DataStore.js
index ba44215448..566b818058 100644
--- a/gui/src/components/DataStore.js
+++ b/gui/src/components/DataStore.js
@@ -67,16 +67,16 @@ const DataStore = React.memo(({children}) => {
    * an object with default values, mostly undefined or nulls, will be returned). Note, it
    * does not cause the store to fetch any data, it just returns what's currently in the store.
    */
-  function getUpload(installationUrl, uploadId) {
+  function getUpload(deploymentUrl, uploadId) {
     if (!uploadId) return undefined
-    if (installationUrl !== apiBase) throw new Error('Fetching uploads from external installations is not yet supported')
+    if (deploymentUrl !== apiBase) throw new Error('Fetching uploads from external deployments is not yet supported')
     let uploadStoreObj = uploadStore.current[uploadId]
     if (!uploadStoreObj) {
       // Creates an initial, empty upload store object.
       uploadStoreObj = {
-        installationUrl, // ReadOnly
+        deploymentUrl, // ReadOnly
         uploadId, // ReadOnly
-        isExternal: installationUrl !== apiBase, // ReadOnly
+        isExternal: deploymentUrl !== apiBase, // ReadOnly
         deletionRequested: false, // Writable - If this upload has been sent for deletion
         upload: undefined, // Writeable - The last upload proc data fetched.
         entries: undefined, // ReadOnly - The last list of entries fetched by the store (when subscribing to an entry page).
@@ -100,8 +100,8 @@ const DataStore = React.memo(({children}) => {
         _subscriptions: [],
 
         // Convenience methods
-        updateUpload: (dataToUpdate) => { updateUpload(installationUrl, uploadId, dataToUpdate) },
-        requestRefreshUpload: () => { requestRefreshUpload(installationUrl, uploadId) }
+        updateUpload: (dataToUpdate) => { updateUpload(deploymentUrl, uploadId, dataToUpdate) },
+        requestRefreshUpload: () => { requestRefreshUpload(deploymentUrl, uploadId) }
       }
       uploadStore.current[uploadId] = uploadStoreObj
     }
@@ -112,9 +112,9 @@ const DataStore = React.memo(({children}) => {
    * Gets an upload from the store asychronously, waiting for the store to refresh if needed.
    * If the required data has already been fetched, we return the store object immediately.
    */
-  async function getUploadAsync(installationUrl, uploadId, requireUpload, requireEntriesPage) {
+  async function getUploadAsync(deploymentUrl, uploadId, requireUpload, requireEntriesPage) {
     if (!uploadId) return undefined
-    const uploadStoreObj = getUpload(installationUrl, uploadId)
+    const uploadStoreObj = getUpload(deploymentUrl, uploadId)
     if (uploadRefreshSatisfiesOptions(uploadStoreObj, requireUpload, requireEntriesPage)) {
       return uploadStoreObj // Store has already been refreshed with the required options
     }
@@ -126,7 +126,7 @@ const DataStore = React.memo(({children}) => {
           resolve(newStoreObj)
         }
       }
-      subscribeToUpload(installationUrl, uploadId, cb, requireUpload, requireEntriesPage)
+      subscribeToUpload(deploymentUrl, uploadId, cb, requireUpload, requireEntriesPage)
     })
   }
 
@@ -134,27 +134,27 @@ const DataStore = React.memo(({children}) => {
    * Subscribes the callback cb to an upload, and returns a function to be called to unsubscribe.
    * Typically used in useEffect. The callback will be called when the store value changes.
    */
-  function subscribeToUpload(installationUrl, uploadId, cb, requireUpload, requireEntriesPage) {
+  function subscribeToUpload(deploymentUrl, uploadId, cb, requireUpload, requireEntriesPage) {
     if (!uploadId) return undefined
     if (requireUpload === undefined || requireEntriesPage === undefined) {
       throw Error('Store error: missing upload subscription parameter')
     }
-    const uploadStoreObj = getUpload(installationUrl, uploadId)
+    const uploadStoreObj = getUpload(deploymentUrl, uploadId)
     // Update requestOptions
     uploadStoreObj.requestOptions.requireUpload = uploadStoreObj.requestOptions.requireUpload || requireUpload
     uploadStoreObj.requestOptions.requireEntriesPage = uploadStoreObj.requestOptions.requireEntriesPage || requireEntriesPage
     // Add subscription and trigger refresh if needed
     addSubscription(uploadStoreObj, cb)
-    initiateUploadRefreshIfNeeded(installationUrl, uploadId)
+    initiateUploadRefreshIfNeeded(deploymentUrl, uploadId)
     return function unsubscriber() { removeSubscription(uploadStore.current, uploadId, cb) }
   }
 
   /**
    * Updates the store upload with the specified data and notifies all subscribers.
    */
-  function updateUpload(installationUrl, uploadId, dataToUpdate) {
-    if (installationUrl !== apiBase) throw new Error('Cannot update external upload')
-    const oldStoreObj = getUpload(installationUrl, uploadId)
+  function updateUpload(deploymentUrl, uploadId, dataToUpdate) {
+    if (deploymentUrl !== apiBase) throw new Error('Cannot update external upload')
+    const oldStoreObj = getUpload(deploymentUrl, uploadId)
     const newStoreObj = {...oldStoreObj, ...dataToUpdate}
     // Compute derived values
     const user = userRef.current
@@ -193,12 +193,12 @@ const DataStore = React.memo(({children}) => {
       }
     }
     // Possibly, start a refresh job
-    initiateUploadRefreshIfNeeded(installationUrl, uploadId)
+    initiateUploadRefreshIfNeeded(deploymentUrl, uploadId)
   }
 
-  async function refreshUpload(installationUrl, uploadId) {
+  async function refreshUpload(deploymentUrl, uploadId) {
     // Internal use: refresh an upload store obj with data from the API.
-    const uploadStoreObj = getUpload(installationUrl, uploadId)
+    const uploadStoreObj = getUpload(deploymentUrl, uploadId)
     const refreshOptions = {...uploadStoreObj.requestOptions}
     const {requireUpload, requireEntriesPage} = refreshOptions
     if (!requireUpload && !requireEntriesPage) return
@@ -214,42 +214,42 @@ const DataStore = React.memo(({children}) => {
       const dataToUpdate = requireEntriesPage
         ? {error: undefined, isRefreshing: false, upload: apiData.response?.upload, entries: apiData.response?.data, apiData, pagination: currentPagination, refreshOptions}
         : {error: undefined, isRefreshing: false, upload: apiData.data, entries: undefined, apiData: undefined, refreshOptions}
-      updateUpload(installationUrl, uploadId, dataToUpdate)
+      updateUpload(deploymentUrl, uploadId, dataToUpdate)
     }).catch((error) => {
       if (requireEntriesPage && error.apiMessage === 'Page out of range requested.') {
         // Special case: can happen if entries have been deleted and the page we were on is no longer in range
         if (currentPagination && currentPagination.page !== 1) {
           // Rather than sending an update to all subscribers with an error, we first try
           // jumping to page 1 (will probably solve the problem)
-          getUpload(installationUrl, uploadId).pagination.page = 1
-          refreshUpload(installationUrl, uploadId)
+          getUpload(deploymentUrl, uploadId).pagination.page = 1
+          refreshUpload(deploymentUrl, uploadId)
           return
         }
       }
-      updateUpload(installationUrl, uploadId, {error: error, isRefreshing: false, refreshOptions})
+      updateUpload(deploymentUrl, uploadId, {error: error, isRefreshing: false, refreshOptions})
     })
   }
 
   /**
    * Use to nicely request a refresh of the upload store object.
    */
-  function requestRefreshUpload(installationUrl, uploadId) {
-    const uploadStoreObj = getUpload(installationUrl, uploadId)
+  function requestRefreshUpload(deploymentUrl, uploadId) {
+    const uploadStoreObj = getUpload(deploymentUrl, uploadId)
     if (!uploadStoreObj.isRefreshing) {
       // Refresh is not already in progress
-      refreshUpload(installationUrl, uploadId)
+      refreshUpload(deploymentUrl, uploadId)
     }
   }
 
-  async function initiateUploadRefreshIfNeeded(installationUrl, uploadId) {
+  async function initiateUploadRefreshIfNeeded(deploymentUrl, uploadId) {
     // Internal use: check if a refresh of the store is needed, and if so, initiate it.
-    let uploadStoreObj = getUpload(installationUrl, uploadId)
+    let uploadStoreObj = getUpload(deploymentUrl, uploadId)
     if (uploadStoreObj.isRefreshing) return // refresh already in progress
     if (uploadStoreObj.isProcessing) {
       // Upload is processing
       uploadStoreObj.isRefreshing = true // Signal start of a refresh
       await new Promise(resolve => setTimeout(resolve, 1000)) // wait one sec
-      uploadStoreObj = getUpload(installationUrl, uploadId)
+      uploadStoreObj = getUpload(deploymentUrl, uploadId)
     }
     // Determine if a refresh is needed or not
     const {requireUpload, requireEntriesPage} = uploadStoreObj.requestOptions
@@ -260,7 +260,7 @@ const DataStore = React.memo(({children}) => {
     const wrongPagination = requireEntriesPage && (pagIs?.page !== pag?.page || pagIs?.page_size !== pag.page_size)
     if (!uploadStoreObj.error && (uploadDataMissing || entryDataMissing || wrongPagination || uploadStoreObj.isProcessing)) {
       // Need to fetch data from the api
-      refreshUpload(installationUrl, uploadId)
+      refreshUpload(deploymentUrl, uploadId)
     } else {
       uploadStoreObj.isRefreshing = false
     }
@@ -271,14 +271,14 @@ const DataStore = React.memo(({children}) => {
    * an object with default values, mostly undefined or nulls, will be returned). Note, it
    * does not cause the store to fetch any data, it just returns what's currently in the store.
    */
-  function getEntry(installationUrl, entryId) {
+  function getEntry(deploymentUrl, entryId) {
     if (!entryId) return undefined
-    if (installationUrl !== apiBase) throw new Error('Fetching entries from external installations is not yet supported')
+    if (deploymentUrl !== apiBase) throw new Error('Fetching entries from external deployments is not yet supported')
     let entryStoreObj = entryStore.current[entryId]
     if (!entryStoreObj) {
       // Creates an initial, empty entry store object.
       entryStoreObj = {
-        installationUrl, // ReadOnly
+        deploymentUrl, // ReadOnly
         entryId: entryId, // ReadOnly
         uploadId: undefined, // ReadOnly - fetched by the store
         url: undefined, // ReadOnly - populated when uploadId fetched from the store
@@ -302,9 +302,9 @@ const DataStore = React.memo(({children}) => {
         _subscriptions: [],
 
         // Convenience methods
-        handleArchiveChanged: () => { handleArchiveChanged(installationUrl, entryId) },
-        saveArchive: () => { return saveArchive(installationUrl, entryId) },
-        reload: () => { requestRefreshEntry(installationUrl, entryId) }
+        handleArchiveChanged: () => { handleArchiveChanged(deploymentUrl, entryId) },
+        saveArchive: () => { return saveArchive(deploymentUrl, entryId) },
+        reload: () => { requestRefreshEntry(deploymentUrl, entryId) }
       }
       entryStore.current[entryId] = entryStoreObj
     }
@@ -315,9 +315,9 @@ const DataStore = React.memo(({children}) => {
    * Gets an entry from the store asychronously, waiting for the store to refresh if needed.
    * If the required data has already been fetched, we return the store object immediately.
    */
-  async function getEntryAsync(installationUrl, entryId, requireMetadata, requireArchive) {
+  async function getEntryAsync(deploymentUrl, entryId, requireMetadata, requireArchive) {
     if (!entryId) return undefined
-    const entryStoreObj = getEntry(installationUrl, entryId)
+    const entryStoreObj = getEntry(deploymentUrl, entryId)
     if (entryRefreshSatisfiesOptions(entryStoreObj, requireMetadata, requireArchive)) {
       return entryStoreObj // Store has already been refreshed with the required options
     }
@@ -329,7 +329,7 @@ const DataStore = React.memo(({children}) => {
           resolve(newStoreObj)
         }
       }
-      subscribeToEntry(installationUrl, entryId, cb, requireMetadata, requireArchive)
+      subscribeToEntry(deploymentUrl, entryId, cb, requireMetadata, requireArchive)
     })
   }
 
@@ -337,30 +337,30 @@ const DataStore = React.memo(({children}) => {
    * Subscribes the callback cb to an entry, and returns a function to be called to unsubscribe.
    * Typically used in useEffect. The callback will be called when the store value changes.
    */
-  function subscribeToEntry(installationUrl, entryId, cb, requireMetadata, requireArchive) {
+  function subscribeToEntry(deploymentUrl, entryId, cb, requireMetadata, requireArchive) {
     if (!entryId) return undefined
     if (requireMetadata === undefined || !(requireArchive === undefined || requireArchive === '*' || typeof requireArchive === 'object')) {
       throw Error('Store error: bad subscription parameter supplied')
     }
-    const entryStoreObj = getEntry(installationUrl, entryId)
+    const entryStoreObj = getEntry(deploymentUrl, entryId)
     // Update requestOptions
     entryStoreObj.requestOptions.requireMetadata = entryStoreObj.requestOptions.requireMetadata || requireMetadata
     entryStoreObj.requestOptions.requireArchive = mergeArchiveFilter(
       entryStoreObj.requestOptions.requireArchive, requireArchive)
     // Add subscription and trigger refresh if needed
     addSubscription(entryStoreObj, cb)
-    initiateEntryRefreshIfNeeded(installationUrl, entryId)
+    initiateEntryRefreshIfNeeded(deploymentUrl, entryId)
     return function unsubscriber() { removeSubscription(entryStore.current, entryId, cb) }
   }
 
   /**
    * Updates the store entry with the specified data and notifies all subscribers.
    */
-  function updateEntry(installationUrl, entryId, dataToUpdate) {
-    const oldStoreObj = getEntry(installationUrl, entryId)
+  function updateEntry(deploymentUrl, entryId, dataToUpdate) {
+    const oldStoreObj = getEntry(deploymentUrl, entryId)
     const newStoreObj = {...oldStoreObj, ...dataToUpdate}
     // Compute derived values not set by the refreshEntry method
-    newStoreObj.url = newStoreObj.uploadId ? `${installationUrl}/uploads/${newStoreObj.uploadId}/archive/${entryId}` : undefined
+    newStoreObj.url = newStoreObj.uploadId ? `${deploymentUrl}/uploads/${newStoreObj.uploadId}/archive/${entryId}` : undefined
     newStoreObj.exists = newStoreObj?.error?.name !== 'DoesNotExist'
     newStoreObj.archiveHasChanges = newStoreObj.archiveVersion !== newStoreObj.savedArchiveVersion
 
@@ -381,12 +381,12 @@ const DataStore = React.memo(({children}) => {
       raiseError(newStoreObj.error)
     }
     // Possibly, start a refresh job
-    initiateEntryRefreshIfNeeded(installationUrl, entryId)
+    initiateEntryRefreshIfNeeded(deploymentUrl, entryId)
   }
 
-  async function refreshEntry(installationUrl, entryId) {
+  async function refreshEntry(deploymentUrl, entryId) {
     // Internal use: refresh an entry store obj with data from the API.
-    let entryStoreObj = getEntry(installationUrl, entryId)
+    let entryStoreObj = getEntry(deploymentUrl, entryId)
     let refreshOptions = {...entryStoreObj.requestOptions}
     let {requireMetadata, requireArchive} = refreshOptions
     if (!requireMetadata && !requireArchive) return
@@ -401,11 +401,11 @@ const DataStore = React.memo(({children}) => {
         const user = userRef.current
         const isWriter = user && metadata?.writers && metadata.writers.find(u => u.user_id === user.sub)
         const isEditableArchive = metadata && !metadata.published && metadata.quantities && metadata.quantities.includes('data')
-        const editable = isWriter && isEditableArchive && selectedEntry.current === `${installationUrl}:${entryId}`
+        const editable = isWriter && isEditableArchive && selectedEntry.current === `${deploymentUrl}:${entryId}`
         const isProcessing = !!metadata?.process_running
         Object.assign(dataToUpdate, {metadataApiData, metadata, uploadId, editable, isProcessing, error: undefined})
         // Fetch the options again, in case some subscriptions were added while waiting for the api call
-        entryStoreObj = getEntry(installationUrl, entryId)
+        entryStoreObj = getEntry(deploymentUrl, entryId)
         refreshOptions = {...entryStoreObj.requestOptions}
         requireArchive = refreshOptions.requireArchive
         dataToUpdate.refreshOptions.requireArchive = requireArchive
@@ -428,36 +428,36 @@ const DataStore = React.memo(({children}) => {
       dataToUpdate.error = error
     }
     dataToUpdate.isRefreshing = false
-    updateEntry(installationUrl, entryId, dataToUpdate)
+    updateEntry(deploymentUrl, entryId, dataToUpdate)
   }
 
   /**
    * Use to nicely request a refresh of the entry store object.
    */
-  function requestRefreshEntry(installationUrl, entryId) {
-    const entryStoreObj = getEntry(installationUrl, entryId)
+  function requestRefreshEntry(deploymentUrl, entryId) {
+    const entryStoreObj = getEntry(deploymentUrl, entryId)
     if (!entryStoreObj.isRefreshing) {
       // Refresh is not already in progress
-      refreshEntry(installationUrl, entryId)
+      refreshEntry(deploymentUrl, entryId)
     }
   }
 
-  async function initiateEntryRefreshIfNeeded(installationUrl, entryId) {
+  async function initiateEntryRefreshIfNeeded(deploymentUrl, entryId) {
     // Internal use: check if a refresh of the store is needed, and if so, initiate it.
-    let entryStoreObj = getEntry(installationUrl, entryId)
+    let entryStoreObj = getEntry(deploymentUrl, entryId)
     if (entryStoreObj.isRefreshing) return // refresh already in progress
     if (entryStoreObj.isProcessing) {
       // Entry is processing
       entryStoreObj.isRefreshing = true // Signal start of a refresh
       await new Promise(resolve => setTimeout(resolve, 1000)) // wait one sec
-      entryStoreObj = getEntry(installationUrl, entryId)
+      entryStoreObj = getEntry(deploymentUrl, entryId)
     }
     // Determine if a refresh is needed or not
     const {requireMetadata, requireArchive} = entryStoreObj.requestOptions
     const lastRefreshSatisfiesOptions = entryRefreshSatisfiesOptions(entryStoreObj, requireMetadata, requireArchive)
     if (!entryStoreObj.error && (!lastRefreshSatisfiesOptions || entryStoreObj.isProcessing)) {
       // Need to fetch data from the api
-      refreshEntry(installationUrl, entryId)
+      refreshEntry(deploymentUrl, entryId)
     } else {
       entryStoreObj.isRefreshing = false
     }
@@ -466,8 +466,8 @@ const DataStore = React.memo(({children}) => {
   /**
    * Used to save the archive and trigger a store refresh.
    */
-  function saveArchive(installationUrl, entryId) {
-    const {uploadId, metadata, archive, archiveVersion} = getEntry(installationUrl, entryId)
+  function saveArchive(deploymentUrl, entryId) {
+    const {uploadId, metadata, archive, archiveVersion} = getEntry(deploymentUrl, entryId)
     const {mainfile} = metadata
     if (uploadId) {
       const separatorIndex = mainfile.lastIndexOf('/')
@@ -489,7 +489,7 @@ const DataStore = React.memo(({children}) => {
       return new Promise((resolve, reject) => {
         api.put(`/uploads/${uploadId}/raw/${path}?file_name=${fileName}&wait_for_processing=true&entry_hash=${archive.metadata.entry_hash}`, stringifiedArchive || newArchive, config)
           .then(response => {
-            requestRefreshEntry(installationUrl, entryId)
+            requestRefreshEntry(deploymentUrl, entryId)
             resolve()
           })
           .catch(error => {
@@ -499,7 +499,7 @@ const DataStore = React.memo(({children}) => {
               raiseError(error)
             }
           })
-        updateEntry(installationUrl, entryId, {savedArchiveVersion: archiveVersion})
+        updateEntry(deploymentUrl, entryId, {savedArchiveVersion: archiveVersion})
       })
     }
   }
@@ -507,9 +507,9 @@ const DataStore = React.memo(({children}) => {
   /**
    * Call to signal that the archive has been manually edited.
    */
-  function handleArchiveChanged(installationUrl, entryId) {
-    const {archiveVersion} = getEntry(installationUrl, entryId)
-    updateEntry(installationUrl, entryId, {archiveVersion: archiveVersion + 1})
+  function handleArchiveChanged(deploymentUrl, entryId) {
+    const {archiveVersion} = getEntry(deploymentUrl, entryId)
+    updateEntry(deploymentUrl, entryId, {archiveVersion: archiveVersion + 1})
   }
 
   /**
@@ -589,7 +589,7 @@ const DataStore = React.memo(({children}) => {
     if (url.versionHash) {
       return url
     } else if (url.entryId) {
-      return createEntryUrl(url.installationUrl, url.uploadId, url.entryId, '/definitions')
+      return createEntryUrl(url.deploymentUrl, url.uploadId, url.entryId, '/definitions')
     } else if (url.qualifiedName) {
       return systemMetainfoUrl
     }
@@ -606,7 +606,7 @@ const DataStore = React.memo(({children}) => {
     if (parsedMetainfoBaseUrl.versionHash) {
       const frozenMetainfo = await api.get(`/metainfo/${parsedMetainfoBaseUrl.versionHash}`)
       frozenMetainfo._url = createEntryUrl(
-        parsedMetainfoBaseUrl.installationUrl,
+        parsedMetainfoBaseUrl.deploymentUrl,
         'todo-uploadid',
         'todo-entryid',
         `definitions@${frozenMetainfo.data.definition_id}`)
@@ -617,7 +617,7 @@ const DataStore = React.memo(({children}) => {
       return frozenMetainfo
     } else if (parsedMetainfoBaseUrl.entryId) {
       const entryStoreObj = await getEntryAsync(
-        parsedMetainfoBaseUrl.installationUrl, parsedMetainfoBaseUrl.entryId, false, metainfoArchiveFilter)
+        parsedMetainfoBaseUrl.deploymentUrl, parsedMetainfoBaseUrl.entryId, false, metainfoArchiveFilter)
       if (entryStoreObj.error) {
         throw new Error(`Error fetching entry ${parsedMetainfoBaseUrl.entryId}: ${entryStoreObj.error}`)
       } else if (!entryStoreObj.archive?.definitions) {
@@ -722,11 +722,11 @@ export default DataStore
  *      b) '*' (load entire archive), or
  *      c) an object specifying a simple archive data filter (see the doc for mergeArchiveFilter).
  */
-export function useEntryStoreObj(installationUrl, entryId, requireMetadata, requireArchive) {
+export function useEntryStoreObj(deploymentUrl, entryId, requireMetadata, requireArchive) {
   const dataStore = useDataStore()
   const [entryStoreObj, setEntryStoreObj] = useState(
-    () => installationUrl && entryId
-      ? filteredEntryStoreObj(dataStore.getEntry(installationUrl, entryId), requireMetadata, requireArchive)
+    () => deploymentUrl && entryId
+      ? filteredEntryStoreObj(dataStore.getEntry(deploymentUrl, entryId), requireMetadata, requireArchive)
       : null)
 
   const onEntryStoreUpdated = useCallback((oldStoreObj, newStoreObj) => {
@@ -734,10 +734,10 @@ export function useEntryStoreObj(installationUrl, entryId, requireMetadata, requ
   }, [setEntryStoreObj, requireMetadata, requireArchive])
 
   useEffect(() => {
-    if (installationUrl && entryId) {
-      return dataStore.subscribeToEntry(installationUrl, entryId, onEntryStoreUpdated, requireMetadata, requireArchive)
+    if (deploymentUrl && entryId) {
+      return dataStore.subscribeToEntry(deploymentUrl, entryId, onEntryStoreUpdated, requireMetadata, requireArchive)
     }
-  }, [installationUrl, entryId, requireMetadata, requireArchive, dataStore, onEntryStoreUpdated])
+  }, [deploymentUrl, entryId, requireMetadata, requireArchive, dataStore, onEntryStoreUpdated])
 
   return entryStoreObj
 }
diff --git a/gui/src/components/archive/ArchiveBrowser.js b/gui/src/components/archive/ArchiveBrowser.js
index 6d60660bef..0c3279eb6b 100644
--- a/gui/src/components/archive/ArchiveBrowser.js
+++ b/gui/src/components/archive/ArchiveBrowser.js
@@ -77,7 +77,7 @@ export const configState = atom({
 
 const ArchiveBrowser = React.memo(({url}) => {
   const parsedUrl = useMemo(() => parseNomadUrl(url), [url])
-  const {archive} = useEntryStoreObj(parsedUrl.installationUrl, parsedUrl.entryId, false, '*')
+  const {archive} = useEntryStoreObj(parsedUrl.deploymentUrl, parsedUrl.entryId, false, '*')
   const metainfo = useMetainfo(systemMetainfoUrl)
   const rootSectionDef = metainfo ? metainfo.getEntryArchiveDefinition() : null
 
@@ -430,7 +430,7 @@ class ArchiveAdaptor extends Adaptor {
     this.api = api
     this.dataStore = dataStore
     const {editable} = await dataStore.getEntryAsync(
-      this.parsedObjUrl.installationUrl, this.parsedObjUrl.entryId, false, '*')
+      this.parsedObjUrl.deploymentUrl, this.parsedObjUrl.entryId, false, '*')
     this.entryIsEditable = editable
   }
 
@@ -539,7 +539,7 @@ class SectionAdaptor extends ArchiveAdaptor {
         try {
           const resolvedUrl = resolveNomadUrl(reference, this.parsedObjUrl)
           if (resolvedUrl.type === refType.archive) {
-            const {archive} = await this.dataStore.getEntryAsync(resolvedUrl.installationUrl, resolvedUrl.entryId, false, '*')
+            const {archive} = await this.dataStore.getEntryAsync(resolvedUrl.deploymentUrl, resolvedUrl.entryId, false, '*')
             const resolvedDef = property.type._referencedDefinition
             const resolvedObj = resolveInternalRef('/' + (resolvedUrl.path || ''), archive)
             return this.adaptorFactory(resolvedUrl, resolvedObj, resolvedDef)
@@ -555,10 +555,10 @@ class SectionAdaptor extends ArchiveAdaptor {
       // Regular quantities
       if (property.m_annotations?.browser) {
         if (property.m_annotations.browser[0].adaptor === 'RawFileAdaptor') {
-          const installationUrl = this.parsedObjUrl.installationUrl
+          const deploymentUrl = this.parsedObjUrl.deploymentUrl
           const uploadId = this.parsedObjUrl.uploadId
           const path = this.obj[property.name]
-          const uploadUrl = createUploadUrl(installationUrl, uploadId, path)
+          const uploadUrl = createUploadUrl(deploymentUrl, uploadId, path)
           return new RawFileAdaptor(uploadUrl, null, false)
         }
       }
@@ -761,7 +761,7 @@ const InheritingSections = React.memo(function InheritingSections({def, section,
   }, [dataStore, def])
 
   const getSelectionValue = useCallback((def) => {
-    return getUrlFromDefinition(def, {installationUrl: apiBase}, true)
+    return getUrlFromDefinition(def, {deploymentUrl: apiBase}, true)
   }, [])
 
   const showSelection = useMemo(() => {
diff --git a/gui/src/components/archive/FileBrowser.js b/gui/src/components/archive/FileBrowser.js
index 15379db65b..f96e863862 100644
--- a/gui/src/components/archive/FileBrowser.js
+++ b/gui/src/components/archive/FileBrowser.js
@@ -67,7 +67,7 @@ class RawDirectoryAdaptor extends Adaptor {
     const parsedUrl = parseNomadUrl(uploadUrl)
     if (parsedUrl.type !== refType.upload) throw new Error(`Expected an upload url, got ${uploadUrl}`)
     if (!parsedUrl.isResolved) throw new Error(`Absolute url required, got ${uploadUrl}`)
-    this.installationUrl = parsedUrl.installationUrl
+    this.deploymentUrl = parsedUrl.deploymentUrl
     this.uploadUrl = uploadUrl
     this.uploadId = parsedUrl.uploadId
     this.path = parsedUrl.path
@@ -88,7 +88,7 @@ class RawDirectoryAdaptor extends Adaptor {
   async initialize(api, dataStore) {
     this.api = api
     this.lastPage = 1
-    const uploadStoreObj = await dataStore.getUploadAsync(this.installationUrl, this.uploadId, true, false)
+    const uploadStoreObj = await dataStore.getUploadAsync(this.deploymentUrl, this.uploadId, true, false)
     this.timestamp = uploadStoreObj.upload?.complete_time
     this.editable = uploadStoreObj.isEditable
     await this.fetchData()
@@ -98,7 +98,7 @@ class RawDirectoryAdaptor extends Adaptor {
   }
   async fetchData() {
     const encodedPath = urlEncodePath(this.path)
-    if (this.installationUrl !== apiBase) throw new Error('Fetching directory data from external source is not yet supported')
+    if (this.deploymentUrl !== apiBase) throw new Error('Fetching directory data from external source is not yet supported')
     const response = await this.api.get(`/uploads/${this.uploadId}/rawdir/${encodedPath}?include_entry_info=true&page_size=${this.page_size}&page=${this.lastPage}`)
     const elementsByName = this.data?.elementsByName || {}
     response.directory_metadata.content.forEach(element => { elementsByName[element.name] = element })
@@ -154,7 +154,7 @@ class RawDirectoryAdaptor extends Adaptor {
 
   render() {
     return <RawDirectoryContent
-      installationUrl={this.installationUrl} uploadId={this.uploadId} path={this.path}
+      deploymentUrl={this.deploymentUrl} uploadId={this.uploadId} path={this.path}
       title={this.title} highlightedItem={this.highlightedItem}
       editable={this.editable}/>
   }
@@ -233,7 +233,7 @@ const useRawDirectoryContentStyles = makeStyles(theme => ({
     backgroundColor: theme.palette.grey[300]
   }
 }))
-function RawDirectoryContent({installationUrl, uploadId, path, title, highlightedItem, editable}) {
+function RawDirectoryContent({deploymentUrl, uploadId, path, title, highlightedItem, editable}) {
   const classes = useRawDirectoryContentStyles()
   const dataStore = useDataStore()
   const entryPageMainFile = useEntryPageContext()?.metadata?.mainfile // Will be set if we're on an entry page
@@ -268,9 +268,9 @@ function RawDirectoryContent({installationUrl, uploadId, path, title, highlighte
   }, [browser, lane, uploadId])
 
   useEffect(() => {
-    refreshIfNeeded(undefined, dataStore.getUpload(installationUrl, uploadId))
-    return dataStore.subscribeToUpload(installationUrl, uploadId, refreshIfNeeded, true, false)
-  }, [dataStore, installationUrl, uploadId, refreshIfNeeded])
+    refreshIfNeeded(undefined, dataStore.getUpload(deploymentUrl, uploadId))
+    return dataStore.subscribeToUpload(deploymentUrl, uploadId, refreshIfNeeded, true, false)
+  }, [dataStore, deploymentUrl, uploadId, refreshIfNeeded])
 
   const handleDropFiles = useCallback((files) => {
     // Handles dropping files (not links)
@@ -279,9 +279,9 @@ function RawDirectoryContent({installationUrl, uploadId, path, title, highlighte
       formData.append('file', file)
     }
     api.put(`/uploads/${uploadId}/raw/${encodedPath}`, formData)
-      .then(response => dataStore.updateUpload(installationUrl, uploadId, {upload: response.data}))
+      .then(response => dataStore.updateUpload(deploymentUrl, uploadId, {upload: response.data}))
       .catch(error => raiseError(error))
-  }, [installationUrl, uploadId, encodedPath, raiseError, api, dataStore])
+  }, [deploymentUrl, uploadId, encodedPath, raiseError, api, dataStore])
 
   const handleDrop = useCallback(async (e) => {
     // Handles dropping files and links
@@ -301,7 +301,7 @@ function RawDirectoryContent({installationUrl, uploadId, path, title, highlighte
   const handleCopyMoveFile = (e) => {
     setOpenCopyMoveDialog(false)
     api.put(`/uploads/${uploadId}/raw/${encodedPath}?copy_or_move=${e.moveFile}&copy_or_move_source_path=${fileName}&file_name=${copyFileName.current.value}`)
-      .then(response => dataStore.updateUpload(installationUrl, uploadId, {upload: response.data}))
+      .then(response => dataStore.updateUpload(deploymentUrl, uploadId, {upload: response.data}))
       .catch(error => raiseError(error))
   }
 
@@ -311,7 +311,7 @@ function RawDirectoryContent({installationUrl, uploadId, path, title, highlighte
     if (dirName) {
       const fullPath = urlJoin(encodedPath, encodeURIComponent(dirName))
       api.post(`/uploads/${uploadId}/raw-create-dir/${fullPath}`)
-        .then(response => dataStore.updateUpload(installationUrl, uploadId, {upload: response.data}))
+        .then(response => dataStore.updateUpload(deploymentUrl, uploadId, {upload: response.data}))
         .catch(raiseError)
     }
   }
@@ -327,7 +327,7 @@ function RawDirectoryContent({installationUrl, uploadId, path, title, highlighte
           const gotoLane = lane.index > 0 ? lane.index - 1 : 0
           history.push(browser.lanes.current[gotoLane].path)
         }
-        dataStore.updateUpload(installationUrl, uploadId, {upload: response.data})
+        dataStore.updateUpload(deploymentUrl, uploadId, {upload: response.data})
       })
       .catch(raiseError)
   }
@@ -338,7 +338,7 @@ function RawDirectoryContent({installationUrl, uploadId, path, title, highlighte
     return <Content key={path}><Typography>loading ...</Typography></Content>
   } else {
     // Data loaded
-    const downloadUrl = `uploads/${uploadId}/raw/${encodedPath}?compress=true` // TODO: installationUrl need to be considered for external uploads
+    const downloadUrl = `uploads/${uploadId}/raw/${encodedPath}?compress=true` // TODO: deploymentUrl needs to be considered for external uploads
     return (
       <CustomDropZone onDrop={handleDrop} classNameNormal={classes.dropzoneLane} classNameActive={classes.dropzoneActive}>
         <Content key={path}>
@@ -475,7 +475,7 @@ function RawDirectoryContent({installationUrl, uploadId, path, title, highlighte
   }
 }
 RawDirectoryContent.propTypes = {
-  installationUrl: PropTypes.string.isRequired,
+  deploymentUrl: PropTypes.string.isRequired,
   uploadId: PropTypes.string.isRequired,
   path: PropTypes.string.isRequired,
   title: PropTypes.string.isRequired,
@@ -493,7 +493,7 @@ export class RawFileAdaptor extends Adaptor {
     const parsedUrl = parseNomadUrl(uploadUrl)
     if (parsedUrl.type !== refType.upload) throw new Error(`Expected an upload url, got ${uploadUrl}`)
     if (!parsedUrl.isResolved) throw new Error(`Absolute url required, got ${uploadUrl}`)
-    this.installationUrl = parsedUrl.installationUrl
+    this.deploymentUrl = parsedUrl.deploymentUrl
     this.uploadId = parsedUrl.uploadId
     this.path = parsedUrl.path
     this.data = data
@@ -512,8 +512,8 @@ export class RawFileAdaptor extends Adaptor {
   }
   async itemAdaptor(key) {
     if (key === 'archive') {
-      const archiveUrl = createEntryUrl(this.installationUrl, this.uploadId, this.data.entry_id)
-      const {archive} = await this.dataStore.getEntryAsync(this.installationUrl, this.data.entry_id, false, '*')
+      const archiveUrl = createEntryUrl(this.deploymentUrl, this.uploadId, this.data.entry_id)
+      const {archive} = await this.dataStore.getEntryAsync(this.deploymentUrl, this.data.entry_id, false, '*')
       const metainfo = await this.dataStore.getMetainfoAsync(systemMetainfoUrl)
       const rootSectionDef = metainfo.getEntryArchiveDefinition()
       return archiveAdaptorFactory(archiveUrl, archive, rootSectionDef)
@@ -523,7 +523,7 @@ export class RawFileAdaptor extends Adaptor {
   }
   render() {
     return <RawFileContent
-      installationUrl={this.installationUrl} uploadId={this.uploadId} path={this.path}
+      deploymentUrl={this.deploymentUrl} uploadId={this.uploadId} path={this.path}
       data={this.data} editable={this.editable} key={this.path}/>
   }
 }
@@ -542,7 +542,7 @@ class FilePreviewAdaptor extends Adaptor {
   }
 }
 
-function RawFileContent({installationUrl, uploadId, path, data, editable}) {
+function RawFileContent({deploymentUrl, uploadId, path, data, editable}) {
   const entryPageMainFile = useEntryPageContext()?.metadata?.mainfile // Will be set if we're on an entry page
   const browser = useContext(browserContext)
   const lane = useContext(laneContext)
@@ -552,7 +552,7 @@ function RawFileContent({installationUrl, uploadId, path, data, editable}) {
   const { raiseError } = useErrors()
   const [openConfirmDeleteFileDialog, setOpenConfirmDeleteFileDialog] = useState(false)
   const encodedPath = urlEncodePath(path)
-  const downloadUrl = `uploads/${uploadId}/raw/${encodedPath}?ignore_mime_type=true` // TODO: installationUrl need to be considered for external uploads
+  const downloadUrl = `uploads/${uploadId}/raw/${encodedPath}?ignore_mime_type=true` // TODO: deploymentUrl needs to be considered for external uploads
   const allNorthTools = useTools()
   const applicableNorthTools = useMemo(() => {
     const fileExtension = path.split('.').pop().toLowerCase()
@@ -574,7 +574,7 @@ function RawFileContent({installationUrl, uploadId, path, data, editable}) {
           const gotoLane = lane.index > 0 ? lane.index - 1 : 0
           history.push(browser.lanes.current[gotoLane].path)
         }
-        dataStore.updateUpload(installationUrl, uploadId, {upload: response.data})
+        dataStore.updateUpload(deploymentUrl, uploadId, {upload: response.data})
       })
       .catch(raiseError)
   }
@@ -679,7 +679,7 @@ function RawFileContent({installationUrl, uploadId, path, data, editable}) {
     </Content>)
 }
 RawFileContent.propTypes = {
-  installationUrl: PropTypes.string.isRequired,
+  deploymentUrl: PropTypes.string.isRequired,
   uploadId: PropTypes.string.isRequired,
   path: PropTypes.string.isRequired,
   data: PropTypes.object.isRequired,
diff --git a/gui/src/components/archive/metainfo.js b/gui/src/components/archive/metainfo.js
index 2d49c88186..fa05508df0 100644
--- a/gui/src/components/archive/metainfo.js
+++ b/gui/src/components/archive/metainfo.js
@@ -660,7 +660,7 @@ export function removeSubSection(section, subSectionDef, index) {
 
 /**
  * Given a definition, compute its url (string). Optionally, you can specify relativeTo, an
- * object of the form {installationUrl, uploadId, entryId} (containing the first, the two first,
+ * object of the form {deploymentUrl, uploadId, entryId} (containing the first, the two first,
  * or all three atributes, depending on what you want the url to be relative to). If relativeTo
  * is left out, we return an absolute url. You may also specify preferMainfile = true if you
  * want the url to use the mainfile rather than the entryId when possible (more humanly readable).
@@ -668,7 +668,7 @@ export function removeSubSection(section, subSectionDef, index) {
  export function getUrlFromDefinition(definition, relativeTo = null, preferMainfile = false) {
   const pkg = definition.m_def === PackageMDef ? definition : definition._package
   const metainfo = pkg._pkgParentData._metainfo
-  if (!metainfo._parsedUrl.entryId && relativeTo?.installationUrl === metainfo._parsedUrl.installationUrl) {
+  if (!metainfo._parsedUrl.entryId && relativeTo?.deploymentUrl === metainfo._parsedUrl.deploymentUrl) {
     return definition._qualifiedName
   }
   let parentUrl
@@ -680,7 +680,7 @@ export function removeSubSection(section, subSectionDef, index) {
         let rv
         if (relativeTo) {
           rv = relativizeNomadUrl(
-            metainfo._parsedUrl, relativeTo.installationUrl, relativeTo.uploadId, relativeTo.entryId)
+            metainfo._parsedUrl, relativeTo.deploymentUrl, relativeTo.uploadId, relativeTo.entryId)
         } else {
           rv = metainfo._url
         }
diff --git a/gui/src/components/editQuantity/ReferenceEditQuantity.js b/gui/src/components/editQuantity/ReferenceEditQuantity.js
index 515cf50ccc..7faa75c378 100644
--- a/gui/src/components/editQuantity/ReferenceEditQuantity.js
+++ b/gui/src/components/editQuantity/ReferenceEditQuantity.js
@@ -45,7 +45,7 @@ function getReferencedSection(quantityDef) {
 const ReferenceEditQuantity = React.memo(function ReferenceEditQuantity(props) {
   const styles = useStyles()
   const dataStore = useDataStore()
-  const {installationUrl, uploadId, archive, url} = useEntryPageContext('*')
+  const {deploymentUrl, uploadId, archive, url} = useEntryPageContext('*')
   const {quantityDef, value, onChange, index} = props
   const [entry, setEntry] = useState(null)
   const {api} = useApi()
@@ -155,7 +155,7 @@ const ReferenceEditQuantity = React.memo(function ReferenceEditQuantity(props) {
   const createNewEntry = useCallback((fileName) => {
     const archive = {
       data: {
-        m_def: getUrlFromDefinition(getReferencedSection(quantityDef), {installationUrl, uploadId}, true)
+        m_def: getUrlFromDefinition(getReferencedSection(quantityDef), {deploymentUrl, uploadId}, true)
       }
     }
     return new Promise((resolve, reject) => {
@@ -176,7 +176,7 @@ const ReferenceEditQuantity = React.memo(function ReferenceEditQuantity(props) {
           reject(new Error(error))
         })
     })
-  }, [api, quantityDef, installationUrl, uploadId])
+  }, [api, quantityDef, deploymentUrl, uploadId])
 
   const handleValueChange = useCallback((event, value) => {
     if (value?.createNewEntry) {
diff --git a/gui/src/components/entry/BrowseEntryFilesView.js b/gui/src/components/entry/BrowseEntryFilesView.js
index cd6a609866..b60e75f99c 100644
--- a/gui/src/components/entry/BrowseEntryFilesView.js
+++ b/gui/src/components/entry/BrowseEntryFilesView.js
@@ -68,7 +68,7 @@ const BrowseEntryFilesView = React.memo((props) => {
     const mainfileBasename = data.mainfile.split('/').pop()
     return <Page>
       <FileBrowser
-        uploadUrl={createUploadUrl(apiBase, data.upload_id, mainfileDirname)} // TODO: installationUrl should come from entry context
+        uploadUrl={createUploadUrl(apiBase, data.upload_id, mainfileDirname)} // TODO: deploymentUrl should come from entry context
         rootTitle="Entry files"
         highlightedItem={mainfileBasename}
       />
diff --git a/gui/src/components/uploads/CreateEntry.js b/gui/src/components/uploads/CreateEntry.js
index c91bf6f26c..edadab786a 100644
--- a/gui/src/components/uploads/CreateEntry.js
+++ b/gui/src/components/uploads/CreateEntry.js
@@ -9,7 +9,7 @@ import { getUrlFromDefinition, SectionMDef, useGlobalMetainfo } from '../archive
 import { useUploadPageContext } from './UploadPageContext'
 
 const CreateEntry = React.memo(function CreateEntry(props) {
-  const {installationUrl, uploadId, isProcessing} = useUploadPageContext()
+  const {deploymentUrl, uploadId, isProcessing} = useUploadPageContext()
   const {api} = useApi()
   const {raiseError} = useErrors()
   const globalMetainfo = useGlobalMetainfo()
@@ -73,7 +73,7 @@ const CreateEntry = React.memo(function CreateEntry(props) {
         const newTemplates = getTemplatesFromDefinitions(
           archive.definitions.section_definitions, archive.metadata.entry_id, archive,
           section => {
-            return getUrlFromDefinition(section, {installationUrl, uploadId}, true)
+            return getUrlFromDefinition(section, {deploymentUrl, uploadId}, true)
           })
         newTemplates.forEach(template => {
           templates.push(template)
@@ -87,7 +87,7 @@ const CreateEntry = React.memo(function CreateEntry(props) {
     }
 
     getTemplates().then(setTemplates).catch(raiseError)
-  }, [api, raiseError, setTemplates, globalMetainfo, isProcessing, installationUrl, uploadId])
+  }, [api, raiseError, setTemplates, globalMetainfo, isProcessing, deploymentUrl, uploadId])
 
   const handleAdd = useCallback(() => {
     api.put(`uploads/${uploadId}/raw/?file_name=${name}.archive.json&overwrite_if_exists=false&wait_for_processing=true`, selectedTemplate.archive)
diff --git a/gui/src/components/uploads/UploadFilesView.js b/gui/src/components/uploads/UploadFilesView.js
index 1fda79a73e..c69c6c2b73 100644
--- a/gui/src/components/uploads/UploadFilesView.js
+++ b/gui/src/components/uploads/UploadFilesView.js
@@ -24,7 +24,7 @@ import { createUploadUrl } from '../../utils'
 import { Typography } from '@material-ui/core'
 
 const UploadFilesView = React.memo(function UploadFilesView() {
-  const {installationUrl, uploadId, error, hasUpload} = useUploadPageContext()
+  const {deploymentUrl, uploadId, error, hasUpload} = useUploadPageContext()
 
   if (!hasUpload) {
     return <Page limitedWidth>
@@ -34,7 +34,7 @@ const UploadFilesView = React.memo(function UploadFilesView() {
 
   return <Page>
     <FileBrowser
-      uploadUrl={createUploadUrl(installationUrl, uploadId, '')}
+      uploadUrl={createUploadUrl(deploymentUrl, uploadId, '')}
       rootTitle="Upload files"
     />
   </Page>
diff --git a/gui/src/utils.js b/gui/src/utils.js
index 01ac4e62e7..b2fbee5450 100644
--- a/gui/src/utils.js
+++ b/gui/src/utils.js
@@ -754,7 +754,7 @@ export function getLocation() {
 /**
  * Utilities parse and analyze *nomad urls*. A nomad url identifies some *base resource*,
  * which can be:
- *  1) a nomad installation,
+ *  1) a nomad deployment,
  *  2) an upload,
  *  3) an archive, or
  *  4) a metainfo schema.
@@ -776,21 +776,21 @@ export function getLocation() {
  * version of this section definition.
  *
  * Nomad urls can be absolute or relative. Absolute urls contain all information needed to
- * locate the resource, including the nomad installation url. Relative urls can only be resolved
+ * locate the resource, including the nomad deployment url. Relative urls can only be resolved
  * when given a *base url*, which defines our "point of origin".
  *
  * Possible formats (expressions in [brackets] are optional):
  * ----------------------------------------------------------
  *  Absolute urls:
- *    <installationUrl>
- *    <installationUrl>/uploads/<uploadId> [ /raw/<rawPath> [ #<dataPath> [ @<versionHash> ] ] ]
- *    <installationUrl>/uploads/<uploadId>/archive/<entryid> [ #<dataPath> [ @<versionHash> ] ]
- *    <installationUrl>/uploads/<uploadId>/archive/mainfile/<mainfile> [ #<dataPath> [ @<versionHash> ] ]
- *  Urls relative to the current installation:
+ *    <deploymentUrl>
+ *    <deploymentUrl>/uploads/<uploadId> [ /raw/<rawPath> [ #<dataPath> [ @<versionHash> ] ] ]
+ *    <deploymentUrl>/uploads/<uploadId>/archive/<entryid> [ #<dataPath> [ @<versionHash> ] ]
+ *    <deploymentUrl>/uploads/<uploadId>/archive/mainfile/<mainfile> [ #<dataPath> [ @<versionHash> ] ]
+ *  Urls relative to the current deployment:
  *    ../uploads/<uploadId> [ /raw/<rawPath> [ #<dataPath> [ @<versionHash> ] ] ]
  *    ../uploads/<uploadId>/archive/<entryid> [ #<dataPath> [ @<versionHash> ] ]
  *    ../uploads/<uploadId>/archive/mainfile/<mainfile> [ #<dataPath> [ @<versionHash> ] ]
- *    <qualifiedName> (TODO: how to handle versions, installationUrl etc)
+ *    <qualifiedName> (TODO: how to handle versions, deploymentUrl etc)
  *  Urls relative to the current upload:
  *    ../upload [ /raw/<rawPath> [ #<dataPath> [ @<versionHash> ] ] ]
  *    ../upload/archive/<entryid> [ #<dataPath> [ @<versionHash> ] ]
@@ -799,11 +799,11 @@ export function getLocation() {
  *    #<dataPath> (preferred)
  *    /<dataPath>
  * Note:
- *  - An <installationUrl> is a normal url, starting with "http://" or "https://", locating
- *    the api of the nomad installation. Should always end with "/api".
+ *  - A <deploymentUrl> is a normal url, starting with "http://" or "https://", locating
+ *    the api of the nomad deployment. Should always end with "/api".
  *    Example:
  *      https://nomad-lab.eu/prod/rae/api
- *  - Urls with versionHash are considered to be relative to the installation rather than the
+ *  - Urls with versionHash are considered to be relative to the deployment rather than the
  *    upload or data.
  *  - The rawPath and mainFile paths need to be escaped with urlEncodePath to ensure a valid url.
  *    (i.e. each segment needs to individually be escaped using encodeURIComponent)
@@ -815,7 +815,7 @@ export function getLocation() {
  *    is a "python style" name of alphanumerics separated by dots.
  *  - If no versionHash is specified for a url identifying a metainfo schema, it means
  *    we refer to the version defined by the base url (if the url is schema-relative), otherwise
- *    the latest version (in the nomad installation in question).
+ *    the latest version (in the nomad deployment in question).
  */
 
 export const systemMetainfoUrl = `system-metainfo` // TODO: should use absolute url with hash when implemented
@@ -824,7 +824,7 @@ export const systemMetainfoUrl = `system-metainfo` // TODO: should use absolute
  * Enum for the `type` attribute of parsed/resolved url objects
  */
 export const refType = Object.freeze({
-  installation: 'installation',
+  deployment: 'deployment',
   upload: 'upload',
   archive: 'archive',
   metainfo: 'metainfo'
@@ -834,7 +834,7 @@ export const refType = Object.freeze({
  * Enum for the `relativeTo` attribute of parsed/resolved url objects
  */
 export const refRelativeTo = Object.freeze({
-  installation: 'installation',
+  deployment: 'deployment',
   upload: 'upload',
   data: 'data',
   nothing: null
@@ -845,11 +845,11 @@ export const refRelativeTo = Object.freeze({
  *  url
  *    the original url string.
  *  type
- *    One of: refType.installation | refType.upload | refType.archive | refType.metainfo
+ *    One of: refType.deployment | refType.upload | refType.archive | refType.metainfo
  *  relativeTo
- *    One of: refRelativeTo.installation | refRelativeTo.upload | refRelativeTo.data | refRelativeTo.nothing (null)
- *  installationUrl
- *    The nomad installation url, if it can be determined.
+ *    One of: refRelativeTo.deployment | refRelativeTo.upload | refRelativeTo.data | refRelativeTo.nothing (null)
+ *  deploymentUrl
+ *    The nomad deployment url, if it can be determined.
  *  uploadId
  *    The uploadId, if it can be determined.
  *  entryId
@@ -866,7 +866,7 @@ export const refRelativeTo = Object.freeze({
  *  isResolved
  *    True if the url is fully resolved, i.e. we have everything we need to fetch the data.
  *  isExternal
- *    If the url refers to a resource in an external nomad installation. Note, for relative
+ *    If the url refers to a resource in an external nomad deployment. Note, for relative
  *    urls the value will be undefined, which means we don't know.
  *
  * If the url cannot be parsed, an error is thrown.
@@ -881,18 +881,18 @@ export function parseNomadUrl(url) {
   if (typeof url !== 'string') throw new Error(prefix + 'bad type, expected string, got ' + typeof url)
 
   if (url === systemMetainfoUrl) {
-    // TODO proper handling, using a url containing installationUrl + versionHash somehow?
+    // TODO proper handling, using a url containing deploymentUrl + versionHash somehow?
     return {
       url,
-      relativeTo: refRelativeTo.installation,
+      relativeTo: refRelativeTo.deployment,
       type: refType.metainfo,
-      installationUrl: apiBase,
+      deploymentUrl: apiBase,
       isResolved: true,
       isExternal: false
     }
   }
 
-  let relativeTo, type, installationUrl, uploadId, entryId, mainfile, path, qualifiedName, versionHash
+  let relativeTo, type, deploymentUrl, uploadId, entryId, mainfile, path, qualifiedName, versionHash
   let dataPath, rest, rawPath
   if (url.startsWith('/')) {
     dataPath = url
@@ -904,11 +904,11 @@ export function parseNomadUrl(url) {
   }
 
   if (rest.startsWith('http://') || rest.startsWith('https://')) {
-    // Url includes installationUrl
+    // Url includes deploymentUrl
     let apiPos = rest.indexOf('/api/')
     if (apiPos === -1 && rest.endsWith('/api')) apiPos = rest.length - 4
-    if (apiPos === -1) throw new Error(prefix + 'absolute nomad installation url does not contain "/api"')
-    installationUrl = url.slice(0, apiPos + 4)
+    if (apiPos === -1) throw new Error(prefix + 'absolute nomad deployment url does not contain "/api"')
+    deploymentUrl = url.slice(0, apiPos + 4)
     rest = rest.slice(apiPos + 5)
     if (rest && !rest.startsWith('uploads/')) throw new Error(prefix + 'expected "/uploads/<uploadId>" in absolute url')
     relativeTo = null
@@ -918,17 +918,17 @@ export function parseNomadUrl(url) {
     relativeTo = refRelativeTo.data
   } else if (url.match(/^([a-zA-Z]\w*\.)*[a-zA-Z]\w*$/)) {
     qualifiedName = url
-    relativeTo = refRelativeTo.installation
+    relativeTo = refRelativeTo.deployment
   } else {
     throw new Error(prefix + 'bad url')
   }
   const restParts = rest.split('/')
-  if ((installationUrl && rest) || url.startsWith('../')) {
+  if ((deploymentUrl && rest) || url.startsWith('../')) {
     // Expect upload ref
     if (restParts[0] === 'uploads') {
       // Ref with upload id
-      if (!installationUrl) {
-        relativeTo = refRelativeTo.installation
+      if (!deploymentUrl) {
+        relativeTo = refRelativeTo.deployment
       }
       if (restParts.length === 1) throw new Error(prefix + 'expected "/uploads/<uploadId>" in url')
       uploadId = restParts[1]
@@ -961,9 +961,9 @@ export function parseNomadUrl(url) {
   if (qualifiedName) {
     // Refers to the global schema
     type = refType.metainfo
-  } else if (installationUrl && !uploadId) {
-    // Pure installation url
-    type = refType.installation
+  } else if (deploymentUrl && !uploadId) {
+    // Pure deployment url
+    type = refType.deployment
   } else if (dataPath !== undefined) {
     // Has dataPath
     if (!url.startsWith('#') && !url.startsWith('/') && !entryId && !mainfile && !rawPath) throw new Error(prefix + 'Unexpected dataPath without entry reference')
@@ -990,7 +990,7 @@ export function parseNomadUrl(url) {
     if (type !== refType.metainfo) throw new Error(prefix + 'versionHash can only be specified for metainfo urls.')
     if (!versionHash.match(/\w+/)) throw new Error(prefix + 'bad versionHash provided')
     if (relativeTo) {
-      relativeTo = refRelativeTo.installation
+      relativeTo = refRelativeTo.deployment
     }
   }
 
@@ -1002,7 +1002,7 @@ export function parseNomadUrl(url) {
     url,
     relativeTo,
     type,
-    installationUrl,
+    deploymentUrl,
     uploadId,
     entryId,
     mainfile,
@@ -1010,7 +1010,7 @@ export function parseNomadUrl(url) {
     qualifiedName,
     versionHash,
     isResolved: !relativeTo,
-    isExternal: installationUrl ? (installationUrl !== apiBase) : undefined,
+    isExternal: deploymentUrl ? (deploymentUrl !== apiBase) : undefined,
     toString: () => { return url + ' (parsed)' }
   }
 }
@@ -1026,7 +1026,7 @@ export function parseNomadUrl(url) {
  * by parseNomadUrl, but
  *  1) It additionally has the attribute baseUrl, which stores the baseUrl argument,
  *  2) the isResolved flag should be guaranteed to be true, and
- *  3) all applicable attributes (like installationUrl, uploadId, entryId, etc) are set
+ *  3) all applicable attributes (like deploymentUrl, uploadId, entryId, etc) are set
  *     to the resolved valules.
  *
  * NOTE, if you pass an object as the url argument, and it is not already resolved, the
@@ -1045,7 +1045,7 @@ export function resolveNomadUrl(url, baseUrl) {
     const parsedBaseUrl = parseNomadUrl(baseUrl)
     if (!parsedBaseUrl.isResolved) throw new Error(prefix + 'unresolved baseUrl')
     // Copy data from parsedBaseUrl
-    parsedUrl.installationUrl = parsedBaseUrl.installationUrl // Should always be copied
+    parsedUrl.deploymentUrl = parsedBaseUrl.deploymentUrl // Should always be copied
     if (parsedUrl.relativeTo === refRelativeTo.upload) {
       if (!parsedBaseUrl.uploadId) throw new Error(prefix + 'missing information about uploadId')
       parsedUrl.uploadId = parsedBaseUrl.uploadId
@@ -1059,7 +1059,7 @@ export function resolveNomadUrl(url, baseUrl) {
     }
   }
 
-  parsedUrl.isExternal = !!parsedUrl.installationUrl && parsedUrl.installationUrl !== apiBase
+  parsedUrl.isExternal = !!parsedUrl.deploymentUrl && parsedUrl.deploymentUrl !== apiBase
   parsedUrl.isResolved = true
   return parsedUrl
 }
@@ -1080,13 +1080,13 @@ export function resolveNomadUrlNoThrow(url, baseUrl) {
 }
 
 /**
- * Relativizes a url with respect to the provided installationUrl, uploadId, entryId, and
+ * Relativizes a url with respect to the provided deploymentUrl, uploadId, entryId, and
  * returns the shortest possible relative url, as a string.
  * This method thus basically does the *opposite* of resolveNomadUrl. You can specify either none,
- * the first, the two first, or all three arguments of the (installationUrl, uploadId, entryId)
+ * the first, the two first, or all three arguments of the (deploymentUrl, uploadId, entryId)
  * tuple. The url provided must be absolute.
  */
-export function relativizeNomadUrl(url, installationUrl, uploadId, entryId) {
+export function relativizeNomadUrl(url, deploymentUrl, uploadId, entryId) {
   const parsedUrl = parseNomadUrl(url)
   if (!parsedUrl.isResolved) {
     throw new Error(`Absolute url required, got ${url}.`)
@@ -1094,15 +1094,15 @@ export function relativizeNomadUrl(url, installationUrl, uploadId, entryId) {
   if (!parsedUrl.uploadId) {
     throw new Error(`Expected url to specify an upload, got ${url}`)
   }
-  if (parsedUrl.installationUrl !== installationUrl) {
+  if (parsedUrl.deploymentUrl !== deploymentUrl) {
     // Nothing to relativize
     return normalizeNomadUrl(parsedUrl)
   }
   if (parsedUrl.entryId === entryId) {
-    // Same installation and entry
+    // Same deployment and entry
     return '#' + (parsedUrl.path || '/')
   }
-  // Same installation, possibly also same upload
+  // Same deployment, possibly also same upload
   let rv = parsedUrl.uploadId === uploadId ? '../upload' : `../uploads/${parsedUrl.uploadId}`
   if (parsedUrl.type === refType.archive || parsedUrl.type === refType.metainfo) {
     rv = `${rv}/archive/${parsedUrl.entryId}`
@@ -1126,12 +1126,12 @@ export function relativizeNomadUrl(url, installationUrl, uploadId, entryId) {
 export function normalizeNomadUrl(url) {
   const parsedUrl = parseNomadUrl(url)
   if (!parsedUrl.isResolved) throw new Error(`Failed to normalize url ${url.url}: provided url is unresolved`)
-  if (parsedUrl.type === refType.installation) {
-    return parsedUrl.installationUrl
+  if (parsedUrl.type === refType.deployment) {
+    return parsedUrl.deploymentUrl
   } else if (parsedUrl.type === refType.upload) {
-    return `${parsedUrl.installationUrl}/uploads/${parsedUrl.uploadId}` + (parsedUrl.path ? '/raw/' + parsedUrl.path : '')
+    return `${parsedUrl.deploymentUrl}/uploads/${parsedUrl.uploadId}` + (parsedUrl.path ? '/raw/' + parsedUrl.path : '')
   } else if (parsedUrl.entryId) {
-    return `${parsedUrl.installationUrl}/uploads/${parsedUrl.uploadId}/archive/${parsedUrl.entryId}` + (
+    return `${parsedUrl.deploymentUrl}/uploads/${parsedUrl.uploadId}/archive/${parsedUrl.entryId}` + (
       parsedUrl.path ? '#' + parsedUrl.path + (parsedUrl.versionHash ? '@' + parsedUrl.versionHash : '') : '')
   } else if (parsedUrl.qualifiedName) {
     return parsedUrl.qualifiedName
@@ -1140,18 +1140,18 @@ export function normalizeNomadUrl(url) {
 }
 
 /**
- * Utility for creating an absolute upload url, given installationUrl, uploadId and an UNESCAPED rawPath
+ * Utility for creating an absolute upload url, given deploymentUrl, uploadId and an UNESCAPED rawPath
  */
-export function createUploadUrl(installationUrl, uploadId, rawPathUnescaped) {
+export function createUploadUrl(deploymentUrl, uploadId, rawPathUnescaped) {
   const rawPathEscaped = urlEncodePath(rawPathUnescaped || '')
-  return `${installationUrl}/uploads/${uploadId}/raw/${rawPathEscaped}`
+  return `${deploymentUrl}/uploads/${uploadId}/raw/${rawPathEscaped}`
 }
 
 /**
- * Utility for creating an absolute entry url, given installationUrl, uploadId, entryId, and dataPath
+ * Utility for creating an absolute entry url, given deploymentUrl, uploadId, entryId, and dataPath
  */
-export function createEntryUrl(installationUrl, uploadId, entryId, dataPath) {
-  let rv = `${installationUrl}/uploads/${uploadId}/archive/${entryId}`
+export function createEntryUrl(deploymentUrl, uploadId, entryId, dataPath) {
+  let rv = `${deploymentUrl}/uploads/${uploadId}/archive/${entryId}`
   if (dataPath) {
     rv += '#' + (!dataPath.startsWith('/') ? '/' : '') + dataPath
   }
@@ -1168,7 +1168,7 @@ export function appendDataUrl(parsedUrl, dataPathSuffix) {
   if (!parsedUrl.isResolved) throw new Error(`appendDataUrl: a resolved url required, got ${parsedUrl}`)
   if (parsedUrl.type !== refType.archive) throw new Error(`appendDataUrl: an archive url required, got ${parsedUrl}`)
   return urlJoin(
-    `${parsedUrl.installationUrl}/uploads/${parsedUrl.uploadId}/archive/${parsedUrl.entryId}#${parsedUrl.path || '/'}`,
+    `${parsedUrl.deploymentUrl}/uploads/${parsedUrl.uploadId}/archive/${parsedUrl.entryId}#${parsedUrl.path || '/'}`,
     dataPathSuffix)
 }
 
diff --git a/gui/src/utils.spec.js b/gui/src/utils.spec.js
index 06bdd8a3de..df819e8631 100644
--- a/gui/src/utils.spec.js
+++ b/gui/src/utils.spec.js
@@ -74,7 +74,7 @@ test.each([
   ['empty value', ''],
   ['bad type, expected string, got object', {}],
   ['bad type, expected string, got number', 7],
-  ['absolute nomad installation url does not contain "/api"', 'https://my.nomad.oasis.com/prod'],
+  ['absolute nomad deployment url does not contain "/api"', 'https://my.nomad.oasis.com/prod'],
   ['expected "/uploads/<uploadId>" in absolute url', 'https://my.nomad.oasis.com/prod/api/silly/continuation#/more/silly'],
   ['bad url', 'a.b.0c'],
   ['expected "/uploads/<uploadId>" in url', '../uploads'],
@@ -96,8 +96,8 @@ test.each([
 test.each([
   ['https://my.nomad.oasis.com/prod/api', {
     relativeTo: null,
-    type: refType.installation,
-    installationUrl: 'https://my.nomad.oasis.com/prod/api',
+    type: refType.deployment,
+    deploymentUrl: 'https://my.nomad.oasis.com/prod/api',
     uploadId: undefined,
     entryId: undefined,
     mainfile: undefined,
@@ -110,7 +110,7 @@ test.each([
   [`${apiBase}/uploads/SomeUploadID`, {
     relativeTo: null,
     type: refType.upload,
-    installationUrl: apiBase,
+    deploymentUrl: apiBase,
     uploadId: 'SomeUploadID',
     entryId: undefined,
     mainfile: undefined,
@@ -123,7 +123,7 @@ test.each([
   [`${apiBase}/uploads/SomeUploadID/raw`, {
     relativeTo: null,
     type: refType.upload,
-    installationUrl: apiBase,
+    deploymentUrl: apiBase,
     uploadId: 'SomeUploadID',
     entryId: undefined,
     mainfile: undefined,
@@ -136,7 +136,7 @@ test.each([
   [`${apiBase}/uploads/SomeUploadID/raw/some/path#/arch/path`, {
     relativeTo: null,
     type: refType.archive,
-    installationUrl: apiBase,
+    deploymentUrl: apiBase,
     uploadId: 'SomeUploadID',
     entryId: 'TbJz7EfLcUdPBJ_iSAXrm5cy7G1v',
     mainfile: 'some/path',
@@ -149,7 +149,7 @@ test.each([
   [`${apiBase}/uploads/SomeUploadID/archive/SomeArchID`, {
     relativeTo: null,
     type: refType.archive,
-    installationUrl: apiBase,
+    deploymentUrl: apiBase,
     uploadId: 'SomeUploadID',
     entryId: 'SomeArchID',
     mainfile: undefined,
@@ -162,7 +162,7 @@ test.each([
   [`${apiBase}/uploads/SomeUploadID/archive/SomeArchID#arch/path`, {
     relativeTo: null,
     type: refType.archive,
-    installationUrl: apiBase,
+    deploymentUrl: apiBase,
     uploadId: 'SomeUploadID',
     entryId: 'SomeArchID',
     mainfile: undefined,
@@ -175,7 +175,7 @@ test.each([
   [`${apiBase}/uploads/SomeUploadID/archive/mainfile/some/path`, {
     relativeTo: null,
     type: refType.archive,
-    installationUrl: apiBase,
+    deploymentUrl: apiBase,
     uploadId: 'SomeUploadID',
     entryId: 'TbJz7EfLcUdPBJ_iSAXrm5cy7G1v',
     mainfile: 'some/path',
@@ -188,7 +188,7 @@ test.each([
   [`${apiBase}/uploads/SomeUploadID/archive/mainfile/some/path#/arch//path`, {
     relativeTo: null,
     type: refType.archive,
-    installationUrl: apiBase,
+    deploymentUrl: apiBase,
     uploadId: 'SomeUploadID',
     entryId: 'TbJz7EfLcUdPBJ_iSAXrm5cy7G1v',
     mainfile: 'some/path',
@@ -199,9 +199,9 @@ test.each([
     isExternal: false
   }],
   [`../uploads/SomeUploadID`, {
-    relativeTo: refRelativeTo.installation,
+    relativeTo: refRelativeTo.deployment,
     type: refType.upload,
-    installationUrl: undefined,
+    deploymentUrl: undefined,
     uploadId: 'SomeUploadID',
     entryId: undefined,
     mainfile: undefined,
@@ -212,9 +212,9 @@ test.each([
     isExternal: undefined
   }],
   [`../uploads/SomeUploadID/raw/some/path`, {
-    relativeTo: refRelativeTo.installation,
+    relativeTo: refRelativeTo.deployment,
     type: refType.upload,
-    installationUrl: undefined,
+    deploymentUrl: undefined,
     uploadId: 'SomeUploadID',
     entryId: undefined,
     mainfile: undefined,
@@ -225,9 +225,9 @@ test.each([
     isExternal: undefined
   }],
   [`../uploads/SomeUploadID/raw/some/path#/definitions/some/schema/path`, {
-    relativeTo: refRelativeTo.installation,
+    relativeTo: refRelativeTo.deployment,
     type: refType.metainfo,
-    installationUrl: undefined,
+    deploymentUrl: undefined,
     uploadId: 'SomeUploadID',
     entryId: 'TbJz7EfLcUdPBJ_iSAXrm5cy7G1v',
     mainfile: 'some/path',
@@ -238,9 +238,9 @@ test.each([
     isExternal: undefined
   }],
   [`../uploads/SomeUploadID/raw/some/path#definitions/some/schema/path@SomeVersionHash`, {
-    relativeTo: refRelativeTo.installation,
+    relativeTo: refRelativeTo.deployment,
     type: refType.metainfo,
-    installationUrl: undefined,
+    deploymentUrl: undefined,
     uploadId: 'SomeUploadID',
     entryId: 'TbJz7EfLcUdPBJ_iSAXrm5cy7G1v',
     mainfile: 'some/path',
@@ -251,9 +251,9 @@ test.each([
     isExternal: undefined
   }],
   [`../uploads/SomeUploadID/archive/SomeArchID`, {
-    relativeTo: refRelativeTo.installation,
+    relativeTo: refRelativeTo.deployment,
     type: refType.archive,
-    installationUrl: undefined,
+    deploymentUrl: undefined,
     uploadId: 'SomeUploadID',
     entryId: 'SomeArchID',
     mainfile: undefined,
@@ -264,9 +264,9 @@ test.each([
     isExternal: undefined
   }],
   [`../uploads/SomeUploadID/archive/SomeArchID#/arch/path`, {
-    relativeTo: refRelativeTo.installation,
+    relativeTo: refRelativeTo.deployment,
     type: refType.archive,
-    installationUrl: undefined,
+    deploymentUrl: undefined,
     uploadId: 'SomeUploadID',
     entryId: 'SomeArchID',
     mainfile: undefined,
@@ -277,9 +277,9 @@ test.each([
     isExternal: undefined
   }],
   [`../uploads/SomeUploadID/archive/mainfile/some/path`, {
-    relativeTo: refRelativeTo.installation,
+    relativeTo: refRelativeTo.deployment,
     type: refType.archive,
-    installationUrl: undefined,
+    deploymentUrl: undefined,
     uploadId: 'SomeUploadID',
     entryId: 'TbJz7EfLcUdPBJ_iSAXrm5cy7G1v',
     mainfile: 'some/path',
@@ -290,9 +290,9 @@ test.each([
     isExternal: undefined
   }],
   [`../uploads/SomeUploadID/archive/mainfile/some/path#/definitions/some/schema/path`, {
-    relativeTo: refRelativeTo.installation,
+    relativeTo: refRelativeTo.deployment,
     type: refType.metainfo,
-    installationUrl: undefined,
+    deploymentUrl: undefined,
     uploadId: 'SomeUploadID',
     entryId: 'TbJz7EfLcUdPBJ_iSAXrm5cy7G1v',
     mainfile: 'some/path',
@@ -303,9 +303,9 @@ test.each([
     isExternal: undefined
   }],
   [`../uploads/SomeUploadID/archive/mainfile/some/path#definitions/some//schema/path@SomeVersionHash`, {
-    relativeTo: refRelativeTo.installation,
+    relativeTo: refRelativeTo.deployment,
     type: refType.metainfo,
-    installationUrl: undefined,
+    deploymentUrl: undefined,
     uploadId: 'SomeUploadID',
     entryId: 'TbJz7EfLcUdPBJ_iSAXrm5cy7G1v',
     mainfile: 'some/path',
@@ -318,7 +318,7 @@ test.each([
   [`../upload`, {
     relativeTo: refRelativeTo.upload,
     type: refType.upload,
-    installationUrl: undefined,
+    deploymentUrl: undefined,
     uploadId: undefined,
     entryId: undefined,
     mainfile: undefined,
@@ -331,7 +331,7 @@ test.each([
   [`../upload/raw/some/path`, {
     relativeTo: refRelativeTo.upload,
     type: refType.upload,
-    installationUrl: undefined,
+    deploymentUrl: undefined,
     uploadId: undefined,
     entryId: undefined,
     mainfile: undefined,
@@ -344,7 +344,7 @@ test.each([
   [`../upload/raw/some/path#/arch/path`, {
     relativeTo: refRelativeTo.upload,
     type: refType.archive,
-    installationUrl: undefined,
+    deploymentUrl: undefined,
     uploadId: undefined,
     entryId: undefined,
     mainfile: 'some/path',
@@ -357,7 +357,7 @@ test.each([
   [`../upload/archive/SomeArchID`, {
     relativeTo: refRelativeTo.upload,
     type: refType.archive,
-    installationUrl: undefined,
+    deploymentUrl: undefined,
     uploadId: undefined,
     entryId: 'SomeArchID',
     mainfile: undefined,
@@ -370,7 +370,7 @@ test.each([
   [`../upload/archive/SomeArchID#/definitions/path`, {
     relativeTo: refRelativeTo.upload,
     type: refType.metainfo,
-    installationUrl: undefined,
+    deploymentUrl: undefined,
     uploadId: undefined,
     entryId: 'SomeArchID',
     mainfile: undefined,
@@ -381,9 +381,9 @@ test.each([
     isExternal: undefined
   }],
   [`../upload/archive/SomeArchID#/definitions/path@SomeVersionHash`, {
-    relativeTo: refRelativeTo.installation,
+    relativeTo: refRelativeTo.deployment,
     type: refType.metainfo,
-    installationUrl: undefined,
+    deploymentUrl: undefined,
     uploadId: undefined,
     entryId: 'SomeArchID',
     mainfile: undefined,
@@ -396,7 +396,7 @@ test.each([
   [`../upload/archive/mainfile/some/path`, {
     relativeTo: refRelativeTo.upload,
     type: refType.archive,
-    installationUrl: undefined,
+    deploymentUrl: undefined,
     uploadId: undefined,
     entryId: undefined,
     mainfile: 'some/path',
@@ -409,7 +409,7 @@ test.each([
   [`../upload/archive/mainfile/some/path#/arch/path`, {
     relativeTo: refRelativeTo.upload,
     type: refType.archive,
-    installationUrl: undefined,
+    deploymentUrl: undefined,
     uploadId: undefined,
     entryId: undefined,
     mainfile: 'some/path',
@@ -422,7 +422,7 @@ test.each([
   [`#/arch/path`, {
     relativeTo: refRelativeTo.data,
     type: refType.archive,
-    installationUrl: undefined,
+    deploymentUrl: undefined,
     uploadId: undefined,
     entryId: undefined,
     mainfile: undefined,
@@ -435,7 +435,7 @@ test.each([
   [`#/definitions/def/path`, {
     relativeTo: refRelativeTo.data,
     type: refType.metainfo,
-    installationUrl: undefined,
+    deploymentUrl: undefined,
     uploadId: undefined,
     entryId: undefined,
     mainfile: undefined,
@@ -448,7 +448,7 @@ test.each([
   [`/arch/path`, {
     relativeTo: refRelativeTo.data,
     type: refType.archive,
-    installationUrl: undefined,
+    deploymentUrl: undefined,
     uploadId: undefined,
     entryId: undefined,
     mainfile: undefined,
@@ -461,7 +461,7 @@ test.each([
   [`/definitions/def/path`, {
     relativeTo: refRelativeTo.data,
     type: refType.metainfo,
-    installationUrl: undefined,
+    deploymentUrl: undefined,
     uploadId: undefined,
     entryId: undefined,
     mainfile: undefined,
@@ -472,9 +472,9 @@ test.each([
     isExternal: undefined
   }],
   [`nomad.datamodel.some.path`, {
-    relativeTo: refRelativeTo.installation,
+    relativeTo: refRelativeTo.deployment,
     type: refType.metainfo,
-    installationUrl: undefined,
+    deploymentUrl: undefined,
     uploadId: undefined,
     entryId: undefined,
     mainfile: undefined,
@@ -487,7 +487,7 @@ test.each([
 ])('parseNomadUrl: %s', (url, expectedResult) => {
   const result = parseNomadUrl(url)
   expect(result.url).toBe(url)
-  for (const key of ['relativeTo', 'type', 'installationUrl', 'uploadId', 'entryId', 'mainfile', 'path']) {
+  for (const key of ['relativeTo', 'type', 'deploymentUrl', 'uploadId', 'entryId', 'mainfile', 'path']) {
     expect(key in result).toBe(true)
     expect(result[key]).toBe(expectedResult[key])
   }
diff --git a/nomad/app/v1/routers/uploads.py b/nomad/app/v1/routers/uploads.py
index e8c81441a2..5d89c56eff 100644
--- a/nomad/app/v1/routers/uploads.py
+++ b/nomad/app/v1/routers/uploads.py
@@ -1726,7 +1726,7 @@ async def post_upload_bundle(
         set_from_oasis: Optional[bool] = FastApiQuery(
             None,
             description=strip('''
-                If the `from_oasis` flag and `oasis_deployment_id` should be set
+                If the `from_oasis` flag and `oasis_deployment_url` should be set
                 *(only admins can change this setting)*.''')),
         trigger_processing: Optional[bool] = FastApiQuery(
             None,
diff --git a/nomad/config.py b/nomad/config.py
index 4d3a11cc4d..b66522716d 100644
--- a/nomad/config.py
+++ b/nomad/config.py
@@ -188,8 +188,7 @@ services = NomadConfig(
 )
 
 oasis = NomadConfig(
-    central_nomad_api_url='https://nomad-lab.eu/prod/v1/api',
-    central_nomad_deployment_id='nomad-lab.eu/prod/v1',
+    central_nomad_deployment_url='https://nomad-lab.eu/prod/v1/api',
     allowed_users=None,  # a list of usernames or user account emails
     uses_central_user_management=False,
     is_oasis=False
@@ -345,7 +344,8 @@ datacite = NomadConfig(
 meta = NomadConfig(
     version='1.1.5',
     commit=gitinfo.commit,
-    deployment='devel',
+    deployment='devel',  # A human-friendly name of the nomad deployment
+    deployment_url='https://my-oasis.org/api',  # The deployment's url (api url).
     label=None,
     default_domain='dft',
     service='unknown nomad service',
@@ -354,7 +354,6 @@ meta = NomadConfig(
     homepage='https://nomad-lab.eu',
     source_url='https://gitlab.mpcdf.mpg.de/nomad-lab/nomad-FAIR',
     maintainer_email='markus.scheidgen@physik.hu-berlin.de',
-    deployment_id='nomad-lab.eu/prod/v1',
     beta=None
 )
 
@@ -404,7 +403,7 @@ bundle_import = NomadConfig(
         include_datasets=True,
         include_bundle_info=True,  # Keeps the bundle_info.json file (not necessary but nice to have)
         keep_original_timestamps=False,  # If all time stamps (create time, publish time etc) should be imported from the bundle
-        set_from_oasis=True,  # If the from_oasis flag and oasis_deployment_id should be set
+        set_from_oasis=True,  # If the from_oasis flag and oasis_deployment_url should be set
         delete_upload_on_fail=False,  # If False, it is just removed from the ES index on failure
         delete_bundle_when_done=True,  # Deletes the source bundle when done (regardless of success)
         also_delete_bundle_parent_folder=True,  # Also deletes the parent folder, if it is empty.
diff --git a/nomad/datamodel/context.py b/nomad/datamodel/context.py
index 867c180c36..63f9168677 100644
--- a/nomad/datamodel/context.py
+++ b/nomad/datamodel/context.py
@@ -156,7 +156,7 @@ class Context(MetainfoContext):
         we're processing locally or not). For non-ServerContexts, the method does nothing.
 
         Note, that *modifying* existing files is discouraged, as this needs to be done with
-        care to avoid infinite loops of files modifyin each other etc. We would thus recommend
+        care to avoid infinite loops of files modifying each other etc. We would thus recommend
         to only use this method for *adding* files. If you still want to modify existing
         files, you must set the `allow_modify` flag, otherwise the call will raise an exception
         if the entry already exists. Also note that this method should not be used to modify
diff --git a/nomad/infrastructure.py b/nomad/infrastructure.py
index 72bce11b28..226fd24579 100644
--- a/nomad/infrastructure.py
+++ b/nomad/infrastructure.py
@@ -241,7 +241,7 @@ class OasisUserManagement(UserManagement):
         if users_api_url:
             self._users_api_url = users_api_url
         else:
-            self._users_api_url = f'{config.oasis.central_nomad_api_url}/v1/users'
+            self._users_api_url = f'{config.oasis.central_nomad_deployment_url}/v1/users'
 
     def add_user(self, user, bcrypt_password=None, invite=False):
         raise NotImplementedError(
diff --git a/nomad/processing/data.py b/nomad/processing/data.py
index 96868ea994..8b074881ea 100644
--- a/nomad/processing/data.py
+++ b/nomad/processing/data.py
@@ -1243,7 +1243,7 @@ class Upload(Proc):
     license = StringField(default='CC BY 4.0', required=True)
 
     from_oasis = BooleanField(default=False)
-    oasis_deployment_id = StringField(default=None)
+    oasis_deployment_url = StringField(default=None)
     published_to = ListField(StringField())
 
     # Process parameters and state vars that need to be persisted during the process
@@ -1427,7 +1427,7 @@ class Upload(Proc):
         '''
         assert self.published, \
             'Only published uploads can be published to the central NOMAD.'
-        assert config.oasis.central_nomad_deployment_id not in self.published_to, \
+        assert config.oasis.central_nomad_deployment_url not in self.published_to, \
             'Upload is already published to the central NOMAD.'
 
         tmp_dir = create_tmp_dir('export_' + self.upload_id)
@@ -1448,7 +1448,7 @@ class Upload(Proc):
             upload_parameters: Dict[str, Any] = {}
             if embargo_length is not None:
                 upload_parameters.update(embargo_length=embargo_length)
-            upload_url = f'{config.oasis.central_nomad_api_url}/v1/uploads/bundle'
+            upload_url = f'{config.oasis.central_nomad_deployment_url}/v1/uploads/bundle'
 
             with open(bundle_path, 'rb') as f:
                 response = requests.post(
@@ -1460,7 +1460,7 @@ class Upload(Proc):
                     status_code=response.status_code, body=response.text)
                 raise ProcessFailure('Error message from central NOMAD: {response.text}')
 
-            self.published_to.append(config.oasis.central_nomad_deployment_id)
+            self.published_to.append(config.oasis.central_nomad_deployment_url)
         finally:
             PathObject(tmp_dir).delete()
 
@@ -2282,7 +2282,7 @@ class Upload(Proc):
             bundle_info = bundle.bundle_info
             # Sanity checks
             required_keys_root_level = (
-                'upload_id', 'source.version', 'source.commit', 'source.deployment', 'source.deployment_id',
+                'upload_id', 'source.version', 'source.commit', 'source.deployment', 'source.deployment_url',
                 'export_options.include_raw_files',
                 'export_options.include_archive_files',
                 'export_options.include_datasets',
@@ -2326,7 +2326,7 @@ class Upload(Proc):
             # Define which keys we think okay to copy from the bundle
             upload_keys_to_copy = [
                 'upload_name', 'main_author', 'coauthors', 'reviewers', 'embargo_length', 'license',
-                'from_oasis', 'oasis_deployment_id']
+                'from_oasis', 'oasis_deployment_url']
             if settings.keep_original_timestamps:
                 upload_keys_to_copy.extend(('upload_create_time', 'publish_time',))
             try:
@@ -2345,13 +2345,13 @@ class Upload(Proc):
                     'Timestamp is in the future')
             if settings.set_from_oasis:
                 self.from_oasis = True
-                source_deployment_id = bundle_info['source']['deployment_id']
-                assert source_deployment_id, 'No source deployment_id defined'
-                if not self.oasis_deployment_id:
-                    self.oasis_deployment_id = source_deployment_id
-                    # Note, if oasis_deployment_id is set in the bundle_info, we keep this
+                source_deployment_url = bundle_info['source']['deployment_url']
+                assert source_deployment_url, 'No source deployment_url defined'
+                if not self.oasis_deployment_url:
+                    self.oasis_deployment_url = source_deployment_url
+                    # Note, if oasis_deployment_url is set in the bundle_info, we keep this
                     # value as it is, since it indicates that the upload has been imported from
-                    # somewhere else originally (i.e. source_deployment_id would not be the
+                    # somewhere else originally (i.e. source_deployment_url would not be the
                     # original source)
 
             # Dataset definitions
diff --git a/ops/docker-compose/nomad-oasis-with-keycloak/configs/nomad.yaml b/ops/docker-compose/nomad-oasis-with-keycloak/configs/nomad.yaml
index e99d50eae4..a0d17268ed 100644
--- a/ops/docker-compose/nomad-oasis-with-keycloak/configs/nomad.yaml
+++ b/ops/docker-compose/nomad-oasis-with-keycloak/configs/nomad.yaml
@@ -18,7 +18,7 @@ keycloak:
 
 meta:
   deployment: 'oasis'
-  deployment_id: 'my_oasis'
+  deployment_url: 'https://my-oasis.org/api'
   maintainer_email: 'me@my-oasis.org'
 
 mongo:
diff --git a/ops/docker-compose/nomad-oasis/configs/nomad.yaml b/ops/docker-compose/nomad-oasis/configs/nomad.yaml
index b64e2b8477..512eca533e 100644
--- a/ops/docker-compose/nomad-oasis/configs/nomad.yaml
+++ b/ops/docker-compose/nomad-oasis/configs/nomad.yaml
@@ -11,7 +11,7 @@ north:
 
 meta:
   deployment: 'oasis'
-  deployment_id: 'my_oasis'
+  deployment_url: 'https://my-oasis.org/api'
   maintainer_email: 'me@my-oasis.org'
 
 mongo:
diff --git a/tests/app/v1/routers/test_uploads.py b/tests/app/v1/routers/test_uploads.py
index 86401e0cdd..deac81ac2b 100644
--- a/tests/app/v1/routers/test_uploads.py
+++ b/tests/app/v1/routers/test_uploads.py
@@ -1675,8 +1675,8 @@ def test_post_upload_action_publish_to_central_nomad(
                     'n_quantities', 'quantities'):  # TODO: n_quantities and quantities update problem?
                 assert new_entry_metadata_dict[k] == v, f'Metadata not matching: {k}'
         assert new_entry.datasets == ['dataset_id']
-        assert old_upload.published_to[0] == config.oasis.central_nomad_deployment_id
-        assert new_upload.from_oasis and new_upload.oasis_deployment_id
+        assert old_upload.published_to[0] == config.oasis.central_nomad_deployment_url
+        assert new_upload.from_oasis and new_upload.oasis_deployment_url
         assert new_upload.embargo_length == embargo_length
         assert old_upload.upload_files.access == 'restricted' if old_upload.with_embargo else 'public'
         assert new_upload.upload_files.access == 'restricted' if new_upload.with_embargo else 'public'
@@ -1907,7 +1907,7 @@ def test_post_upload_bundle(
     if expected_status_code == 200:
         assert_processing(client, upload_id, user_auth, published=publish)
         upload = Upload.get(upload_id)
-        assert upload.from_oasis and upload.oasis_deployment_id
+        assert upload.from_oasis and upload.oasis_deployment_url
     return
 
 
diff --git a/tests/conftest.py b/tests/conftest.py
index 359e091eaa..28a546d754 100644
--- a/tests/conftest.py
+++ b/tests/conftest.py
@@ -710,7 +710,7 @@ def oasis_publishable_upload(
     monkeypatch.setattr('nomad.config.oasis.is_oasis', True)
     monkeypatch.setattr('nomad.config.keycloak.username', test_user.username)
 
-    monkeypatch.setattr('nomad.config.oasis.central_nomad_api_url', '/api')
+    monkeypatch.setattr('nomad.config.oasis.central_nomad_deployment_url', '/api')
 
     # create a dataset to also test this aspect of oasis uploads
     entry = non_empty_processed.successful_entries[0]
diff --git a/tests/processing/test_data.py b/tests/processing/test_data.py
index 07bf468402..d09594bf21 100644
--- a/tests/processing/test_data.py
+++ b/tests/processing/test_data.py
@@ -351,8 +351,8 @@ def test_publish_to_central_nomad(
                 'n_quantities', 'quantities'):  # TODO: n_quantities and quantities update problem?
             assert new_entry_metadata_dict[k] == v, f'Metadata not matching: {k}'
     assert new_entry.datasets == ['dataset_id']
-    assert old_upload.published_to[0] == config.oasis.central_nomad_deployment_id
-    assert new_upload.from_oasis and new_upload.oasis_deployment_id
+    assert old_upload.published_to[0] == config.oasis.central_nomad_deployment_url
+    assert new_upload.from_oasis and new_upload.oasis_deployment_url
     assert new_upload.embargo_length == embargo_length
     assert old_upload.upload_files.access == 'restricted' if old_upload.with_embargo else 'public'
     assert new_upload.upload_files.access == 'restricted' if new_upload.with_embargo else 'public'
-- 
GitLab