Commit e07ca8cd authored by Markus Scheidgen's avatar Markus Scheidgen
Browse files

Merged parser integration.

parents 0ba6bbc5 f41458d1
......@@ -11,7 +11,7 @@ __pycache__
.coverage
try.http
project/
test_*
test_*/
local/
target/
*.swp
......
......@@ -17,13 +17,16 @@ stages:
- deploy
variables:
TEST_IMAGE: gitlab-registry.mpcdf.mpg.de/nomad-lab/nomad-fair:test
TEST_IMAGE: gitlab-registry.mpcdf.mpg.de/nomad-lab/nomad-fair:${CI_COMMIT_REF_NAME}
RELEASE_IMAGE: gitlab-registry.mpcdf.mpg.de/nomad-lab/nomad-fair:latest
FRONTEND_TEST_IMAGE: gitlab-registry.mpcdf.mpg.de/nomad-lab/nomad-fair/frontend:test
FRONTEND_TEST_IMAGE: gitlab-registry.mpcdf.mpg.de/nomad-lab/nomad-fair/frontend:${CI_COMMIT_REF_NAME}
FRONTEND_RELEASE_IMAGE: gitlab-registry.mpcdf.mpg.de/nomad-lab/nomad-fair/frontend:latest
RAWAPI_TEST_IMAGE: gitlab-registry.mpcdf.mpg.de/nomad-lab/nomad-fair/rawapi:test
RAWAPI_TEST_IMAGE: gitlab-registry.mpcdf.mpg.de/nomad-lab/nomad-fair/rawapi:${CI_COMMIT_REF_NAME}
RAWAPI_RELEASE_IMAGE: gitlab-registry.mpcdf.mpg.de/nomad-lab/nomad-fair/rawapi:latest
KUBECONFIG: /etc/deploy/config
STAGING_NAMESPACE: nomad-fairdi
build:
stage: build
script:
......@@ -53,8 +56,8 @@ buildrawapi:
- docker login -u gitlab-ci-token -p $CI_BUILD_TOKEN gitlab-registry.mpcdf.mpg.de
- docker build -t $RAWAPI_TEST_IMAGE -f rawapi.Dockerfile .
- docker push $RAWAPI_TEST_IMAGE
except:
- /^dev-.*$/
only:
- rawapi
linting:
......@@ -160,22 +163,8 @@ release_rawapi:
- docker pull $RAWAPI_TEST_IMAGE
- docker tag $RAWAPI_TEST_IMAGE $RAWAPI_RELEASE_IMAGE
- docker push $RAWAPI_RELEASE_IMAGE
except:
- /^dev-.*$/
when: manual
deploy:
stage: deploy
script:
- docker login -u gitlab-ci-token -p $CI_BUILD_TOKEN gitlab-registry.mpcdf.mpg.de
- cd ops/docker-compose/nomad
# secret .env file is stored on the "production" machine and is bind mounted into the
# runner container at /nomad/config/.env
- cp /nomad/config/.env .
- docker-compose -f docker-compose.yml -f docker-compose.prod.yml build
- docker-compose -f docker-compose.yml -f docker-compose.prod.yml up --no-deps -d api worker gui
except:
- /^dev-.*$/
only:
- rawapi
when: manual
deploy_rawapi:
......@@ -187,6 +176,41 @@ deploy_rawapi:
# runner container at /nomad/config/.env
- cp /nomad/config/.rawapi_env .env
- docker-compose -f docker-compose.yml -f docker-compose.prod.yml up -d
only:
- rawapi
when: manual
deploy:
stage: deploy
image: dtzar/helm-kubectl
before_script:
- mkdir -p /etc/deploy
# kube_config is a CI/CD variable set in GitLab GUI
- echo $CI_KUBE_CONFIG | base64 -d > /etc/deploy/config
- helm init --client-only
- helm repo add stable https://kubernetes-charts.storage.googleapis.com/
- helm repo add incubator https://kubernetes-charts-incubator.storage.googleapis.com/
- helm repo update
script:
- cd ops/helm/nomad
- export KUBECONFIG=/etc/deploy/config
- helm dep build
- export NOMAD_VERSION="$(grep "version" Chart.yaml | cut -d" " -f2)"
- export NUMERIC_VERSION="$(echo ${NOMAD_VERSION} | cut -d"." -f1-2)"
- export RELEASE_NAME="nomad-v${NOMAD_VERSION//./-}"
- export DEPLOYS=$(helm ls | grep $RELEASE_NAME | wc -l)
- export EXTERNAL_PATH="/fairdi/nomad/v${NOMAD_VERSION}"
- export DBNAME="fairdi_nomad_v${NOMAD_VERSION//./_}"
- export FILES_PATH="/scratch/nomad-fair/fs/nomad_v${NOMAD_VERSION}"
- if [ ${DEPLOYS} -eq 0 ]; then
helm install --name=${RELEASE_NAME} . --namespace=${STAGING_NAMESPACE}
--set proxy.nodePort="300${NUMERIC_VERSION//./}"
--set proxy.external.path=${EXTERNAL_PATH}
--set dbname=${DBNAME}
--set volumes.files=${FILES_PATH};
else
helm upgrade ${RELEASE_NAME} . --namespace=${STAGING_NAMESPACE} --recreate-pods;
fi
except:
- /^dev-.*$/
when: manual
......@@ -44,7 +44,7 @@
"cwd": "${workspaceFolder}",
"program": "${workspaceFolder}/.pyenv/bin/pytest",
"args": [
"-sv", "tests/test_normalizing.py::test_normalizer[parsers/cpmd-tests/data/parsers/cpmd/geo_output.out]"
"-sv", "tests/test_api.py::TestUploads::test_put[None-multipart-tests/data/proc/examples_template.zip]"
]
},
{
......
......@@ -71,13 +71,10 @@ COPY --from=build /install/.dependencies/nomad-meta-info /app/.dependencies/noma
COPY --from=build /install/docs/.build /app/docs/.build
RUN mkdir -p /app/.volumes/fs
RUN mkdir -p /nomad
RUN useradd -ms /bin/bash nomad
RUN chown -R nomad /app
RUN chown -R nomad /nomad
USER nomad
VOLUME /app/.volumes/fs
VOLUME /nomad
EXPOSE 8000
No preview for this file type
......@@ -13,7 +13,7 @@
# documentation root, use os.path.abspath to make it absolute, like shown here.
import os
import sys
from recommonmark.transform import AutoStructify
# from recommonmark.transform import AutoStructify
sys.path.insert(0, os.path.abspath('..'))
......@@ -48,16 +48,13 @@ extensions = [
'sphinxcontrib.httpdomain',
'sphinxcontrib.autohttp.flask',
'sphinxcontrib.autohttp.flaskqref',
'celery.contrib.sphinx'
'celery.contrib.sphinx',
'm2r'
]
# Add any paths that contain templates here, relative to this directory.
templates_path = ['.templates']
source_parsers = {
'.md': 'recommonmark.parser.CommonMarkParser',
}
# The suffix(es) of source filenames.
# You can specify multiple suffix as a list of string:
#
......@@ -179,8 +176,8 @@ todo_include_todos = True
# Enable sphinx specific markdown features
def setup(app):
app.add_stylesheet('css/custom.css')
app.add_config_value('recommonmark_config', {
'enable_auto_doc_ref': True,
'enable_eval_rst': True
}, True)
app.add_transform(AutoStructify)
# app.add_config_value('recommonmark_config', {
# 'enable_auto_doc_ref': True,
# 'enable_eval_rst': True
# }, True)
# app.add_transform(AutoStructify)
......@@ -11,4 +11,5 @@ and infrastructure with a simplified architecture and consolidated code base.
setup
dev_guidelines
api
ops
reference
Operating nomad
===============
.. mdinclude:: ../ops/README.md
.. mdinclude:: ../ops/containers/README.md
.. mdinclude:: ../ops/docker-compose/README.md
.. mdinclude:: ../ops/helm/README.md
\ No newline at end of file
# Reference
## nomad.config
```eval_rst
Reference
=========
nomad.config
------------
.. automodule:: nomad.config
:members:
```
## nomad.dependencies
```eval_rst
nomad.dependencies
------------------
.. automodule:: nomad.dependencies
```
## nomad.files
```eval_rst
nomad.files
-----------
.. automodule:: nomad.files
```
## nomad.parsing
```eval_rst
nomad.parsing
-------------
.. automodule:: nomad.parsing
```
## nomad.processing
```eval_rst
nomad.processing
----------------
.. automodule:: nomad.processing
```
## nomad.repo
```eval_rst
nomad.repo
----------
.. automodule:: nomad.repo
```
## nomad.user
```eval_rst
nomad.user
----------
.. automodule:: nomad.coe_repo
```
## nomad.api
```eval_rst
nomad.api
---------
.. automodule:: nomad.api
```
## nomad.utils
```eval_rst
.. automodule:: nomad.utils
```
\ No newline at end of file
nomad.utils
-----------
.. automodule:: nomad.utils
\ No newline at end of file
REACT_APP_API_BASE = 'http://localhost:8000/nomad/api'
REACT_APP_OBJECT_BASE = 'http://localhost:9007'
REACT_APP_APP_BASE = '/nomad'
REACT_APP_APP_STATIC_BASE = ''
REACT_APP_DEBUG = 'true'
\ No newline at end of file
REACT_APP_DEBUG = 'true'
REACT_KIBANA_BASE = '/nomad/kibana'
\ No newline at end of file
REACT_APP_API_BASE = '/nomad/api'
REACT_APP_OBJECT_BASE = '/nomad/objects'
REACT_APP_APP_BASE = '/nomad'
REACT_APP_APP_STATIC_BASE = '/nomad'
REACT_APP_DEBUG = 'true'
\ No newline at end of file
......@@ -28,10 +28,17 @@ COPY yarn.lock /nomad/app/yarn.lock
RUN yarn
COPY . /nomad/app
RUN yarn build
RUN yarn run build
# production environment
FROM nginx:1.13.9-alpine
COPY --from=build /nomad/app/build /app/nomad
COPY ./gui.conf /etc/nginx/conf.d/default.conf
CMD ["nginx", "-g", "daemon off;"]
WORKDIR /app/nomad
CMD nginx -g "daemon off;"
VOLUME /etc/nginx/conf.d
VOLUME /nomad/app/config
EXPOSE 8080/tcp
......@@ -3,14 +3,16 @@
"version": "0.1.0",
"private": true,
"dependencies": {
"@material-ui/core": "^1.5.1",
"@material-ui/docs": "^1.0.0-alpha.5",
"@material-ui/icons": "^2.0.3",
"@material-ui/core": "^3.9.0",
"@material-ui/icons": "^3.0.2",
"@navjobs/upload": "^3.1.3",
"base-64": "^0.1.0",
"fetch": "^1.1.0",
"file-saver": "^2.0.0",
"html-to-react": "^1.3.3",
"marked": "^0.6.0",
"react": "^16.4.2",
"react-cookie": "^3.0.8",
"react-copy-to-clipboard": "^5.0.1",
"react-dom": "^16.4.2",
"react-dropzone": "^5.0.1",
......@@ -19,14 +21,16 @@
"react-router-dom": "^4.3.1",
"react-router-hash-link": "^1.2.0",
"react-scripts": "1.1.4",
"react-swipeable-views": "^0.13.0",
"recompose": "^0.28.2",
"swagger-client": "^3.8.22",
"three.js": "^0.77.1",
"url-parse": "^1.4.3"
},
"scripts": {
"metainfo": "git clone --single-branch -b nomad-fair http://gitlab.mpcdf.mpg.de/nomad-lab/nomad-meta-info.git --depth=1 public/metainfo",
"gitinfo": "echo \"{ \\\"log\\\": \\\"$(git log -1 --oneline)\\\", \\\"ref\\\": \\\"$(git describe --all)\\\", \\\"version\\\": \\\"$(git describe)\\\" }\" > src/gitinfo.json",
"start": "yarn metainfo; react-scripts start",
"build": "yarn metainfo; react-scripts build",
"start": "react-scripts start",
"build": "react-scripts build",
"test": "react-scripts test --env=jsdom",
"eject": "react-scripts eject"
},
......@@ -35,10 +39,11 @@
"eslint": "^4.19.1",
"eslint-config-standard": "^11.0.0",
"eslint-plugin-import": "^2.14.0",
"eslint-plugin-node": "^8.0.1",
"eslint-plugin-promise": "^3.7.0",
"eslint-plugin-react": "^7.11.1",
"eslint-plugin-standard": "^3.1.0",
"serve": "^10.0.0"
},
"homepage": "http://mywebsite.com/nomad"
"homepage": "."
}
// this is a public file and must only contain configuration that can be publicly
// available
window.nomadEnv = {
"apiBase": "/nomad/api",
"appBase": "/nomad",
"appStaticBase": "/nomad",
"appDebug": false
};
\ No newline at end of file
......@@ -29,6 +29,7 @@
<noscript>
You need to enable JavaScript to run this app.
</noscript>
<script src="%PUBLIC_URL%/config/env.js"></script>
<div id="root"></div>
<!--
This HTML file is a template.
......
import { UploadRequest } from '@navjobs/upload'
import { apiBase, appStaticBase } from './config'
const auth_headers = {
Authorization: 'Basic ' + btoa('sheldon.cooper@nomad-fairdi.tests.de:password')
}
const networkError = () => {
throw Error('Network related error, cannot reach API or object storage.')
}
const handleJsonErrors = () => {
throw Error('Server return unexpected data format.')
}
const handleResponseErrors = (response) => {
if (!response.ok) {
return response.json()
.catch(() => {
throw Error(`API/object storage error (${response.status}): ${response.statusText}`)
}).then(data => {
throw Error(`API/object storage error (${response.status}): ${data.message}`)
})
}
return response
}
class Upload {
constructor(json, created) {
this.uploading = null
this._assignFromJson(json, created)
}
uploadFile(file) {
console.assert(this.upload_url)
this.uploading = 0
const uploadFileWithProgress = async() => {
let { error, aborted } = await UploadRequest(
{
request: {
url: this.upload_url,
method: 'PUT',
headers: {
'Content-Type': 'application/gzip',
...auth_headers
}
},
files: [file],
progress: value => {
console.log(value)
this.uploading = value
}
}
)
if (error) {
networkError(error)
}
if (aborted) {
throw Error('User abort')
}
}
return uploadFileWithProgress()
.then(() => this)
}
_assignFromJson(uploadJson, created) {
Object.assign(this, uploadJson)
if (this.current_task !== this.tasks[0]) {
this.uploading = 100
this.waiting = false
} else if (!created && this.uploading === null) {
// if data came from server during a normal get (not create) and its still uploading
// and the uploading is also not controlled locally then it ought to be a manual upload
this.waiting = true
}
}
get(page, perPage, orderBy, order) {
if (!page) page = 1
if (!perPage) perPage = 5
if (!orderBy) orderBy = 'mainfile'
if (!order) order = 'desc'
order = order === 'desc' ? -1 : 1
if (this.uploading !== null && this.uploading !== 100) {
return new Promise(resolve => resolve(this))
} else {
const qparams = `page=${page}&per_page=${perPage}&order_by=${orderBy}&order=${order}`
return fetch(
`${apiBase}/uploads/${this.upload_id}?${qparams}`,
{
method: 'GET',
headers: auth_headers
})
.catch(networkError)
.then(handleResponseErrors)
.then(response => response.json())
.then(uploadJson => {
this._assignFromJson(uploadJson)
return this
})
}
}
}
function createUpload(name) {
const fetchData = {
method: 'POST',
body: JSON.stringify({
name: name
}),
headers: {
'Content-Type': 'application/json',
...auth_headers
}
}
return fetch(`${apiBase}/uploads`, fetchData)
.catch(networkError)
.then(handleResponseErrors)
.then(response => response.json())
.then(uploadJson => new Upload(uploadJson, true))
}
function getUploads() {
return fetch(
`${apiBase}/uploads`,
{
method: 'GET',
headers: auth_headers
})
.catch(networkError)
.then(handleResponseErrors)
.then(response => response.json())
.then(uploadsJson => uploadsJson.map(uploadJson => new Upload(uploadJson)))
}
function archive(uploadHash, calcHash) {
return fetch(archiveUrl(uploadHash, calcHash))
.catch(networkError)
.then(handleResponseErrors)
.then(response => response.json())
}
function calcProcLog(archiveId) {
return fetch(`${apiBase}/logs/${archiveId}`)
.catch(networkError)
.then(response => {
if (!response.ok) {
if (response.status === 404) {
return ''
} else {
return handleResponseErrors(response)
}
} else {
return response.text()
}
})
}
function archiveUrl(uploadHash, calcHash) {
return `${apiBase}/archive/${uploadHash}/${calcHash}`
}
function repo(uploadHash, calcHash) {
return fetch(`${apiBase}/repo/${uploadHash}/${calcHash}`)
.catch(networkError)
.then(handleResponseErrors)
.then(response => response.json())
}
function repoAll(page, perPage, owner) {
return fetch(
`${apiBase}/repo?page=${page}&per_page=${perPage}&owner=${owner || 'all'}`,
{
method: 'GET',
headers: auth_headers
})
.catch(networkError)
.then(handleResponseErrors)
.then(response => response.json())
}
function deleteUpload(uploadId) {
return fetch(
`${apiBase}/uploads/${uploadId}`,
{
method: 'DELETE',
headers: auth_headers
})
.catch(networkError)
.then(handleResponseErrors)
.then(response => response.json())
}
function unstageUpload(uploadId) {
return fetch(
`${apiBase}/uploads/${uploadId}`,
{
method: 'POST',
body: JSON.stringify({
operation: 'unstage'
}),
headers: {
'Content-Type': 'application/json',
...auth_headers
}
})
.catch(networkError)
.then(handleResponseErrors)
.then(response => response.json())
}
let cachedMetaInfo = null
async function getMetaInfo() {
if (cachedMetaInfo) {
return cachedMetaInfo
} else {
const loadMetaInfo = async(path) => {
return fetch(`${appStaticBase}/metainfo/meta_info/nomad_meta_info/${path}`)
.catch(networkError)
.then(handleResponseErrors)
.then(response => response.json())
.catch(handleJsonErrors)
.then(data => {
if (!cachedMetaInfo) {
cachedMetaInfo = {}
}
if (data.dependencies) {
data.dependencies.forEach(dep => {
loadMetaInfo(dep.relativePath)
})
}
if (data.metaInfos) {
data.metaInfos.forEach(info => {
cachedMetaInfo[info.name] = info
})
}
})
}
await loadMetaInfo('all.nomadmetainfo.json')
return cachedMetaInfo
}
}
const api = {
createUpload: createUpload,
deleteUpload: deleteUpload,
unstageUpload: unstageUpload,
getUploads: getUploads,
archive: archive,
calcProcLog: calcProcLog,
archiveUrl: archiveUrl,
repo: repo,
repoAll: repoAll,
getMetaInfo: getMetaInfo
}