Skip to content
Snippets Groups Projects

Compare revisions

Changes are shown as if the source revision was being merged into the target revision. Learn more about comparing revisions.

Source

Select target project
No results found

Target

Select target project
  • nomad-lab/nomad-FAIR
  • pgoel/nomad-FAIR
  • jpd47/nomad-FAIR
3 results
Show changes
Commits on Source (74)
Showing
with 288 additions and 111 deletions
......@@ -453,3 +453,4 @@ nexus.obj
celerybeat-schedule.dir
celerybeat-schedule.dat
celerybeat-schedule.bak
celerybeat-schedule.db
......@@ -30,15 +30,9 @@ variables:
workflow:
rules:
- if: $CI_COMMIT_BRANCH && $CI_COMMIT_BEFORE_SHA == "0000000000000000000000000000000000000000"
when: never
- if: $CI_COMMIT_BRANCH && $CI_OPEN_MERGE_REQUESTS && $CI_PIPELINE_SOURCE == "push"
when: never
- if: $CI_COMMIT_BRANCH && $CI_OPEN_MERGE_REQUESTS
when: never
- if: $CI_PIPELINE_SOURCE == "merge_request_event"
- if: $CI_PIPELINE_SOURCE == "schedule"
- if: $CI_COMMIT_BRANCH
- if: $CI_COMMIT_BRANCH == $CI_DEFAULT_BRANCH
- if: $CI_COMMIT_TAG
variables:
DOCKER_TAG: ${CI_COMMIT_REF_NAME}
......@@ -158,7 +152,6 @@ python package clean up:
script:
- python scripts/cleanup_packages.py
rules:
- if: $CI_COMMIT_BRANCH == "develop" && $NIGHTLY
- when: manual
allow_failure: true
......@@ -210,6 +203,7 @@ check python dependencies:
before_script:
- scripts/check_elastic.sh
- uv pip install -e ".[dev]"
- uv pip install -r default_plugins.txt -c requirements-dev.txt # required until all legacy tests are removed
generate pytest timings:
extends: .base_test
......@@ -353,13 +347,13 @@ build python package:
--build-arg SETUPTOOLS_SCM_PRETEND_VERSION
- mkdir -p $CI_PROJECT_DIR/dist
- cp -r /app/dist/* $CI_PROJECT_DIR/dist
- cp /app/tests/data/parsers/vasp/vasp.xml $CI_PROJECT_DIR/
- cp /app/tests/data/parsers/archive.json $CI_PROJECT_DIR/
artifacts:
expire_in: 1 days
when: on_success
paths:
- dist/
- vasp.xml
- archive.json
build final image:
stage: test
......@@ -389,8 +383,8 @@ install tests:
- pip install dist/nomad-*.tar.gz
- python -c 'import nomad.cli'
- python -c 'from nomad.client import ArchiveQuery'
- python -m nomad.cli parse vasp.xml
- uv pip install git+https://github.com/nomad-coe/nomad-parser-example.git@9312f1e
- python -m nomad.cli parse --skip-normalizers archive.json
- uv pip install git+https://github.com/nomad-coe/nomad-parser-example.git@0b0035d
- python -m exampleparser tests/data/examples/example.out
.tag image:
......@@ -504,7 +498,6 @@ deploy develop:
reports:
dotenv: build.env
rules:
- if: $CI_COMMIT_BRANCH == "develop" && $NIGHTLY
- when: manual
allow_failure: true
......@@ -544,9 +537,11 @@ python package:
- pip install twine
script: twine upload -u gitlab-ci-token -p ${CI_JOB_TOKEN} --repository-url https://gitlab.mpcdf.mpg.de/api/v4/projects/${CI_PROJECT_ID}/packages/pypi dist/nomad-lab-*.tar.gz
rules:
- if: $CI_COMMIT_BRANCH == "develop" && $NIGHTLY
when: on_success
- when: manual
allow_failure: true
- if: $CI_COMMIT_BRANCH == "develop" || $CI_COMMIT_TAG
- if: $CI_COMMIT_TAG
pypi package:
stage: release
......
......@@ -35,15 +35,9 @@
[submodule "dependencies/normalizers/simulation/workflow"]
path = dependencies/normalizers/simulation/workflow
url = https://github.com/nomad-coe/nomad-normalizer-plugin-simulation-workflow.git
[submodule "dependencies/schema/simulation/workflow"]
path = dependencies/schema/simulation/workflow
url = https://github.com/nomad-coe/nomad-schema-plugin-simulation-workflow.git
[submodule "dependencies/normalizers/simulation/dos"]
path = dependencies/normalizers/simulation/dos
url = https://github.com/nomad-coe/nomad-normalizer-plugin-dos.git
[submodule "dependencies/schema/simulation/run"]
path = dependencies/schema/simulation/run
url = https://github.com/nomad-coe/nomad-schema-plugin-run.git
[submodule "dependencies/normalizers/simulation/band_structure"]
path = dependencies/normalizers/simulation/band_structure
url = https://github.com/nomad-coe/nomad-normalizer-plugin-bandstructure.git
......
......@@ -20,7 +20,7 @@
# If you need more help, visit the Dockerfile reference guide at
# https://docs.docker.com/engine/reference/builder/
FROM node:16.15 AS base_node
FROM node:20 AS base_node
FROM python:3.11-slim AS base_python
# Keeps Python from buffering stdout and stderr to avoid situations where
# the application crashes without emitting any logs due to buffering.
......@@ -109,7 +109,7 @@ FROM base_node AS dev_node
WORKDIR /app/gui
ENV PATH /app/node_modules/.bin:$PATH
ENV NODE_OPTIONS "--max_old_space_size=4096"
ENV NODE_OPTIONS "--max_old_space_size=4096 --openssl-legacy-provider"
# Fetch and cache all (but only) the dependencies
COPY gui/yarn.lock gui/package.json ./
......@@ -199,11 +199,6 @@ RUN pip install nomad-lab-*.tar.gz
COPY default_plugins.txt .
RUN uv pip install -r default_plugins.txt -c requirements.txt
# Reduce the size of the packages
RUN find /usr/local/lib/python3.11/ -type d -name 'tests' ! -path '*/networkx/*' -exec rm -r '{}' + \
&& find /usr/local/lib/python3.11/ -type d -name 'test' -exec rm -r '{}' + \
&& find /usr/local/lib/python3.11/site-packages/ -name '*.so' ! -path '*/h5py/*' ! -path '*/quippy*/*' -print -exec sh -c 'file "{}" | grep -q "not stripped" && strip -s "{}"' \;
# ================================================================================
# We use slim for the final image
......
......@@ -22,10 +22,6 @@ include dependencies/parsers/workflow/README.md
recursive-include dependencies/parsers/workflow/workflowparsers *.py nomad_plugin.yaml
include dependencies/normalizers/simulation/workflow/README.md
recursive-include dependencies/normalizers/simulation/workflow/simulationworkflownormalizer *.py nomad_plugin.yaml
include dependencies/schema/simulation/workflow/README.md
recursive-include dependencies/schema/simulation/workflow/simulationworkflowschema *.py nomad_plugin.yaml
include dependencies/schema/simulation/run/README.md
recursive-include dependencies/schema/simulation/run/runschema *.py nomad_plugin.yaml
include dependencies/normalizers/simulation/dos/README.md
recursive-include dependencies/normalizers/simulation/dos/dosnormalizer *.py nomad_plugin.yaml
include dependencies/normalizers/simulation/band_structure/README.md
......
git+https://github.com/FAIRmat-NFDI/nomad-perovskite-solar-cells-database.git@f394d21abafeb659729af0c94e5f90949fce01c1
git+https://github.com/FAIRmat-NFDI/nomad-porous-materials.git@795a53f35ffa52604cf7971c16cc3d4f9ce80f2f
git+https://github.com/FAIRmat-NFDI/nomad-aitoolkit.git@70e6261c746e579ad0f177a965d16134efb50c35
git+https://github.com/FAIRmat-NFDI/nomad-aitoolkit.git@b203f8eb28dc4b6771a39d5c5b7ad3d9d8583ac8
nomad-simulations==0.0.1
pynxtools[convert]==0.5.0
git+https://github.com/nomad-coe/nomad-schema-plugin-run.git@f5d348fec5cef89e76021b42e48ff49b1832034b
git+https://github.com/nomad-coe/nomad-schema-plugin-simulation-workflow.git@decd6bc6f7df21513c56f276dc663977c44a22b1
Subproject commit f594bd6bbdcb275020a5109885bf6b613a76d9d1
Subproject commit 05c632cb3e896a97c0e26a914ad42191444acc19
Subproject commit 81682c80f3ac5b0ace36518ef6191d860f804ce6
Subproject commit 8e9e6d02435ac27af6bc199bd55b6528d484c36d
Subproject commit f18533c7075d5029a386ecb788a86c07bfdfec96
Subproject commit fd32d79dff815a62a48e66d72bf709863964455b
......@@ -59,19 +59,19 @@ git submodule update --init
### Set up a Python environment
The NOMAD code currently targets Python 3.9. You should work in a Python virtual environment.
The NOMAD code currently requires Python 3.11. You should work in a Python virtual environment.
#### Pyenv
If your host machine has an older version installed,
you can use [pyenv](https://github.com/pyenv/pyenv){:target="_blank"} to use Python 3.9 in parallel with your
you can use [pyenv](https://github.com/pyenv/pyenv){:target="_blank"} to use Python 3.11 in parallel with your
system's Python.
#### Virtualenv
Create a virtual environment. It allows you
to keep NOMAD and its dependencies separate from your system's Python installation.
Make sure that the virtual environment is based on Python 3.9.
Make sure that the virtual environment is based on Python 3.11.
Use either the built-in `venv` module (see example) or [virtualenv](https://pypi.org/project/virtualenv/){:target="_blank"}.
```shell
......@@ -85,7 +85,7 @@ If you are a conda user, there is an equivalent, but you have to install `pip` a
right Python version while creating the environment.
```shell
conda create --name nomad_env pip python=3.9
conda create --name nomad_env pip python=3.11
conda activate nomad_env
```
......@@ -111,10 +111,10 @@ essential for handling HDF5 files, must be installed on most Unix/Linux systems.
The absence of these libraries can lead to issues during installation or runtime.
For macOS (using Homebrew, pre-compiled binaries for `libmagic` are included in the `pylibmagic` library):
For macOS:
```bash
brew install hdf5
brew install hdf5 libmagic file-formula
```
For Windows (pre-compiled binaries for `hdf5` are included in the dependencies):
......@@ -319,10 +319,7 @@ yarn
yarn start
```
Note that the current codebase requires Node.js version 16.20. If you have a newer version installed on your system, you may need to downgrade or use the following workaround as described [here](https://codingbeautydev.com/blog/node-err-ossl-evp-unsupported/) before `yarn start` works properly.
```
export NODE_OPTIONS=--openssl-legacy-provider
```
Note that the current codebase requires Node.js version 20.
### JupyterHub
......
......@@ -36,9 +36,9 @@ curl localhost/nomad-oasis/alive
- Open [http://localhost/nomad-oasis](http://localhost/nomad-oasis){:target="_blank"} in your browser.
To run NORTH (the NOMAD Remote Tools Hub), the `hub` container needs to run docker and
To run NORTH (the NOMAD Remote Tools Hub), the `north` container needs to run docker and
the container has to be run under the docker group. You need to replace the default group
id `991` in the `docker-compose.yaml`'s `hub` section with your systems docker group id.
id `991` in the `docker-compose.yaml`'s `north` section with your systems docker group id.
Run `id` if you are a docker user, or `getent group | grep docker` to find out your
systems docker gid. The user id 1000 is used as the nomad user inside all containers.
......@@ -182,7 +182,7 @@ a version tag (format is `vX.X.X`, you find all releases [here](https://gitlab.m
- All containers will be named `nomad_oasis_*`. These names can be used later to reference the container with the `docker` cmd.
- The services are setup to restart `always`, you might want to change this to `no` while debugging errors to prevent indefinite restarts.
- Make sure that the `PWD` environment variable is set. NORTH needs to create bind mounts that require absolute paths and we need to pass the current working directory to the configuration from the PWD variable (see hub service in the `docker-compose.yaml`).
- The `hub` service needs to run docker containers. We have to use the systems docker group as a group. You might need to replace `991` with your
- The `north` service needs to run docker containers. We have to use the systems docker group as a group. You might need to replace `991` with your
systems docker group id.
#### nomad.yaml
......@@ -534,3 +534,68 @@ Here are some common problems that may occur in an OASIS installation:
The underlying reason is a time difference between the two different servers (the one creating the JWT, and the one that is validating it) as these might very well be different physical machines. To fix this problem, you should ensure that the time on the servers is up to date (e.g. a network port on the server may be closed, preventing it from synchronizing the time). Note that the servers do not need to be on the same timezone, as internally everything is converted to UTC+0.
### NOMAD in networks with restricted Internet access
Some network environments do not allow direct Internet connections, and require the use of an outbound proxy.
However, NOMAD needs to connect to the central user management or elasticsearch thus requires an active Internet
connection (at least on Windows) to work.
In these cases you need to configure docker to use your proxy.
See details via this link [https://docs.docker.com/network/proxy/](https://docs.docker.com/network/proxy/).
An example file `~/.docker/config.json` could look like this.
```json
{
"proxies": {
"default": {
"httpProxy": "http://<proxy>:<port>",
"httpsProxy": "http://<proxy>:<port>",
"noProxy": "127.0.0.0/8,elastic,localhost"
}
}
}
```
Since not all used services respect proxy variables, one also has to change the docker compose config file `docker-compose.yaml` for elastic search to:
```yaml hl_lines="7 8"
elastic:
restart: unless-stopped
image: docker.elastic.co/elasticsearch/elasticsearch:7.17.1
container_name: nomad_oasis_elastic
environment:
- ES_JAVA_OPTS=-Xms512m -Xmx512m
- ES_JAVA_OPTS=-Djava.net.useSystemProxies=true
- ES_JAVA_OPTS=-Dhttps.proxyHost=<proxy> -Dhttps.proxyPort=port -Dhttps.nonProxyHosts=localhost|127.0.0.1|elastic
- discovery.type=single-node
volumes:
- elastic:/usr/share/elasticsearch/data
healthcheck:
test:
- "CMD"
- "curl"
- "--fail"
- "--silent"
- "http://elastic:9200/_cat/health"
interval: 10s
timeout: 10s
retries: 30
start_period: 60s
```
Unfortunately there is no way yet to use the NORTH tools with the central user management, since the jupyterhub spawner does not respect proxy variables.
It has not been tested yet when using an authentication which does not require the proxy, e.g. a local keycloak server.
If you have issues please contact us on Discord in the [oasis channel](https://discord.com/channels/1201445470485106719/1205480348050395136).
### NOMAD behind a firewall
It is also possible that your docker container is not able to talk to each other.
This could be due to restrictive settings on your server.
The firewall shall allow both inbound and outbound HTTP and HTTPS traffic.
The corresponding rules need to be added.
Furthermore, inbound traffic needs to be enabled for the port used on the `nginx` service.
In this case you should make sure this test runs through:
[https://docs.docker.com/network/network-tutorial-standalone/](https://docs.docker.com/network/network-tutorial-standalone/)
If not please contact your server provider for help.
\ No newline at end of file
# How to install plugins into a NOMAD Oasis
[Plugins](../plugins/plugins.md) allow the customization of a NOMAD deployment in terms of which apps, normalizers, parsers and schema packages are available. In the following we will show to how to install plugins into a NOMAD Oasis.
[Plugins](../plugins/plugins.md) allow the customization of a NOMAD deployment in terms of which apps, normalizers, parsers and schema packages are available. In order for these customizations to be activated, they have to be installed into an Oasis.
## Option 1: Mount the plugin code
Oasis is controlled and run through a `docker-compose.yaml` file, which specifies the different software services and how they interact. Some of these services are using a Docker image that contains the actual NOMAD software. It is in this image where we will need to install any additional plugins with `pip install`.
The following sections contain some alternatives for achieving this, with the first option being the preferred one.
## Option 1: Create a new customized NOMAD Oasis distribution with your plugins
When initially starting to create a customized NOMAD Oasis distribution, it is strongly advised that you create a GitHub repository to persist your work, collaborate with coworkers and also to automate the building and distribution of your custom image. To streamline this process, we have created a [GitHub template repository](https://github.com/FAIRmat-NFDI/nomad-distribution-template) that helps with all of this. It can do the following for you:
- Plugins are controlled with a simple `plugins.txt` file where it is easy to install plugins from PyPI, Git repositories, local files, etc.
- The automatic pipeline will create a new Docker image for your Oasis. This image will also be stored on GitHub servers.
- Initial modifications to the `docker-compose.yaml` are done automatically so you can boot up the software directly.
To learn more, head over to the [template repository](https://github.com/FAIRmat-NFDI/nomad-distribution-template) and follow the instructions there.
## Option 2: Only create a customized Docker image
If you already have an existing NOMAD Oasis setup, or do not wish to use the template, you can also just create a new Docker image which has your plugin installed as a `pip` package. For this approach, you need to create a new `Dockerfile`, which runs the installation step on top of our default image. The basic idea is that your Dockerfile looks something like this:
```Dockerfile
FROM gitlab-registry.mpcdf.mpg.de/nomad-lab/nomad-fair:latest
# Switch to root user to install packages to the system with pip
USER root
# Install your plugin here, e.g.:
RUN pip install git+https://<repository_url>
# Remember to switch back to the 'nomad' user
USER nomad
```
Depending on how your plugin code is distributed, you have several options for the actual install steps:
1. Plugin published in PyPI:
```sh
RUN pip install <package_name>
```
2. Plugin code available in GitHub:
```sh
RUN pip install git+https://<repository_url>
```
3. Plugin published in MPCDF GitLab registry:
```sh
RUN pip install nomad-example-schema-plugin --index-url https://gitlab.mpcdf.mpg.de/api/v4/projects/2187/packages/pypi/simple
```
4. Copy plugin folder from host machine. Note that the folder needs to be in the [Docker build context](https://docs.docker.com/build/building/context/):
```sh
COPY <nomad-plugin-folder-name> <nomad-plugin-folder-name>
RUN cd <nomad-plugin-folder-name> && pip install .
```
The customized image can then be built like this:
```
docker build -t nomad-with-plugins .
```
This will create a new image with the tag `nomad-with-plugins`, which you can use in your `docker-compose.yaml` file:
```yaml
#image: gitlab-registry.mpcdf.mpg.de/nomad-lab/nomad-fair:latest
image: nomad-with-plugins
```
## Option 3 (deprecated): Mount the plugin code directly into the container
!!! warning "Attention"
This option only works with the old plugin mechanism that is based on `nomad_plugin.yaml` files instead of [Python entry points](https://setuptools.pypa.io/en/latest/userguide/entry_point.html).
The NOMAD docker image adds the folder `/app/plugins` to the `PYTHONPATH`. This means that you can mount your code into the `/app/plugins` directory via the volumes section of the `app` and `worker` services in your `docker-compose.yaml`.
......@@ -54,55 +128,3 @@ curl localhost/nomad-oasis/alive
services, update the commands accordingly.
Read the [Oasis install guide](install.md) for more details.
## Option 2: Create a derived Docker image with plugin installed via `pip`
Instead of mounting the code into an existing image, you can also create a new, derived image which has your plugin installed as a `pip` package. For this you will to create a new `Dockerfile`, which runs the installation step. The basic idea is that your Dockerfile looks something like this:
```Dockerfile
--8<-- "examples/plugins/schema/Dockerfile"
```
The image can then be build like this:
```
docker build -t nomad-with-plugins .
```
Depending on how your plugin code is distributed, you have several options for the actual install steps:
1. Plugin published in PyPI:
```sh
RUN pip install <package_name>
```
2. Plugin code available in GitHub:
```sh
RUN pip install git+https://<repository_url>
```
3. Plugin published in MPCDF GitLab registry:
```sh
RUN pip install nomad-example-schema-plugin --index-url https://gitlab.mpcdf.mpg.de/api/v4/projects/2187/packages/pypi/simple
```
4. Copy plugin code from host machine:
```sh
RUN pip install build
COPY \
nomadschemaexample \
tests \
README.md \
LICENSE \
pyproject.toml \
.
RUN python -m build --sdist
RUN pip install dist/nomad-schema-plugin-example-*.tar.gz
```
# How to write an example upload
Example uploads can be used to add representative collections of data for your plugin. Example uploads are available for end-users in the *Uploads*-page under the *Add example uploads*-button. There users can instantiate an example upload with a click. This can be very useful for educational or demonstration purposes but also for testing.
This documentation shows you how to write a plugin entry point for an example upload. You should read the [documentation on getting started with plugins](./plugins.md) to have a basic understanding of how plugins and plugin entry points work in the NOMAD ecosystem.
## Getting started
You can use our [template repository](https://github.com/FAIRmat-NFDI/nomad-plugin-template) to create an initial structure for a plugin containing an example upload. The relevant part of the repository layout will look something like this:
```txt
nomad-example
├── src
│ ├── nomad_example
│ │ ├── example_uploads
│ │ │ ├── getting_started
│ │ │ ├── __init__.py
├── LICENSE.txt
├── README.md
├── MANIFEST.in
└── pyproject.toml
```
See the documentation on [plugin development guidelines](./plugins.md#plugin-development-guidelines) for more details on the best development practices for plugins, including linting, testing and documenting.
## Example upload entry point
The entry point is an instance of a `ExampleUploadEntryPoint` or its subclass. It defines basic information about your example upload and is used to automatically load the associated data into a NOMAD distribution. The entry point should be defined in `*/example_uploads/__init__.py` like this:
```python
from nomad.config.models.plugins import ExampleUploadEntryPoint
myexampleupload = ExampleUploadEntryPoint(
title = 'My Example Upload',
category = 'Examples',
description = 'Description of this example upload.',
    path='example_uploads/getting_started'
)
```
The default method for including the upload data is to place it in the plugin repository and use the `path` field to specify the location with respect to the package root. You can learn more about different data loading options in the next section. In the reference you can also see all of the available [configuration options for a `ExampleUploadEntryPoint`](../../reference/plugins.md#exampleuploadentrypoint).
The entry point instance should then be added to the `[project.entry-points.'nomad.plugin']` table in `pyproject.toml` in order for the example upload to be automatically detected:
```toml
[project.entry-points.'nomad.plugin']
myexampleupload = "nomad_example.example_uploads:myexampleupload"
```
## Including data in an example upload
There are three main ways to include data in an example upload:
1. Data stored directly in the plugin package using `path`:
   This is the default method that assumes you simply store the data under a path in the plugin source code. This is very convenient if you have relatively small example data and wish to track this in version control. The path should be given relative to the package installation location (`src/<package-name>`), and you should ensure that the data is distributed with your Python package. Distribution of additional data files in Python packages is controlled with the `MANIFEST.in` file. If you create a plugin with our [template](https://github.com/FAIRmat-NFDI/nomad-plugin-template), the `src/<package-name>/example_uploads` folder is included automatically in `MANIFEST.in`. If you later add an example upload entry point to your plugin, remember to include the folder by adding the following line to `MANIFEST.in`:
```sh
graft src/<package-name>/<path>
```
2. Data retrieved online during app startup using `url`:
   If your example uploads are very large (>100MB), storing them in Git may become impractical. In order to deal with larger uploads, they can be stored in a separate online service. To load such external resources, you can specify a `url` parameter to activate online data retrieval. This will retrieve the large online file once upon the first app launch and then cache it for later use:
```python
from nomad.config.models.plugins import ExampleUploadEntryPoint
myexampleupload = ExampleUploadEntryPoint(
name = 'MyExampleUpload',
description = 'My custom example upload.',
    url='http://my_large_file_address.zip'
)
```
Note that if the online file changes, you will need to remove the cached file for the new version to be retrieved. You can find the cached file in the package installation location, under folder `example_uploads`.
3. Data retrieved with a custom method:
   If the above options do not suit your use case, you can also override the `load`-method of `ExampleUploadEntryPoint` to perform completely custom data loading logic. Note that the loaded data should be saved in the package installation directory in order to be accessible. Check the default `load` function for more details.
```python
from pydantic import Field
from nomad.config.models.plugins import ExampleUploadEntryPoint
class MyExampleUploadEntryPoint(ExampleUploadEntryPoint):
def load(self):
"""Add your custom loading logic here."""
...
```
\ No newline at end of file
......@@ -43,6 +43,7 @@ In the folder structure you can see that a single plugin can contain multiple ty
Plugin entry points represent different types of customizations that can be added to a NOMAD installation. The following plugin entry point types are currently supported:
- [Apps](./apps.md)
- [Example uploads](./example_uploads.md)
- [Normalizers](./parsers.md)
- [Parsers](./parsers.md)
- [Schema packages](./schema_packages.md)
......@@ -59,7 +60,7 @@ myapp = "nomad_example.parsers:myapp"
mypackage = "nomad_example.schema_packages:mypackage"
```
Here it is important to use the `nomad.plugin` group name in the `project.entry-points` header. The plugin name used on the left side (`mypackage`) can be arbitrary, what matters is that the key (`"nomad_example.schema_packages:mypackage"`) is a path pointing to a plugin entry point instance inside the python code. This unique key will be used to identify the plugin entry point when e.g. accessing it to read some of it's configuration values.
Here it is important to use the `nomad.plugin` group name in the `project.entry-points` header. The value on the right side (`"nomad_example.schema_packages:mypackage"`) must be a path pointing to a plugin entry point instance inside the python code. This unique key will be used to identify the plugin entry point when e.g. accessing it to read some of it's configuration values. The name on the left side (`mypackage`) can be set freely.
You can read more about how to write different types of entry points in their dedicated documentation pages or learn more about the [Python entry point mechanism](https://setuptools.pypa.io/en/latest/userguide/entry_point.html).
......@@ -197,3 +198,7 @@ twine upload \
--repository-url https://gitlab.mpcdf.mpg.de/api/v4/projects/2187/packages/pypi \
dist/nomad-example-plugin-*.tar.gz
```
## Installing a plugin
See our documentation on [How to install plugins into a NOMAD Oasis](../oasis/plugins_install.md).
\ No newline at end of file
......@@ -15,8 +15,10 @@ You can run NOMAD parsers from the [command line interface](../../reference/cli.
The parse command will automatically match the right parser to your file and run the parser.
There are two output formats:
- `--show-metadata` a JSON representation of the basic metadata
- `--show-archive` a JSON representation of the full parse results
- `--show-metadata` a json representation of the basic metadata
- `--show-archive` a json representation of the full parse results
- `--preview-plots`: Optionally previews the generated plots.
- `--save-plot-dir <directory>`: Specifies a directory to save the plot images.
```
nomad parse --show-archive <path-to-your-mainfile-code-output-file>
......
......@@ -11,6 +11,7 @@ Plugins allow one to add Python-based functionality to NOMAD without a custom NO
This is a list of the available plugin entry point configuration models.
{{ pydantic_model('nomad.config.models.plugins.AppEntryPoint') }}
{{ pydantic_model('nomad.config.models.plugins.ExampleUploadEntryPoint') }}
{{ pydantic_model('nomad.config.models.plugins.NormalizerEntryPoint') }}
{{ pydantic_model('nomad.config.models.plugins.ParserEntryPoint') }}
{{ pydantic_model('nomad.config.models.plugins.SchemaPackageEntryPoint') }}
......
......@@ -169,7 +169,7 @@ and the "Allow GitHub Actions to create and approve pull requests" options and c
## Setting up the python environment
### Creating a virtual environment
Before we can start developing we recommend to create a virtual environment using Python 3.9
Before we can start developing we recommend to create a virtual environment using Python 3.11
```sh
python3.11 -m venv .pyenv
......
......@@ -74,7 +74,7 @@ select = [
"UP",
# isort
"I",
# pylint
# pylint
"PL",
]
......
const CracoWorkboxPlugin = require('craco-workbox')
module.exports = {
plugins: [{
plugin: CracoWorkboxPlugin
}]
plugins: [{
plugin: CracoWorkboxPlugin
}],
webpack: {
configure: (webpackConfig, { env, paths }) => {
// Add a rule to handle .mjs files
webpackConfig.module.rules.push({
test: /\.mjs$/,
include: /node_modules/,
type: 'javascript/auto'
})
return webpackConfig
}
}
}