Commit 45a4c127 authored by dboe

Update template

parent 7dae6863
# Byte-compiled / optimized / DLL files
__pycache__/
*.py[cod]
*$py.class
# C extensions
*.so
# Distribution / packaging
.Python
env/
build/
develop-eggs/
dist/
downloads/
eggs/
.eggs/
lib64/
parts/
sdist/
var/
*.egg-info/
.installed.cfg
*.egg
# PyInstaller
# Usually these files are written by a python script from a template
# before PyInstaller builds the exe, so as to inject date/other infos into it.
*.manifest
*.spec
# Installer logs
pip-log.txt
pip-delete-this-directory.txt
# Unit test / coverage reports
htmlcov/
.tox/
.coverage
.coverage.*
.cache
nosetests.xml
coverage.xml
*.cover
.hypothesis/
# pyc files:
*.pyc
# Translations
*.mo
*.pot
# Django stuff:
*.log
local_settings.py
# Flask instance folder
instance/
# Scrapy stuff:
.scrapy
# Sphinx documentation
docs/_build/
# PyBuilder
target/
# IPython Notebook
.ipynb_checkpoints
# pyenv
.python-version
# celery beat schedule file
celerybeat-schedule
# dotenv
.env
# virtualenv
venv/
ENV/
# Spyder project settings
.spyderproject
# vim files
*.sw*
# folder
tmp/
.idea
docs/source/
### Git ###
*.orig
### Linux ###
*~
# temporary files which can be created if a process still has a handle open of a deleted file
.fuse_hidden*
# KDE directory preferences
.directory
# Linux trash folder which might appear on any partition or disk
.Trash-*
# .nfs files are created when an open file is removed but is still being accessed
.nfs*
### macOS ###
# General
.DS_Store
.AppleDouble
.LSOverride
# Icon must end with two \r
Icon
# Thumbnails
._*
# Files that might appear in the root of a volume
.DocumentRevisions-V100
.fseventsd
.Spotlight-V100
.TemporaryItems
.Trashes
.VolumeIcon.icns
.com.apple.timemachine.donotpresent
# Directories potentially created on remote AFP share
.AppleDB
.AppleDesktop
Network Trash Folder
Temporary Items
.apdisk
### Windows ###
# Windows thumbnail cache files
Thumbs.db
ehthumbs.db
ehthumbs_vista.db
# Dump file
*.stackdump
# Folder config file
[Dd]esktop.ini
# Recycle Bin used on file shares
$RECYCLE.BIN/
# Windows Installer files
*.cab
*.msi
*.msix
*.msm
*.msp
# Windows shortcuts
*.lnk
image: python:latest

# Change pip's cache directory to be inside the project directory since we can
# only cache local items.
variables:
  PIP_CACHE_DIR: "$CI_PROJECT_DIR/.cache/pip"

# Pip's cache doesn't store the python packages
# https://pip.pypa.io/en/stable/reference/pip_install/#caching
#
# If you want to also cache the installed packages, you have to install
# them in a virtualenv and cache it as well.
cache:
  paths:
    - .cache/pip
    - venv/

stages:
  - build
  - test
  - deploy

before_script:
  - pip install virtualenv
  - virtualenv venv
  - source venv/bin/activate

dist:
  stage: build
  script:
    - python setup.py bdist_wheel
    # an alternative approach is to install and run:
    - pip install dist/*
    # run the command here
  artifacts:
    paths:
      - dist/*.whl
    expire_in: 1h
  only:
    - tags

pages:
  stage: build
  script:
    - pip install sphinx sphinx_rtd_theme recommonmark
    - cd docs
    - make html
    - cd ..
    - mkdir -p public
    - rm -rf public/*
    - mv docs/_build/html/* public/  # add it to pages. Pages exposes public/index.html
  only:
    - master
  cache:
    paths:
      - public
  artifacts:
    paths:
      - public
      - docs

lint:
  stage: test
  before_script:
    - pip install -q flake8
  script:
    - flake8

test:
  stage: test
  script:
    - pip --version
    - pip install tox  # you can also use tox
    - tox
  coverage: '/^TOTAL.+?(\d+\%)$/'
  artifacts:
    # paths:
    #   - report/unit
    reports:
      junit:
        - report/junit.xml

gitlab-release:
  image: inetprocess/gitlab-release
  stage: deploy
  before_script:
    - echo "Starting release"
  script:
    - gitlab-release --message 'Automatic release' dist/*
  only:
    - tags

pypi:
  image: docker.km3net.de/base/python:3
  stage: deploy
  cache: {}
  before_script:
    - echo "Starting upload to pypi"
  script:
    # Check if current_version is already uploaded
    - VERSION=$((python -c "import configparser; config = configparser.ConfigParser(); config.read('setup.cfg'); print(config['bumpversion']['current_version'])") 2>&1)
    - MODULE_NAME=$((python -c "import configparser; config = configparser.ConfigParser(); config.read('setup.cfg'); print(config['metadata']['name'])") 2>&1)
    - PACKAGE_JSON_URL="https://pypi.org/pypi/$MODULE_NAME/json"
    - apt-get install -qq -y jq
    - PYPI_VERSIONS=$(curl -s "$PACKAGE_JSON_URL" | jq -r '.releases | keys | .[]' | sort -V)
    - if [[ $PYPI_VERSIONS =~ $VERSION ]]; then echo "Version $VERSION is already uploaded!"; exit 1; fi
    # Version not already uploaded, so do it now.
    - echo "Uploading version $VERSION"
    - pip install -U twine
    - python setup.py sdist
    - twine upload dist/*
  rules:
    # for debugging: git commit -am "deb" && git push && bumpversion patch && git tag -l --sort=-v:refname | head -n 1 | git push origin
    - if: $CI_COMMIT_TAG =~ /^v[0-9]+\.[0-9]+\.[0-9]+$/
# .readthedocs.yml
# Read the Docs configuration file
# See https://docs.readthedocs.io/en/stable/config-file/v2.html for details

# Required
version: 2

# Build documentation in the docs/ directory with Sphinx
sphinx:
  configuration: docs/conf.py

# Optionally set the version of Python and requirements required to build your docs
python:
  version: 3.7
  install:
    - requirements: docs/requirements.txt
=======
Credits
=======
This package was created with Cookiecutter_ and the `dboe/dough`_ project template.
.. _Cookiecutter: https://github.com/audreyr/cookiecutter
.. _`dboe/dough`: https://gitlab.com/dboe/dough
Development Lead
----------------
* Daniel Böckenhoff <dboe@ipp.mpg.de>
Contributors
------------
None yet. Why not be the first?
.. highlight:: shell
============
Contributing
============
Contributions are welcome, and they are greatly appreciated! Every little bit
helps, and credit will always be given.
You can contribute in many ways:
Types of Contributions
----------------------
Report Bugs
~~~~~~~~~~~
Report bugs at https://gitlab.mpcdf.mpg.de/dboe/tfields/issues.
If you are reporting a bug, please include:
* Your operating system name and version.
* Any details about your local setup that might be helpful in troubleshooting.
* Detailed steps to reproduce the bug.
If you want quick feedback, it is helpful to mention specific developers
(@developer_name) or @all. This will trigger a mail to the corresponding developer(s).
Fix Bugs
~~~~~~~~
Look through the repository issues for bugs. Anything tagged with "bug" and "help
wanted" is open to whoever wants to implement it.
Implement Features
~~~~~~~~~~~~~~~~~~
Look through the repository issues for features. Anything tagged with "enhancement"
and "help wanted" is open to whoever wants to implement it.
Write Documentation
~~~~~~~~~~~~~~~~~~~
`tfields` could always use more :ref:`documentation<Documentation>`, whether as part of the
official `tfields` docs, in docstrings, or even on the web in blog posts,
articles, and such.
Write Unittests or Doctests
~~~~~~~~~~~~~~~~~~~~~~~~~~~
`tfields` profits a lot from better :ref:`testing<Testing>`. We encourage you to add unittests
(in the `tests` directory) or doctests (as part of docstrings or in the documentation).
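For instance, a doctest lives directly in a docstring and is picked up by ``pytest --doctest-modules``. A minimal sketch (the function below is made up for illustration and is not part of `tfields`)::

    def scale(values, factor):
        """Multiply every entry of ``values`` by ``factor``.

        Examples:
            >>> scale([1, 2, 3], 2)
            [2, 4, 6]
        """
        return [value * factor for value in values]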
Submit Feedback
~~~~~~~~~~~~~~~
The best way to send feedback is to file an `Issue <https://gitlab.mpcdf.mpg.de/dboe/tfields/issues>`_.
If you are proposing a feature:
* Explain in detail how it would work.
* Keep the scope as narrow as possible, to make it easier to implement.
* Remember that this is a volunteer-driven project, and that contributions
are welcome :)
Get Started!
------------
Ready to contribute? Here's how to set up `tfields` for local development.
1. Fork the `tfields` repo.
2. Clone your fork locally::

    $ git clone git@gitlab.mpcdf.mpg.de:dboe/tfields.git

3. Set up your fork for local development::

    $ cd tfields/
    $ pip install .[dev]

4. Step 3 already installed `pre-commit <https://pre-commit.com/>`_. Initialize it by running::

    $ pre-commit install

5. Create a branch for local development::

    $ git checkout -b name-of-your-bugfix-or-feature

   Now you can make your changes locally.

6. When you're done making changes, check that your changes pass flake8 and the
   tests::

    $ make test

7. Commit your changes and push your branch to origin::

    $ git add .
    $ git commit -m "Your detailed description of your changes."
    $ git push origin name-of-your-bugfix-or-feature
8. Submit a pull request through the repository website.
Pull Request Guidelines
-----------------------
Before you submit a pull request, check that it meets these guidelines:
1. The pull request should include tests.
2. If the pull request adds functionality, the docs should be updated. Put
your new functionality into a function with a docstring, and add the
feature to the list in README.rst.
3. The pull request should work for Python 3.5, 3.6, 3.7 and 3.8, and for PyPy. Check
https://gitlab.mpcdf.mpg.de/dboe/tfields/-/merge_requests
and make sure that the tests pass for all supported Python versions.
Testing
-------
To run tests, use::

    $ make test

To run a subset of tests, you have the following options::

    $ pytest tests/test_package.py
    $ pytest tests/test_package.py::Test_tfields::test_version_type
    $ pytest --doctest-modules docs/usage.rst
    $ pytest --doctest-modules tfields/core.py -k "MyClass.function_with_doctest"

Use the ``--trace`` option to jump directly into a pdb debugger on test failures. Check the coverage of your API with::

    $ make coverage
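A unit test in the ``tests`` directory might look like the following sketch (only ``tfields.__version__``, which the Makefile also relies on, is assumed here)::

    import unittest

    import tfields


    class Test_tfields(unittest.TestCase):
        def test_version_type(self):
            # the package exposes its version as a string
            self.assertIsInstance(tfields.__version__, str)


    if __name__ == '__main__':
        unittest.main()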
Documentation
-------------
To compile the documentation (including automatically generated module API docs), run::

    $ make doc
Use doctests as much as possible in order to have tested examples in your documentation.
Styleguide
-----------
Please follow the `google style guide <https://google.github.io/styleguide/pyguide.html>`_ illustrated
by `this example <https://sphinxcontrib-napoleon.readthedocs.io/en/latest/example_google.html>`_.
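A Google-style docstring looks roughly like this (a generic sketch, not taken from the `tfields` code base)::

    import numpy as np


    def distance(point_a, point_b):
        """Compute the Euclidean distance between two points.

        Args:
            point_a (numpy.ndarray): first point, shape (3,)
            point_b (numpy.ndarray): second point, shape (3,)

        Returns:
            float: the Euclidean distance between the two points

        Raises:
            ValueError: if the shapes of the two points do not match
        """
        if point_a.shape != point_b.shape:
            raise ValueError("point_a and point_b must have the same shape")
        return float(np.linalg.norm(point_a - point_b))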
Deploying
---------
A reminder for the maintainers on how to deploy.
Make sure all your changes are committed.
Then run::

    $ bump2version patch  # possible: major / minor / patch
    $ git push
    $ git push --tags

or use the convenient alias for the above (patch increases only)::

    $ make publish
The CI will then deploy to PyPI if tests pass.
MIT License
Copyright (c) 2020, Daniel Böckenhoff
Permission is hereby granted, free of charge, to any person obtaining a copy
of this software and associated documentation files (the "Software"), to deal
in the Software without restriction, including without limitation the rights
to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
copies of the Software, and to permit persons to whom the Software is
furnished to do so, subject to the following conditions:
The above copyright notice and this permission notice shall be included in all
copies or substantial portions of the Software.
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
SOFTWARE.
SHELL := /bin/bash # Use bash syntax
CURRENT_PATH := $(shell pwd)
MODULE := $(shell basename "$(CURRENT_PATH)")
VERSION := $(shell python -c "import sys; import $(MODULE); sys.stdout.write($(MODULE).__version__)")
SOURCES := $(shell find $(MODULE) -name '*.py')
DOCUMENTATION := $(shell find . -name '*.rst')
SPHINXOPTS ?=
SPHINXBUILD ?= sphinx-build
SPHINXSOURCEDIR = ./docs
SPHINXBUILDDIR = docs/_build
GITSTATUS = $(shell git status --porcelain)
part ?= patch
test: FORCE
	flake8 $(MODULE) tests
	py.test

coverage:
	# coverage run $(MODULE) test
	py.test --cov=$(MODULE) || true
	# coverage report
	coverage html
	python -m webbrowser htmlcov/index.html

clean:
	coverage erase
	rm -rf htmlcov
	rm -rf docs/_build
	rm -rf docs/source
	rm -rf dist
	rm -rf report
	rm -rf .tox
	rm -rf .pytest_cache
	rm -rf *.egg-info
	pre-commit clean

publish:
	# call optionally with an argument: make part=minor publish
	bump2version $(part)  # possible: major / minor / patch
	git push
	git push --tags

untag:
	# remove the last tag; mostly because publishing failed
	git tag -d v$(VERSION)
	git push origin :refs/tags/v$(VERSION)

requirements: setup.cfg
	# We have all the information in the setup.cfg file. For some reasons (e.g. a bug in setuptools or limitations on using setup.cfg in readthedocs) we still need a requirements file.
	python -c "import configparser; import os; config = configparser.ConfigParser(); config.read('setup.cfg'); deps = config['metadata']['install_requires'].split('\n'); deps = [x for x in deps if x]; head = '# Autogenerated by Makefile from setup.cfg install_requires section. Remove this line if you want to fix this file.'; path = 'requirements.txt'; f = open(path, 'r') if os.path.exists(path) else None; line = f.readline() if f else ''; quit('User defined requirements already exist.') if f and not line.startswith(head[:20]) else None; f = open('requirements.txt', 'w'); f.write('\n'.join([head] + deps))"
doc: Makefile $(SOURCES) $(DOCUMENTATION) docs/conf.py docs/apidoc-template/*
	# link apidoc to source and build html documentation with sphinx
	python setup.py build_sphinx
	# the manual version would be:
	# # link apidoc to source
	# sphinx-apidoc -o docs/source/ $(MODULE)
	# # build html documentation with sphinx
	# # @$(SPHINXBUILD) -M html "$(SPHINXSOURCEDIR)" "$(SPHINXBUILDDIR)" $(SPHINXOPTS) $(O)
	# sphinx-build -M html docs docs/_build
	#
	# open the html documentation
	python -m webbrowser docs/_build/html/index.html
update:
	# get up to date with the cookiecutter template 'dough'
	# first check that there are no uncommitted changes
	@echo $(GITSTATUS)
	@if [ -z "$(GITSTATUS)" ]; then \
		echo "Working directory clean."; \
	else \
		git status; \
		echo "Your status is not clean! I can not update!"; \
		exit 1; \
	fi
	# uninstall pre-commit
	pre-commit uninstall
	# start the upgrade
	cookiecutter_project_upgrader
	# install pre-commit again
	pre-commit install
FORCE: ;
=========================
Introduction to `tfields`
=========================
.. pypi
.. image:: https://img.shields.io/pypi/v/tfields.svg
:target: https://pypi.python.org/pypi/tfields
.. ci
.. image:: https://img.shields.io/travis/dboe/tfields.svg
:target: https://travis-ci.com/dboe/tfields
.. image:: https://gitlab.mpcdf.mpg.de/dboe/tfields/badges/master/pipeline.svg
:target: https://gitlab.mpcdf.mpg.de/dboe/tfields/commits/master
.. coverage
.. image:: https://gitlab.mpcdf.mpg.de/dboe/tfields/badges/master/coverage.svg
:target: https://gitlab.mpcdf.mpg.de/dboe/tfields/commits/master
.. readthedocs
.. image:: https://readthedocs.org/projects/tfields/badge/?version=latest
:target: https://tfields.readthedocs.io/en/latest/?badge=latest
:alt: Documentation Status
.. pyup crosschecks your dependencies. Github is default, gitlab more complicated: https://pyup.readthedocs.io/en/latest/readme.html#run-your-first-update
.. image:: https://pyup.io/repos/github/dboe/tfields/shield.svg
:target: https://pyup.io/repos/github/dboe/tfields/
:alt: Updates
.. image:: https://img.shields.io/badge/pre--commit-enabled-brightgreen?logo=pre-commit&logoColor=white
:target: https://github.com/pre-commit/pre-commit
:alt: pre-commit
Tensors, tensor fields, graphs, mesh manipulation, CAD and more, built on top of ``numpy.ndarray``. All objects keep track of their coordinate system, and symbolic math operations are available for manipulating them.
Licensed under the ``MIT License``
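A hypothetical usage sketch (the class and method names below, ``Points3D`` and ``transform``, are assumptions for illustration and are not documented in this README)::

    import numpy as np
    import tfields

    # a small point cloud, created in Cartesian coordinates
    points = tfields.Points3D(np.array([[1., 2., 3.], [4., 5., 6.]]))
    # the object keeps track of its coordinate system and can change representation
    points.transform('cylinder')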
Resources
---------
* Source code: https://gitlab.mpcdf.mpg.de/dboe/tfields
* Documentation: https://tfields.readthedocs.io
* Pypi: https://pypi.python.org/pypi/tfields
Features
--------
The following features should be highlighted:
* TODO
# Minimal makefile for Sphinx documentation
#
MODULE_PATH := $(shell cd ..; pwd)
MODULE := $(shell basename "$(MODULE_PATH)")
# You can set these variables from the command line, and also
# from the environment for the first two.
SPHINXOPTS ?=
SPHINXBUILD ?= sphinx-build
SOURCEDIR = .
BUILDDIR = _build