nomad-lab / nomad-FAIR

Commit 859810f4, authored Sep 20, 2018 by Markus Scheidgen

Fixed code quality issues.

parent 5c2c1979
Changes: 19 files

.gitlab-ci.yml

@@ -16,7 +16,9 @@ before_script:

linting:
  stage: qa
  script:
-    - echo "Hello World"
+    - pycodestyle --ignore=E501,E701 nomad tests
+    - pylint --load-plugins=pylint_mongoengine nomad tests
+    - mypy --ignore-missing-imports --follow-imports=silent --no-strict-optional nomad tests

tests:
  stage: test

.pylintrc  0 → 100644 (new file; diff collapsed)

.vscode/settings.json

@@ -12,10 +12,9 @@
    "git.enableSmartCommit": true,
    "eslint.autoFixOnSave": true,
    "python.linting.pylintArgs": [
        "--disable=all",
        "--load-plugins=pylint_mongoengine",
        "--enable=F,E,unreachable,duplicate-key,unnecessary-semicolon,global-variable-not-assigned,unused-variable,binary-op-exception,bad-format-string,anomalous-backslash-in-string,bad-open-mode,unused-import"
    ],
    "python.linting.pep8Path": "pycodestyle",
    "python.linting.pep8Enabled": true,
    "python.linting.pep8Args": [
        "--ignore=E501,E701"
    ],
    "python.linting.mypyEnabled": true,

nomad/__init__.py

@@ -10,4 +10,4 @@
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
-# limitations under the License.
\ No newline at end of file
+# limitations under the License.

nomad/api.py

@@ -676,6 +676,7 @@ def call_admin_operation(operation):
    return 'done', 200


api.add_resource(UploadsRes, '%s/uploads' % base_path)
api.add_resource(UploadRes, '%s/uploads/<string:upload_id>' % base_path)
api.add_resource(UploadFileRes, '%s/uploads/<string:upload_id>/file' % base_path)

nomad/dependencies.py

@@ -192,6 +192,7 @@ def prepare() -> None:
    for python_git in dependencies:
        python_git.prepare()


if __name__ == '__main__':
    _logger.setLevel(logging.DEBUG)
    prepare()

nomad/files.py

@@ -74,7 +74,7 @@ class Objects:
        return os.path.abspath(path)

    @classmethod
-    def open(cls, bucket: str, name: str, ext: str = None, *args, **kwargs) -> IO:
+    def open(cls, bucket: str, name: str, ext: str = None, *args, **kwargs) -> IO:
        """ Open an object like you would a file, e.g. with 'rb', etc. """
        try:
            return open(cls._os_path(bucket, name, ext), *args, **kwargs)

@@ -82,7 +82,7 @@ class Objects:
            raise KeyError()

    @classmethod
-    def delete(cls, bucket: str, name: str, ext: str = None) -> None:
+    def delete(cls, bucket: str, name: str, ext: str = None) -> None:
        """ Delete a single object. """
        try:
            os.remove(cls._os_path(bucket, name, ext))

@@ -90,7 +90,7 @@ class Objects:
            raise KeyError()

    @classmethod
-    def delete_all(cls, bucket: str, prefix: str = ''):
+    def delete_all(cls, bucket: str, prefix: str = ''):
        """ Delete all files with given prefix, prefix must denote a directory. """
        try:
            shutil.rmtree(cls._os_path(bucket, prefix, ext=None))

@@ -98,7 +98,7 @@ class Objects:
            pass

    @classmethod
-    def exists(cls, bucket: str, name: str, ext: str = None) -> bool:
+    def exists(cls, bucket: str, name: str, ext: str = None) -> bool:
        """ Returns True if object exists. """
        return os.path.exists(cls._os_path(bucket, name, ext))

@@ -116,7 +116,7 @@ class File:
    Attributes:
        logger: A structured logger with bucket and object information.
    """
-    def __init__(self, bucket: str, object_id: str, ext: str = None) -> None:
+    def __init__(self, bucket: str, object_id: str, ext: str = None) -> None:
        self.bucket = bucket
        self.object_id = object_id
        self.ext = ext

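For orientation, a minimal usage sketch of the Objects classmethods shown above, based only on the signatures in these hunks; the bucket and object names are made-up examples, not values from the commit:

    from nomad.files import Objects

    # illustrative bucket/object names (assumptions, not from the diff)
    if Objects.exists('uploads', 'some-upload-id', ext='json'):
        with Objects.open('uploads', 'some-upload-id', ext='json', mode='rb') as f:
            content = f.read()

    Objects.delete('uploads', 'some-upload-id', ext='json')    # remove one object
    Objects.delete_all('uploads', prefix='some-upload-id')     # remove everything under a prefix
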
nomad/normalizing/fhiaims.py

@@ -222,8 +222,6 @@ class FhiAimsBaseNormalizer(Normalizer):
    # matrix_hits_basis={}
    # A=ParseStreamedDicts(sys.stdin)
    # while True:

@@ -274,4 +272,3 @@ class FhiAimsBaseNormalizer(Normalizer):
    # if __name__ == "__main__":
    #     main()

nomad/parsing/__init__.py

@@ -96,5 +96,5 @@ parsers = [
]
""" Instanciation and constructor based config of all parsers. """

-parser_dict = {parser.name: parser for parser in parsers}
+parser_dict = {parser.name: parser for parser in parsers}  # type: ignore
""" A dict to access parsers by name. Usually 'parsers/<...>', e.g. 'parsers/vasp'. """

nomad/parsing/backend.py

@@ -456,7 +456,7 @@ class LocalBackend(LegacyParserBackend):
    def _write(json_writer: JSONStreamWriter, value: Any,
-              filter: Callable[[str, Any], Any] = None):
+              filter: Callable[[str, Any], Any] = None):
        if isinstance(value, list):
            json_writer.open_array()

@@ -488,7 +488,7 @@ class LocalBackend(LegacyParserBackend):
        """ Returns status and potential errors. """
        return (self._status, self._errors)

-    def write_json(self, out: TextIO, pretty=True, filter: Callable[[str, Any], Any] = None):
+    def write_json(self, out: TextIO, pretty=True, filter: Callable[[str, Any], Any] = None):
        """
        Writes the results stored in the backend after parsing in an 'archive'.json
        style format.

nomad/patch.py

@@ -7,4 +7,5 @@ import systax.analysis.symmetryanalyzer
def segfault_protect_patch(f, *args, **kwargs):
    return f(*args, **kwargs)


systax.analysis.symmetryanalyzer.segfault_protect = segfault_protect_patch

nomad/processing/base.py

@@ -12,18 +12,13 @@
# See the License for the specific language governing permissions and
# limitations under the License.

-from typing import List, cast, Any
-import types
-from contextlib import contextmanager
-import collections
-import inspect
+from typing import List, Any
import logging
import time
import celery
-from celery import Celery, Task
+from celery import Celery
from celery.signals import after_setup_task_logger, after_setup_logger, worker_process_init
from mongoengine import Document, StringField, ListField, DateTimeField, IntField, \
-    ReferenceField, connect, ValidationError, BooleanField, EmbeddedDocument
+    connect, ValidationError, BooleanField
from mongoengine.connection import MongoEngineConnectionError
from mongoengine.base.metaclasses import TopLevelDocumentMetaclass
from pymongo import ReturnDocument

@@ -37,6 +32,7 @@ import nomad.patch  # pylint: disable=unused-import
def mongo_connect():
    return connect(db=config.mongo.users_db, host=config.mongo.host, port=config.mongo.port)


if config.logstash.enabled:
    def initialize_logstash(logger=None, loglevel=logging.DEBUG, **kwargs):
        utils.add_logstash_handler(logger)

@@ -222,9 +218,9 @@ class Proc(Document, metaclass=ProcMetaclass):
    def _continue_with(self, task):
        tasks = self.__class__.tasks
-        assert task in tasks, 'task %s must be one of the classes tasks %s' % (task, str(tasks))
+        assert task in tasks, 'task %s must be one of the classes tasks %s' % (task, str(tasks))  # pylint: disable=E1135

        if self.current_task is None:
-            assert task == tasks[0], "process has to start with first task"
+            assert task == tasks[0], "process has to start with first task"  # pylint: disable=E1136
        else:
            assert tasks.index(task) == tasks.index(self.current_task) + 1, \
                "tasks must be processed in the right order"

@@ -234,7 +230,7 @@ class Proc(Document, metaclass=ProcMetaclass):
        if self.status == PENDING:
            assert self.current_task is None
-            assert task == tasks[0]
+            assert task == tasks[0]  # pylint: disable=E1136
            self.status = RUNNING
            self.current_task = task
            self.get_logger().debug('started process')

@@ -413,6 +409,7 @@ def all_subclasses(cls):
    return set(cls.__subclasses__()).union(
        [s for c in cls.__subclasses__() for s in all_subclasses(c)])


all_proc_cls = {cls.__name__: cls for cls in all_subclasses(Proc)}
""" Name dictionary for all Proc classes. """

@@ -459,7 +456,7 @@ def proc_task(task, cls_name, self_id, func_attr):
    func = getattr(func, '__process_unwrapped', None)
    if func is None:
        logger.error('called function was not decorated with @process')
-        self.fail('called function %s was not decorated with @process' % (func_attr, cls_name))
+        self.fail('called function %s was not decorated with @process' % func_attr)
        return

    # call the process function

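Since the body of all_subclasses() is fully visible in the hunk above, here is a tiny self-contained illustration of its behaviour; the classes are throwaway examples, not nomad classes:

    # throwaway classes, for illustration only
    class A: pass
    class B(A): pass
    class C(B): pass

    def all_subclasses(cls):
        return set(cls.__subclasses__()).union(
            [s for c in cls.__subclasses__() for s in all_subclasses(c)])

    assert all_subclasses(A) == {B, C}
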
nomad/processing/data.py

@@ -22,30 +22,24 @@ calculations, and files
    :members:
.. autoclass:: Upload
    :members:
-.. autoclass:: DataSet
-.. autoclass:: User
"""

from typing import List, Any
import sys
from datetime import datetime
from elasticsearch.exceptions import NotFoundError
-from mongoengine import \
-    Document, EmailField, StringField, BooleanField, DateTimeField, \
-    ListField, DictField, ReferenceField, IntField, connect
-import mongoengine.errors
+from mongoengine import StringField, BooleanField, DateTimeField, DictField, IntField
import logging
import base64
import time

from nomad import config, utils
-from nomad.files import UploadFile, ArchiveFile, FileError
+from nomad.files import UploadFile, ArchiveFile
from nomad.repo import RepoCalc
-from nomad.user import User, me
+from nomad.user import User
from nomad.processing.base import Proc, Chord, process, task, PENDING, SUCCESS, FAILURE, RUNNING
-from nomad.parsing import LocalBackend, parsers, parser_dict
+from nomad.parsing import parsers, parser_dict
from nomad.normalizing import normalizers
-from nomad.utils import get_logger, lnr
+from nomad.utils import lnr


class NotAllowedDuringProcessing(Exception): pass

@@ -134,7 +128,7 @@ class Calc(Proc):
    def process(self):
        self._upload = Upload.get(self.upload_id)
        if self._upload is None:
-            get_logger().error('calculation upload does not exist')
+            self.get_logger().error('calculation upload does not exist')

        try:
            self.parsing()

@@ -397,7 +391,7 @@ class Upload(Chord):
        try:
            upload = UploadFile(self.upload_id)
        except KeyError as e:
-            upload_proc.fail('Upload does not exist', exc_info=e)
+            self.fail('Upload does not exist', exc_info=e)
            return

        upload.remove_extract()

nomad/repo.py

@@ -195,7 +195,7 @@ class RepoCalc(ElasticDocument):
                }
            }
        }

-        response = conn.update_by_query(index, doc_type=[doc_type], body=body)
+        conn.update_by_query(index, doc_type=[doc_type], body=body)

    @staticmethod
    def es_search(body):

nomad/user.py

@@ -17,7 +17,6 @@ Module with some prototypes/placeholder for future user management in nomad@FAIR
"""
import sys
import time
from mongoengine import Document, EmailField, StringField, ReferenceField, ListField
from passlib.apps import custom_app_context as pwd_context
from itsdangerous import TimedJSONWebSignatureSerializer as Serializer, BadSignature, SignatureExpired

@@ -70,6 +69,7 @@ class DataSet(Document):
        ]
    }


# provid a test user for testing
me = None
other = None

pylint_mongoengine.py  deleted 100644 → 0

from astroid import scoped_nodes
from astroid import MANAGER


def register(linter):
    # Needed for registering the plugin.
    pass


def transform(cls: scoped_nodes.ClassDef):
    if any(getattr(base, 'name', None) == 'Document' for base in cls.bases):
        cls.locals['objects'] = [scoped_nodes.FunctionDef('objects')]


MANAGER.register_transform(scoped_nodes.ClassDef, transform)

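For context, a hedged sketch of the situation this kind of plugin works around (the document class below is a made-up example, and reading the deleted local plugin as superseded by the pylint-mongoengine package listed in requirements-dev.txt is an inference from this commit, not stated in it): mongoengine attaches the `objects` query manager to Document subclasses at runtime, so without such a transform pylint typically reports it as a missing member.

    # assumed illustration, not part of the commit
    from mongoengine import Document, StringField

    class ExampleDoc(Document):
        name = StringField()

    def count_examples():
        # without the plugin/transform, pylint would flag `objects` as no-member here
        return ExampleDoc.objects(name='example').count()
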
requirements-dev.txt

@@ -2,6 +2,8 @@ watchdog
gitpython
mypy
pylint
+pylint_plugin_utils
+pylint-mongoengine
pycodestyle
pytest
pytest-timeout

tests/__init__.py  0 → 100644 (new file; no content shown)

tests/conftest.py

@@ -34,7 +34,7 @@ def patched_celery(monkeypatch):
    # The bug has a fix from Aug 2018, but it is not yet released (TODO).
    # We monkeypatch a similar solution here.
    def add_reader(self, fds, callback, *args):
-        from kombu.utils.eventio import ERR, READ, WRITE, poll
+        from kombu.utils.eventio import ERR, READ, poll

        if self.poller is None:
            self.poller = poll()

@@ -113,4 +113,4 @@ def one_error(caplog):
        if record.levelname in ['ERROR', 'CRITICAL']:
            count += 1
            if count > 1:
-                assert False, "oo many errors"
\ No newline at end of file
+                assert False, "oo many errors"
