diff --git a/.gitlab-ci.yml b/.gitlab-ci.yml
index 303848c07833d7608fb1b9f28017a085f3c707e2..604ae5ca4b1f010b927865a94082cb05bef403f7 100644
--- a/.gitlab-ci.yml
+++ b/.gitlab-ci.yml
@@ -88,13 +88,20 @@ python linting:
   before_script:
     - cd /app
   script:
-    - pycodestyle --config=pycodestyle.ini nomad tests
+    - ruff nomad tests --output-format gitlab > $CI_PROJECT_DIR/gl-code-quality-report.json
     - pylint --rcfile=.pylintrc nomad tests
     - mypy nomad tests
   rules:
     - if: $CI_COMMIT_TAG
       when: never
     - when: on_success
+  artifacts:
+    name: "nomad_code_quality"
+    when: always
+    reports:
+      codequality: gl-code-quality-report.json
+
+    expire_in: never
 
 gui linting:
   stage: test
diff --git a/.vscode/extensions.json b/.vscode/extensions.json
index 205e6b945fa7e03a0574a06c798bc01b43d88ec4..1350c3e388cfce36b274ab236dc9ae63a376796d 100644
--- a/.vscode/extensions.json
+++ b/.vscode/extensions.json
@@ -4,7 +4,8 @@
 
 	// List of extensions which should be recommended for users of this workspace.
 	"recommendations": [
-		"redhat.vscode-yaml"
+		"redhat.vscode-yaml",
+		"charliermarsh.ruff"
 
 	],
 	// List of extensions recommended by VS Code that should not be recommended for users of this workspace.
diff --git a/Dockerfile b/Dockerfile
index cdc439cea1260dc7eea0f129faa5e1cdda49530b..1dd78811bb92569beef4fa874f16583866c16f17 100644
--- a/Dockerfile
+++ b/Dockerfile
@@ -101,7 +101,6 @@ COPY .pylintrc \
      LICENSE \
      MANIFEST.in \
      mkdocs.yml \
-     pycodestyle.ini \
      pyproject.toml \
      pytest.ini \
      README.md \
diff --git a/docs/develop/setup.md b/docs/develop/setup.md
index 6f166c85259f5226a88418acdcebecdd46bebdab..b2ccd1f74542e2b55c398dfa90ce0f4c4cad67bd 100644
--- a/docs/develop/setup.md
+++ b/docs/develop/setup.md
@@ -398,7 +398,7 @@ pytest -sv tests
     If you excluded plugins in your [NOMAD config](### `nomad.yaml`), then those tests
     will also fail.
 
-We use Pylint, pycodestyle, and mypy to ensure code quality. To run those:
+We use Pylint, ruff, and mypy to ensure code quality. To run those:
 
 ```shell
 nomad dev qa --skip-tests
diff --git a/docs/explanation/architecture.md b/docs/explanation/architecture.md
index fa76aed08cb349e2947489aabe11028b96fcf57d..18549c4696399c63dbb5226816e76c515a77ba01 100644
--- a/docs/explanation/architecture.md
+++ b/docs/explanation/architecture.md
@@ -52,7 +52,7 @@ The *backend* of nomad is written in Python. This includes all parsers, normaliz
 and other data processing. We only use Python 3 and there is no compatibility with
 Python 2. Code is formatted close to [pep8](https://www.python.org/dev/peps/pep-0008/),
 critical parts use [pep484](https://www.python.org/dev/peps/pep-0484/) type-hints.
-[Pycodestyle](https://pypi.org/project/pycodestyle/),
+[ruff](https://docs.astral.sh/ruff),
 [pylint](https://www.pylint.org/), and
 [mypy](http://mypy-lang.org/) (static type checker) are used to ensure quality.
 Tests are written with [pytest](https://docs.pytest.org/en/latest/contents.html).
diff --git a/nomad/app/optimade/common.py b/nomad/app/optimade/common.py
index 76833abe716b5f54719c6ea04434f0f307c26595..b82cf8e9e57d4649e5131122e80fce58df9ad7e0 100644
--- a/nomad/app/optimade/common.py
+++ b/nomad/app/optimade/common.py
@@ -87,7 +87,7 @@ def provider_specific_fields() -> Dict[str, SearchQuantity]:
         if len(nmd_name_split) == 1:
             # plain metadata
             pass
-        elif not nmd_name_split[0] in ['results']:
+        elif nmd_name_split[0] not in ['results']:
             # other domains fields that do not make sense in the optimade context
             continue
         elif len(nmd_name_split) > 2 and nmd_name_split[1] == 'optimade':
diff --git a/nomad/bundles.py b/nomad/bundles.py
index e01619ca4bce3600982033af22d0c95d7a2b43c1..cf5814ee0438b5a458ca7fa22898057be2631382 100644
--- a/nomad/bundles.py
+++ b/nomad/bundles.py
@@ -381,7 +381,7 @@ class BundleImporter:
         # Validate embargo settings
         if self.embargo_length is not None:
             self.upload.embargo_length = self.embargo_length  # Importing with different embargo
-        assert type(self.upload.embargo_length) == int and 0 <= self.upload.embargo_length <= 36, (
+        assert isinstance(self.upload.embargo_length, int) and 0 <= self.upload.embargo_length <= 36, (
             'Invalid embargo_length, must be between 0 and 36 months')
 
     def _import_datasets(self) -> Tuple[List[datamodel.Dataset], Dict[str, str]]:
diff --git a/nomad/cli/admin/migrate.py b/nomad/cli/admin/migrate.py
index 5008f53d54687ab8c0024d2c1028613c86ac7658..d635910f0828b86f6ebfd1682c7633271758462a 100644
--- a/nomad/cli/admin/migrate.py
+++ b/nomad/cli/admin/migrate.py
@@ -505,6 +505,6 @@ def _wrap_author(author):
     If the author is a str, it is instead returned as it is. This is used to get an object
     which is hashable and can be used in sets.
     '''
-    if type(author) == str:
+    if isinstance(author, str):
         return author
     return tuple((k, author[k]) for k in sorted(author.keys()))
diff --git a/nomad/cli/dev.py b/nomad/cli/dev.py
index e2a94f7d2c7d38140467acabe1f7c75b1ee1d358..4f4423e6339a0eae64e043769d47870461c964e4 100644
--- a/nomad/cli/dev.py
+++ b/nomad/cli/dev.py
@@ -42,7 +42,7 @@ def qa(skip_tests: bool, exitfirst: bool):
         click.echo('Run tests ...')
         ret_code += os.system('python -m pytest -sv%s tests' % ('x' if exitfirst else ''))
     click.echo('Run code style checks ...')
-    ret_code += os.system('python -m pycodestyle --config=pycodestyle.ini nomad tests')
+    ret_code += os.system('python -m ruff nomad tests')
     click.echo('Run linter ...')
     ret_code += os.system('python -m pylint --rcfile=.pylintrc nomad tests')
     click.echo('Run static type checks ...')
diff --git a/nomad/files.py b/nomad/files.py
index 5f594b83382ff86361d4ed25864c3f56b2dc951f..02395a439421efa72367ed32a32957c5cd7d1650 100644
--- a/nomad/files.py
+++ b/nomad/files.py
@@ -157,7 +157,7 @@ def is_safe_relative_path(path: str) -> bool:
     It may end with a single '/', indicating that a folder is referred. For referring to
     the base folder, the empty string should be used (not '.' etc).
     '''
-    if type(path) != str:
+    if not isinstance(path, str):
         return False
     if path == '':
         return True
diff --git a/nomad/metainfo/metainfo.py b/nomad/metainfo/metainfo.py
index 25defbb616131a14974022b663609ef26fc5131f..23f344255d0afb4fcf924708ec61e47ad6d22c6e 100644
--- a/nomad/metainfo/metainfo.py
+++ b/nomad/metainfo/metainfo.py
@@ -1423,7 +1423,7 @@ class MSection(metaclass=MObjectMeta):  # TODO find a way to make this a subclas
                         return
 
                 elif dimensions == 1:
-                    if type(value) == str or not isinstance(value, IterableABC):
+                    if isinstance(value, str) or not isinstance(value, IterableABC):
                         raise TypeError(
                             f'The shape of {quantity_def} requires an iterable value, but {value} is not iterable.')
 
@@ -1495,7 +1495,7 @@ class MSection(metaclass=MObjectMeta):  # TODO find a way to make this a subclas
                         return
 
                 elif dimensions == 1:
-                    if type(m_quantity.value) == str or not isinstance(m_quantity.value, IterableABC):
+                    if isinstance(m_quantity.value, str) or not isinstance(m_quantity.value, IterableABC):
                         raise TypeError(
                             f'The shape of {quantity_def} requires an iterable value, '
                             f'but {m_quantity.value} is not iterable.')
@@ -1689,7 +1689,7 @@ class MSection(metaclass=MObjectMeta):  # TODO find a way to make this a subclas
             if dimension == 0:
                 attr_value = self.__set_normalize(tgt_attr, attr_value)
             elif dimension == 1:
-                if type(attr_value) == str or not isinstance(attr_value, IterableABC):
+                if isinstance(attr_value, str) or not isinstance(attr_value, IterableABC):
                     raise TypeError(f'The shape requires an iterable value, but {attr_value} is not.')
 
                 if tgt_attr.type == complex:
diff --git a/nomad/normalizing/mof_deconstructor.py b/nomad/normalizing/mof_deconstructor.py
index c9c47497e6fd11bdc2257b163a6143f16e583fd3..186c0b6361bda4b0bd7ef58bb9d15f20463f36bf 100644
--- a/nomad/normalizing/mof_deconstructor.py
+++ b/nomad/normalizing/mof_deconstructor.py
@@ -781,7 +781,7 @@ def secondary_building_units(ase_atom):
                 metal = sum(metal_oxy, [])
                 metal = [i for i in metal if i not in porphyrin_checker]
                 closest_atoms = sum(
-                    [[i for i in graph[j] if i != atoms and not ase_atom[i].symbol in transition_metals()] for j in connected], [])
+                    [[i for i in graph[j] if i != atoms and ase_atom[i].symbol not in transition_metals()] for j in connected], [])
 
                 if len(metal) > 0:
                     all_carbon_indices = sum([[i for i in graph[j] if i in connected]
diff --git a/nomad/parsing/parsers.py b/nomad/parsing/parsers.py
index c14388768b52753d42b0a311b7bee227c1ea23ae..073f1b08dcc4dd7c2fece6a90c69a71f9db4e851 100644
--- a/nomad/parsing/parsers.py
+++ b/nomad/parsing/parsers.py
@@ -112,7 +112,7 @@ def match_parser(mainfile_path: str, strict=True, parser_name: str = None) -> Tu
             if isinstance(match_result, Iterable):
                 assert parser.creates_children, 'Illegal return value - parser does not specify `creates_children`'
                 for mainfile_key in match_result:  # type: ignore
-                    assert mainfile_key and type(mainfile_key) == str, (
+                    assert mainfile_key and isinstance(mainfile_key, str), (
                         f'Child keys must be strings, got {type(mainfile_key)}')
                 mainfile_keys = sorted(match_result)  # type: ignore
             else:
diff --git a/nomad/processing/data.py b/nomad/processing/data.py
index b9882d6f4cc4169d6ce9820c0f1d484c0dfabb62..b3c51200073addf516c2e6931dec3cfbe9196ef1 100644
--- a/nomad/processing/data.py
+++ b/nomad/processing/data.py
@@ -427,7 +427,7 @@ class MetadataEditRequestHandler:
                 return True, self._verified_value_single(definition, raw_value)
             else:
                 # We have a non-scalar quantity
-                if type(raw_value) == dict:
+                if isinstance(raw_value, dict):
                     # The raw value is a dict - expected to contain keys add/remove/set
                     assert raw_value, 'No operation specified'
                     for key in raw_value:
@@ -450,7 +450,7 @@ class MetadataEditRequestHandler:
 
                 verified_ops = {}
                 for op, values in raw_ops.items():
-                    values = values if type(values) == list else [values]
+                    values = values if isinstance(values, list) else [values]
                     verified_values = [self._verified_value_single(definition, v, op) for v in values]
                     verified_ops[op] = verified_values
 
@@ -486,13 +486,13 @@ class MetadataEditRequestHandler:
                 datetime.fromisoformat(value)  # Throws exception if badly formatted timestamp
             return None if value == '' else value
         elif isinstance(definition.type, metainfo.MEnum):
-            assert type(value) == str, 'Expected a string value'
+            assert isinstance(value, str), 'Expected a string value'
             if value == '':
                 return None
             assert value in definition.type._values, f'Bad enum value {value}'
             return value
         elif isinstance(definition.type, metainfo.Reference):
-            assert type(value) == str, 'Expected a string value'
+            assert isinstance(value, str), 'Expected a string value'
             reference_type = definition.type.target_section_def.section_cls
             if reference_type in [datamodel.User, datamodel.Author]:
                 if value in self.encountered_users:
@@ -2236,7 +2236,7 @@ class Upload(Proc):
         query = Entry.objects(upload_id=self.upload_id)[start:end]
         if not order_by:
             return query
-        if type(order_by) == str:
+        if isinstance(order_by, str):
             return query.order_by(order_by)
         assert type(order_by) == tuple, 'order_by must be a string or a tuple if set'
         return query.order_by(*order_by)
diff --git a/nomad/search.py b/nomad/search.py
index a24d1449656476828308121b31d377de866f9e32..03deb6f59efae97f05983cd7fa3ca91a35229342 100644
--- a/nomad/search.py
+++ b/nomad/search.py
@@ -1046,7 +1046,7 @@ def _api_to_es_aggregation(
             es_agg.bucket(f'agg:parents:{name}', A('reverse_nested'))
 
     elif isinstance(agg, AutoDateHistogramAggregation):
-        if not quantity.annotation.mapping['type'] in ['date']:
+        if quantity.annotation.mapping['type'] not in ['date']:
             raise QueryValidationError(
                 f'The quantity {quantity} cannot be used in a auto date histogram aggregation',
                 loc=['aggregations', name, AggType.HISTOGRAM, 'quantity'])
@@ -1056,7 +1056,7 @@ def _api_to_es_aggregation(
             format='yyyy-MM-dd'))
 
     elif isinstance(agg, DateHistogramAggregation):
-        if not quantity.annotation.mapping['type'] in ['date']:
+        if quantity.annotation.mapping['type'] not in ['date']:
             raise QueryValidationError(
                 f'The quantity {quantity} cannot be used in a date histogram aggregation',
                 loc=['aggregations', name, AggType.HISTOGRAM, 'quantity'])
@@ -1066,7 +1066,7 @@ def _api_to_es_aggregation(
             format='yyyy-MM-dd'))
 
     elif isinstance(agg, HistogramAggregation):
-        if not quantity.annotation.mapping['type'] in ['integer', 'float', 'double', 'long', 'date']:
+        if quantity.annotation.mapping['type'] not in ['integer', 'float', 'double', 'long', 'date']:
             raise QueryValidationError(
                 f'The quantity {quantity} cannot be used in a histogram aggregation',
                 loc=['aggregations', name, AggType.HISTOGRAM, 'quantity'])
@@ -1079,7 +1079,7 @@ def _api_to_es_aggregation(
             AggType.HISTOGRAM, field=quantity.search_field, interval=agg.interval, **params))
 
     elif isinstance(agg, MinMaxAggregation):
-        if not quantity.annotation.mapping['type'] in ['integer', 'float', 'double', 'long', 'date']:
+        if quantity.annotation.mapping['type'] not in ['integer', 'float', 'double', 'long', 'date']:
             raise QueryValidationError(
                 f'The quantity {quantity} cannot be used in a mix-max aggregation',
                 loc=['aggregations', name, 'min_max', 'quantity'])
diff --git a/pycodestyle.ini b/pycodestyle.ini
deleted file mode 100644
index 454c2728173c32c7cc9c210dcaada89ce405c765..0000000000000000000000000000000000000000
--- a/pycodestyle.ini
+++ /dev/null
@@ -1,2 +0,0 @@
-[pycodestyle]
-ignore = E501,E701,E731,W503
diff --git a/pyproject.toml b/pyproject.toml
index b443234174c896718f54d315096629d2eff37ef8..9123a77d27b30bb49f4ec4f94b7908d2e843b842 100644
--- a/pyproject.toml
+++ b/pyproject.toml
@@ -12,6 +12,7 @@ authors = [
 dynamic = ["version"]
 license = { text = "Apache-2.0" }
 requires-python = ">=3.9"
+
 dependencies = [
     'numpy~=1.22.4',
     'nptyping~=1.4.4',
@@ -135,12 +136,27 @@ dev = [
     'mkdocs-macros-plugin==0.6.3',
     'aiosmtpd',
     'mkdocs-click==0.8.0',
-    'mkdocs-redirects==1.2.0'
+    'mkdocs-redirects==1.2.0',
+    'ruff==0.1.3'
 ]
 
 [project.scripts]
 nomad = "nomad.cli:run_cli"
 
+[tool.ruff]
+include = ["nomad/*.py", "tests/*.py"]
+select = [
+    "E", # pycodestyle
+    "W", # pycodestyle
+]
+ignore = [
+    "E501", # Line too long ({width} > {limit} characters)
+    "E701", # Multiple statements on one line (colon)
+    "E731", # Do not assign a lambda expression, use a def
+    "E402"  # Module level import not at top of file
+]
+fixable = ["E", "W"]
+
 [tool.setuptools.packages.find]
 where = [
     ".",
@@ -165,3 +181,4 @@ disable_error_code = "import, annotation-unchecked"
 
 
 [tool.setuptools_scm]
+
diff --git a/requirements-dev.txt b/requirements-dev.txt
index af2ce0589095e9d2da94bb14e9ac652a752c7c55..521770ab6ee90debdf601fac3093758d6d56e5f3 100644
--- a/requirements-dev.txt
+++ b/requirements-dev.txt
@@ -281,6 +281,7 @@ rope==0.21.0              # via nomad-lab (pyproject.toml)
 rsa==4.9                  # via -r requirements.txt, python-jose
 ruamel-yaml==0.17.21      # via -r requirements.txt, jupyter-telemetry, oauthenticator, pymatgen
 ruamel-yaml-clib==0.2.7   # via -r requirements.txt, ruamel-yaml
+ruff==0.1.3              # via nomad-lab (pyproject.toml)
 runstats==2.0.0           # via -r requirements.txt, nomad-lab (pyproject.toml)
 scikit-image==0.19.3      # via -r requirements.txt, hyperspy, kikuchipy, pyxem
 scikit-learn==1.0.2       # via -r requirements.txt, kikuchipy, nomad-lab (pyproject.toml), pyxem
diff --git a/tests/app/v1/routers/test_entries.py b/tests/app/v1/routers/test_entries.py
index f251f7af5f18553456b564520614a3ac768ed809..85c059034137fab21b8b9fb3ba53aad1f7645a36 100644
--- a/tests/app/v1/routers/test_entries.py
+++ b/tests/app/v1/routers/test_entries.py
@@ -51,7 +51,7 @@ def perform_entries_raw_test(
         client, headers={}, query={}, owner=None, files={}, total=-1, files_per_entry=5,
         status_code=200, http_method='get'):
 
-    if type(total) == int:
+    if isinstance(total, int):
         total_entries = total_mainfiles = total
     else:
         total_entries, total_mainfiles = total
diff --git a/tests/app/v1/routers/test_uploads.py b/tests/app/v1/routers/test_uploads.py
index 41958c0329346f03d9583c307d16d14d1c399186..18783394abb1575b65c858a856923ce1a526ec2e 100644
--- a/tests/app/v1/routers/test_uploads.py
+++ b/tests/app/v1/routers/test_uploads.py
@@ -69,7 +69,7 @@ def perform_post_put_file(
         client, action, url, mode, file_paths, user_auth=None, token=None, accept='application/json',
         **query_args):
     ''' Posts or puts a file. '''
-    if type(file_paths) == str:
+    if isinstance(file_paths, str):
         file_paths = [file_paths]
     headers = {'Accept': accept}
     if user_auth:
@@ -135,7 +135,7 @@ def assert_file_upload_and_processing(
     the results.
     '''
     source_paths = source_paths or []
-    if type(source_paths) == str:
+    if isinstance(source_paths, str):
         source_paths = [source_paths]
     user_auth, token = test_auth_dict[user]
     # Use either token or bearer token for the post operation (never both)
@@ -199,7 +199,7 @@ def assert_expected_mainfiles(upload_id, expected_mainfiles):
         entries = [e.mainfile for e in Entry.objects(upload_id=upload_id)]
         assert set(entries) == set(expected_mainfiles), 'Wrong entries found'
         for entry in Entry.objects(upload_id=upload_id):
-            if type(expected_mainfiles) != dict or expected_mainfiles[entry.mainfile]:
+            if not isinstance(expected_mainfiles, dict) or expected_mainfiles[entry.mainfile]:
                 assert entry.process_status == ProcessStatus.SUCCESS
             else:
                 assert entry.process_status == ProcessStatus.FAILURE
@@ -858,13 +858,13 @@ def test_get_upload_raw_path(
         if mime_type == 'application/zip':
             if expected_content:
                 with zipfile.ZipFile(io.BytesIO(response.content)) as zip_file:
-                    if type(expected_content) == str:
+                    if isinstance(expected_content, str):
                         # Single file - check content
                         with zip_file.open(os.path.basename(path), 'r') as f:
                             file_content = f.read()
                             assert expected_content.encode() in file_content
                     else:
-                        assert type(expected_content) == list
+                        assert isinstance(expected_content, list)
                         # Directory - check content
                         zip_paths = zip_file.namelist()
                         # Check: only root elements specified in expected_content are allowed
@@ -1185,7 +1185,7 @@ def test_put_upload_raw_path(
     action = 'PUT'
     url = f'uploads/{upload_id}/raw/{target_path}'
     published = False
-    all_entries_should_succeed = not (type(expected_mainfiles) == dict and False in expected_mainfiles.values())
+    all_entries_should_succeed = not (isinstance(expected_mainfiles, dict) and False in expected_mainfiles.values())
     expected_process_status = ProcessStatus.SUCCESS if 'wait_for_processing' in query_args else None
 
     response, _ = assert_file_upload_and_processing(
@@ -1533,7 +1533,7 @@ def test_post_upload(
     '''
     Posts an upload, with different arguments.
     '''
-    if type(source_paths) == str:
+    if isinstance(source_paths, str):
         source_paths = [source_paths]
     if test_limit:
         monkeypatch.setattr('nomad.config.services.upload_limit', 0)
diff --git a/tests/app/v1/routers/test_users.py b/tests/app/v1/routers/test_users.py
index 05d4804a69ba59ed7f11bb486ff65611857d0202..499fd4afded13532e59f52878d690a67504688f6 100644
--- a/tests/app/v1/routers/test_users.py
+++ b/tests/app/v1/routers/test_users.py
@@ -95,7 +95,7 @@ def test_users(client, args, expected_status_code, expected_content):
         assert_user(user, expected_content)
 
     if user_id:
-        if type(user_id) != list:
+        if not isinstance(user_id, list):
             rv = client.get(f'users?user_id={user_id}')
             assert rv.status_code == expected_status_code
             if rv.status_code == 200:
diff --git a/tests/normalizing/test_topology.py b/tests/normalizing/test_topology.py
index a9482e51938a074ac89a97cf9da38024aa779ac1..3883fdc03d731a4159e79bcb8b930674c8451bc2 100644
--- a/tests/normalizing/test_topology.py
+++ b/tests/normalizing/test_topology.py
@@ -340,22 +340,22 @@ def test_topology_2d(surface, ref_topologies):
                 if ref_atoms_property_key == 'm_def':
                     continue
                 atoms_property = atoms[ref_atoms_property_key]
-                if type(atoms_property) == list:
+                if isinstance(atoms_property, list):
                     property = atoms_property[0]
-                    if type(property) == list:
+                    if isinstance(property, list):
                         assert np.allclose(atoms_property, ref_atoms_property, rtol=1e-05, atol=1e-9)
-                    elif type(property) == dict:
+                    elif isinstance(property, dict):
                         for property_keys, property_values in property.items():
                             ref_property = ref_atoms_property[0][property_keys]
                             assert property_values == ref_property
-                elif type(atoms_property) == dict:
+                elif isinstance(atoms_property, dict):
                     for property_keys, property_values in atoms_property.items():
                         ref_property_value = ref_atoms_property[property_keys]
-                        if type(property_values) == float:
+                        if isinstance(property_values, float):
                             assert np.allclose(property_values, ref_property_value, rtol=1e-05, atol=1e-9)
                         else:
                             assert ref_atoms_property == property_values
-                elif type(atoms_property) == float:
+                elif isinstance(atoms_property, float):
                     assert np.allclose(ref_atoms_property, atoms_property, rtol=1e-05, atol=1e-9)
                 else:
                     assert ref_atoms_property == atoms_property
diff --git a/tests/processing/test_base.py b/tests/processing/test_base.py
index c2b6a85662341656c23eba9ea5ba767fbddff8ba..dc8e4e882ddc9c656fd60b402018cfaf2f18281b 100644
--- a/tests/processing/test_base.py
+++ b/tests/processing/test_base.py
@@ -49,12 +49,12 @@ def assert_proc(proc, current_process, process_status=ProcessStatus.SUCCESS, err
 def assert_events(expected_events: List[Union[str, List[str]]]):
     ind = 0
     for expected in expected_events:
-        if type(expected) == str:
+        if isinstance(expected, str):
             # str -> expect a specific event
             assert ind <= len(events), f'Not enough events, expecting {expected}'
             assert expected == events[ind]
             ind += 1
-        elif type(expected) == list:
+        elif isinstance(expected, list):
             # list -> expecting a number of events, in any order
             while expected:
                 assert ind <= len(events), f'Not enough events, expecting one of {expected}'
@@ -216,7 +216,7 @@ class ParentProc(Proc):
                     events.append(f'{self.parent_id}:join:fail')
                     assert False, 'failing in join'
                 else:
-                    if type(join_arg) == bool:
+                    if isinstance(join_arg, bool):
                         join_arg = [join_arg]
                     for i, succeed in enumerate(join_arg):
                         # Start up another child
@@ -316,7 +316,7 @@ def test_parent_child(worker, mongo, reset_events, spawn_kwargs, expected_events
         assert child.process_status == expected_child_status
     for i, join_arg in enumerate(join_args):
         if join_arg != fail:
-            if type(join_arg) == bool:
+            if isinstance(join_arg, bool):
                 join_arg = [join_arg]
             for i2, succeed in enumerate(join_arg):
                 child = ChildProc.get(f'rejoin{i + 1}.{i2}')
diff --git a/tests/processing/test_edit_metadata.py b/tests/processing/test_edit_metadata.py
index 2f57efca00aa2a7c945b774b7ec795478e8850f0..c06103eacc9a5b9e1aaab55755e6cede76d0e659 100644
--- a/tests/processing/test_edit_metadata.py
+++ b/tests/processing/test_edit_metadata.py
@@ -147,7 +147,7 @@ def convert_to_comparable_value(quantity, value, from_format, user):
         return convert_to_comparable_value_single(quantity, value, from_format, user)
     if value is None and from_format == 'es':
         return []
-    if type(value) != list:
+    if not isinstance(value, list):
         value = [value]
     return [convert_to_comparable_value_single(quantity, v, from_format, user) for v in value]
 
@@ -397,7 +397,7 @@ def test_list_quantities(proc_infra, purged_app, example_data_writeable, example
         return dataset_ref
 
     def replace_dataset_ref_or_reflist(ref_or_reflist):
-        if type(ref_or_reflist) == list:
+        if isinstance(ref_or_reflist, list):
             return [replace_dataset_ref(ref) for ref in ref_or_reflist]
         return replace_dataset_ref(ref_or_reflist)
 
@@ -410,7 +410,7 @@ def test_list_quantities(proc_infra, purged_app, example_data_writeable, example
                 kwargs[arg] = kwargs.pop(arg + suffix)
         datasets = kwargs['metadata'].get('datasets')
         if datasets is not None:
-            if type(datasets) == dict:
+            if isinstance(datasets, dict):
                 datasets = {op: replace_dataset_ref_or_reflist(v) for op, v in datasets.items()}
             else:
                 datasets = replace_dataset_ref_or_reflist(datasets)