diff --git a/docs/setup.md b/docs/setup.md
index 5c2312cbce62af745e97d45ae88cdf4551e69661..fd720ea55be3eb6024e7d61ade5f61a8d8899a21 100644
--- a/docs/setup.md
+++ b/docs/setup.md
@@ -63,6 +63,10 @@ You can run the worker as part of the docker infrastructure.
 cd infrastructure
 docker-compose up nomad-worker
 ```
+In this case, the worker inside docker and the python process outside docker will try
+to address the Redis backend under different hostnames, which does not work. If you need
+this setup, you can add `127.0.0.1 redis` to your `/etc/hosts`, or configure the
+docker-compose networking accordingly.
 
 You can also run the worker yourself, e.g. to develop on the processing. To simply
 run a worker do (from the root)
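The `/etc/hosts` workaround described in the hunk above can be verified from outside docker before starting a local worker. This is a minimal sketch, not part of the patch; it assumes the infrastructure's `redis` service publishes the default Redis port 6379 on the host.

```python
# Minimal sketch (not part of the patch): after adding `127.0.0.1 redis` to
# /etc/hosts, confirm that the hostname resolves and the backend is reachable
# from outside docker. Assumes the compose setup publishes Redis on the
# default port 6379 of the host.
import socket

addr = socket.gethostbyname('redis')                      # expect 127.0.0.1
with socket.create_connection((addr, 6379), timeout=2):   # raises if unreachable
    print('redis reachable at %s:6379' % addr)
```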
diff --git a/infrastructure/docker-compose.yml b/infrastructure/docker-compose.yml
index 79d156df6aa9455e0a55847a40e9ca63abd05c60..40040f14815bbe1a3837867b35fae041fb916ea1 100644
--- a/infrastructure/docker-compose.yml
+++ b/infrastructure/docker-compose.yml
@@ -66,6 +66,12 @@ services:
     nomad-worker:
         restart: always
         build: ../
+        environment:
+            - NOMAD_MINIO_PORT=9000
+            - NOMAD_MINIO_HOST=minio
+            - NOMAD_RABBITMQ_HOST=rabbitmq
+            - NOMAD_REDIS_HOST=redis
+            - NOMAD_LOGSTASH_HOST=elk
         links:
             - minio
             - redis
@@ -73,4 +79,4 @@ services:
             - elk
         volumes:
             - '../.volumes/fs:/app/.volumes/fs'
-        command: python -m celery worker -l debug -A nomad.processing
+        command: python -m celery worker -l info -A nomad.processing
diff --git a/nomad/config.py b/nomad/config.py
index 156a794f676ecf51801151a98321d0e9c9c4b848..e894c9d28ad3d79e390e61978956ffb1c5f2a217 100644
--- a/nomad/config.py
+++ b/nomad/config.py
@@ -17,13 +17,15 @@ This module is used to store all configuration values. It makes use of
 *namedtuples* to create key sensitive configuration objects.
 """
 
+import os
 from collections import namedtuple
 
 S3Config = namedtuple('S3', ['uploads_bucket', 'repository_bucket', 'archive_bucket'])
 """ API independent configuration for the object storage. """
 
-RabitMQConfig = namedtuple('RabbitMQ', ['host', 'port', 'user', 'password'])
-""" Used to configure the RabbitMQ used by celery as a task backend. """
+CeleryConfig = namedtuple('Celery', [
+    'rabbit_host', 'rabbit_port', 'rabbit_user', 'rabbit_password', 'redis_host'])
+""" Used to configure the RabbitMQ and Redis backends for celery. """
 
 MinioConfig = namedtuple('Minio', ['host', 'port', 'accesskey', 'secret'])
 """ Used to configure the minio object storage API. """
@@ -39,15 +41,16 @@ s3 = S3Config(
     repository_bucket='repository',
     archive_bucket='archive'
 )
-rabbitmq = RabitMQConfig(
-    host='localhost',
-    port=None,
-    user='rabbitmq',
-    password='rabbitmq'
+celery = CeleryConfig(
+    rabbit_host=os.environ.get('NOMAD_RABBITMQ_HOST', 'localhost'),
+    rabbit_port=os.environ.get('NOMAD_RABBITMQ_PORT', None),
+    rabbit_user='rabbitmq',
+    rabbit_password='rabbitmq',
+    redis_host=os.environ.get('NOMAD_REDIS_HOST', 'localhost'),
 )
 minio = MinioConfig(
-    host='localhost',
-    port=9007,
+    host=os.environ.get('NOMAD_MINIO_HOST', 'localhost'),
+    port=int(os.environ.get('NOMAD_MINIO_PORT', '9007')),
     accesskey='AKIAIOSFODNN7EXAMPLE',
     secret='wJalrXUtnFEMI/K7MDENG/bPxRfiCYEXAMPLEKEY'
 )
@@ -56,6 +59,6 @@ fs = FSConfig(
 )
 logstash = LogstashConfig(
     enabled=False,
-    host='localhost',
-    tcp_port=5000
+    host=os.environ.get('NOMAD_LOGSTASH_HOST', 'localhost'),
+    tcp_port=int(os.environ.get('NOMAD_LOGSTASH_TCPPORT', '5000'))
 )
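With the environment-driven defaults above, the same config module serves both the docker-compose worker and a local checkout. The sketch below illustrates the behaviour, assuming `nomad.config` is importable from the repository root; note that the defaults are resolved at import time, so the `NOMAD_*` variables must be set before the module is first imported.

```python
# Minimal sketch: the NOMAD_* variables must be in the environment before
# nomad.config is imported, because os.environ is read at import time.
import os

os.environ['NOMAD_REDIS_HOST'] = 'redis'   # as set by docker-compose
os.environ['NOMAD_MINIO_PORT'] = '9000'

from nomad import config

print(config.celery.redis_host)   # 'redis' here, 'localhost' without the variable
print(config.minio.port)          # 9000 here, 9007 without the variable
```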
diff --git a/nomad/processing.py b/nomad/processing.py
index d5f34888883e21869be309119747e089c61566ed..20013bb364e694951042b38cf734f32fa7e5b453 100644
--- a/nomad/processing.py
+++ b/nomad/processing.py
@@ -51,8 +51,9 @@ if config.logstash.enabled:
     after_setup_logger.connect(initialize_logstash)
 
 
-broker_url = 'pyamqp://%s:%s@localhost//' % (config.rabbitmq.user, config.rabbitmq.password)
-backend_url = 'redis://localhost/0'
+broker_url = 'pyamqp://%s:%s@%s//' % (
+    config.celery.rabbit_user, config.celery.rabbit_password, config.celery.rabbit_host)
+backend_url = 'redis://%s/0' % config.celery.redis_host
 app = Celery('nomad.processing', backend=backend_url, broker=broker_url)
 app.conf.update(
     accept_content=['pickle'],
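For reference, a sketch of the URLs that `nomad.processing` now derives from the config, assuming the defaults and the docker-compose environment shown above:

```python
# Minimal sketch of the resulting celery URLs, assuming the defaults above:
#   outside docker (no NOMAD_* variables set):
#       broker_url  = 'pyamqp://rabbitmq:rabbitmq@localhost//'
#       backend_url = 'redis://localhost/0'
#   inside the docker-compose worker (NOMAD_RABBITMQ_HOST=rabbitmq, NOMAD_REDIS_HOST=redis):
#       broker_url  = 'pyamqp://rabbitmq:rabbitmq@rabbitmq//'
#       backend_url = 'redis://redis/0'
from nomad import config

broker_url = 'pyamqp://%s:%s@%s//' % (
    config.celery.rabbit_user, config.celery.rabbit_password, config.celery.rabbit_host)
backend_url = 'redis://%s/0' % config.celery.redis_host
print(broker_url, backend_url)
```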