Docker Compose for Celery + Redis

A Python Celery task queue using Redis as the message broker, for distributed task processing.

python celery redis queue worker
compose.yaml
# Web app + Celery worker sharing a single Redis instance as both
# the Celery broker and the result backend.
services:
  web:
    build: .
    ports:
      # Expose the app on port 8000 through the ingress load balancer.
      - target: 8000
        mode: ingress
    environment:
      - CELERY_BROKER_URL=redis://redis:6379/0
      - CELERY_RESULT_BACKEND=redis://redis:6379/0
    healthcheck:
      # Exec-form (CMD) healthcheck; assumes curl is present in the image.
      test:
        - CMD
        - curl
        - -f
        - http://localhost:8000/
      interval: 30s
      timeout: 5s
      retries: 3
    depends_on:
      # Long form: wait for redis to pass its healthcheck before starting,
      # not merely for the container to be created (short-list form only
      # guarantees start order, not readiness).
      redis:
        condition: service_healthy
    deploy:
      resources:
        reservations:
          cpus: "0.5"
          memory: 256M
    restart: unless-stopped
  worker:
    # Same image as web, but runs the Celery worker process instead.
    build: .
    command: celery -A tasks worker --loglevel=info
    environment:
      - CELERY_BROKER_URL=redis://redis:6379/0
      - CELERY_RESULT_BACKEND=redis://redis:6379/0
    depends_on:
      # Workers crash-loop if the broker is unreachable; gate on health.
      redis:
        condition: service_healthy
    deploy:
      resources:
        reservations:
          cpus: "0.5"
          memory: 256M
    restart: unless-stopped
  redis:
    image: redis:7-alpine
    # NOTE(review): mode: host publishes 6379 directly on the host with no
    # authentication configured. If only web/worker need Redis, this ports
    # block can be removed entirely — services reach redis over the internal
    # network by service name. Confirm whether external access is intended.
    ports:
      - target: 6379
        mode: host
    healthcheck:
      # redis-cli ping returns PONG once the server accepts connections.
      test:
        - CMD
        - redis-cli
        - ping
      interval: 10s
      timeout: 5s
      retries: 5
    deploy:
      resources:
        reservations:
          cpus: "0.5"
          memory: 256M
    restart: unless-stopped
    # Defang extension: provision a managed Redis instead of this container.
    x-defang-redis: true

Services

  • web — built from the local Dockerfile (build context `.`)
  • worker — built from the local Dockerfile (build context `.`)
  • redis — image `redis:7-alpine`

Deploy

$ npx defang@latest compose up

Learn more about Defang