Setting up docker-compose.yml to run Celery worker and Celery beat for a Django project with Redis as broker



I set up my Django project with Django Cookiecutter, and the project scaffolding is great. I also chose to use Docker. Now I am struggling to get Celery v4.0.x working in this setup.

Here is my docker-compose.yml:

version: '2'
volumes:
  postgres_data_dev: {}
  postgres_backup_dev: {}
services:
  postgres:
    build: ./compose/postgres
    volumes:
      - postgres_data_dev:/var/lib/postgresql/data
      - postgres_backup_dev:/backups
    environment:
      - POSTGRES_USER=application

  django:
    build:
      context: .
      dockerfile: ./compose/django/development/Dockerfile
    depends_on:
      - postgres
    environment:
      - POSTGRES_USER=application
      - USE_DOCKER=yes
    volumes:
      - .:/app
      - /tmp/
    links:
      - postgres
      - redis
    expose:
      - "8000"
    env_file:
      - ./dev.env
    restart: "on-failure"

  nginx:
    build: 
      context: .
      dockerfile: ./compose/nginx/development/Dockerfile
    depends_on:
      - django
    ports:
      - "0.0.0.0:80:80"
    links:
      - django
    volumes_from:
      - django

  redis:
    image: redis:latest
    hostname: redis

  celeryworker:
    build:
      context: .
      dockerfile: ./compose/django/development/Dockerfile
    env_file: ./dev.env
    depends_on:
      - postgres
      - redis
    command: celery -A application.taskapp worker -l INFO
    restart: "on-failure"

  celerybeat:
    build:
      context: .
      dockerfile: ./compose/django/development/Dockerfile
    env_file: ./dev.env
    depends_on:
      - postgres
      - redis
    command: celery -A application.taskapp beat -l INFO

To be honest, I feel there are some small issues with the config for the celerybeat/celeryworker services. It would be great if someone could point them out.

Update:

When I run the command to start the containers, I get an error saying the application could not be found.
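
That error usually means Celery cannot import the module passed to -A, i.e. application.taskapp has to be importable from the working directory inside the container (here /app, since the project is mounted there). Below is a minimal sketch of what that module could contain, assuming the usual cookiecutter-django taskapp layout; the path and the settings module name are assumptions and need to be adjusted to the actual project:

# application/taskapp/celery.py  (hypothetical path, mirroring the -A argument)
import os

from celery import Celery

# Assumption: the settings module name used by the cookiecutter template.
os.environ.setdefault("DJANGO_SETTINGS_MODULE", "config.settings.local")

app = Celery("application")

# Reads CELERY_* keys from Django settings; with this compose file the broker
# would be CELERY_BROKER_URL = "redis://redis:6379/0" ("redis" is the service name).
app.config_from_object("django.conf:settings", namespace="CELERY")
app.autodiscover_tasks()

A common pattern is to also re-export the instance in application/taskapp/__init__.py (from .celery import app as celery_app) so that -A application.taskapp resolves to it.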

Update:

Here is the new compose file, which works in my setup without any problems. While getting everything to work, I also came across a thread where someone mentioned that the ordering of the services matters; in this file it does at least for the YAML anchor, since &django has to be defined before celeryworker can reference it with *django. So in the new version, django is placed first.

version: '2'
volumes:
  postgres_data_dev: {}
  postgres_backup_dev: {}
services:
  django: &django
    build:
      context: .
      dockerfile: ./compose/django/development/Dockerfile
    depends_on:
      - postgres
    volumes:
      - .:/app
      - /tmp/
    links:
      - postgres
      - redis
    environment:
      - POSTGRES_USER=application
      - USE_DOCKER=yes
    expose:
      - "8000"
    env_file:
      - ./dev.env

  postgres:
    build: ./compose/postgres
    volumes:
      - postgres_data_dev:/var/lib/postgresql/data
      - postgres_backup_dev:/backups
    environment:
      - POSTGRES_USER=application
    ports:
      - "5432:5432"

  redis:
    image: redis:latest
    hostname: redis
    ports:
      - "0.0.0.0:6379:6379"
    env_file:
      - ./dev.env

  nginx:
    build:
      context: .
      dockerfile: ./compose/nginx/development/Dockerfile
    depends_on:
      - django
    ports:
      - "0.0.0.0:80:80"
    links:
      - django
    volumes_from:
      - django

  celeryworker:
    <<: *django
    depends_on:
      - redis
      - postgres
    command: "celery -A application.taskapp worker --loglevel INFO --uid taskmaster"

I am using the same tech stack and this works fine for me. My docker-compose.yml:

redis:
    image: redis
    container_name: redis
    command: ["redis-server", "--port", "${REDIS_PORT}", "--appendonly", "yes","--maxmemory", "1gb", "--maxmemory-policy", "allkeys-lru"]
    ports:
        - "${REDIS_PORT}:${REDIS_PORT}"
    volumes:
        - .:/redis.conf
    networks:
        - pipeline-net

celery-worker:
    build:
        context: ./app
    container_name: celery-worker
    entrypoint: celery
    command: -A celery_app.celery worker --loglevel=info
    volumes:
        - .:/var/www/app/worker
    links:
        - redis
    depends_on:
        - redis
    networks:
        - pipeline-net

celery-beat:
    build:
        context: ./app
    container_name: celery-beat
    entrypoint: celery
    command: -A celery_app.celery beat --loglevel=info
    volumes:
        - .:/var/www/app/beat
    links:
        - celery-worker
        - redis
    depends_on:
        - celery-worker
        - redis
    networks:
        - pipeline-net

flower:
    image: mher/flower
    container_name: flower
    environment:
        - CELERY_BROKER_URL=redis://redis:6379
        - FLOWER_PORT=8888
    ports:
        - 8888:8888
    links:
        - redis
        - celery-worker
        - celery-beat
    depends_on:
        - redis
        - celery-worker
        - celery-beat
    networks:
        - pipeline-net
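
For completeness, -A celery_app.celery in the worker and beat commands expects a module named celery_app that exposes a Celery instance called celery, importable from the container's working directory. A minimal sketch of what that module could look like; the module name, broker URL and sample task are assumptions based on this compose file, not the actual project code:

# celery_app.py  (hypothetical module matching "-A celery_app.celery")
import os

from celery import Celery

# "redis" resolves to the redis service on the pipeline-net network;
# REDIS_PORT matches the variable used in the compose file.
redis_url = "redis://redis:{}/0".format(os.environ.get("REDIS_PORT", "6379"))

celery = Celery("pipeline", broker=redis_url, backend=redis_url)


@celery.task
def add(x, y):
    return x + y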
