I have set up my Celery app for Django and am trying to test a simple periodic task in my Django app.
I am using RabbitMQ as the message broker, and the broker URL is set by passing environment variables in from the docker-compose.yml file.
My docker-compose.yml file has the following structure:
version: '3'
services:
nginx:
restart: always
image: nginx:latest
container_name: NGINX
ports:
- "8000:8000"
volumes:
- ./src:/src
- ./config/nginx:/etc/nginx/conf.d
- /static:/static
depends_on:
- web
web:
restart: always
build: .
container_name: DJANGO
command: bash -c "python manage.py makemigrations && python manage.py migrate && gunicorn loop.wsgi -b 0.0.0.0:8000 --reload"
depends_on:
- db
volumes:
- ./src:/src
- /static:/static
expose:
- "8000"
links:
- db
- rabbit
db:
restart: always
image: postgres:latest
container_name: PSQL
rabbit:
hostname: rabbit
image: rabbitmq:latest
ports:
# We forward this port for debugging purposes.
- "5672:5672"
# Here, we can access the rabbitmq management plugin.
- "15672:15672"
environment:
- RABBITMQ_DEFAULT_USER=admin
- RABBITMQ_DEFAULT_PASS=mypass
# Celery worker
worker:
build: .
command: bash -c "python manage.py celery worker -B --concurrency=1"
volumes:
- .:/src
links:
- db
- rabbit
depends_on:
- rabbit
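The question does not show how the broker URL is assembled from those environment variables. As a rough sketch only (the variable names and defaults below are assumptions that mirror the rabbit service above, not the poster's actual code), it could look like this in settings.py:
# settings.py (sketch, not the poster's actual code)
import os

# "rabbit" is the compose service name; the admin/mypass defaults mirror the
# RABBITMQ_DEFAULT_USER / RABBITMQ_DEFAULT_PASS values set on that service.
BROKER_URL = "amqp://{user}:{password}@rabbit:5672//".format(
    user=os.environ.get("RABBITMQ_USER", "admin"),
    password=os.environ.get("RABBITMQ_PASS", "mypass"),
)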
For testing purposes, I have created a scheduled task in settings.py as follows (scheduled every 10 seconds):
CELERYBEAT_SCHEDULE = {
'schedule-task': {
'task': 'myapp.tasks.test_print',
'schedule': 10, # in seconds
},
}
and the tasks.py file inside myapp has the following code:
# -*- coding: utf-8 -*-
from celery.task import task
@task(ignore_result=True, max_retries=1, default_retry_delay=10)
def test_print():
    print("Print from celery task")
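For context, the celery subcommand of manage.py typically comes from the django-celery (djcelery) extension. The settings wiring that usually goes with it is sketched below; this is hedged, since the question does not show that part of the project, and only myapp is taken from the question:
# settings.py (sketch): django-celery wiring implied by "manage.py celery worker"
import djcelery
djcelery.setup_loader()

INSTALLED_APPS = [
    # ...
    'djcelery',
    'myapp',  # the app that contains tasks.py
]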
When I run the command docker-compose run web python manage.py celery worker -B --concurrency=1, the test_print task is executed and its output is printed.
But when I run docker-compose up --build, the following output is printed and the test_print task is never executed:
rabbit_1 | =INFO REPORT==== 15-Jun-2017::13:05:30 ===
rabbit_1 | accepting AMQP connection <0.354.0> (172.18.0.7:51400 -> 172.18.0.3:5672)
rabbit_1 | =INFO REPORT==== 15-Jun-2017::12:54:25 ===
rabbit_1 | connection <0.421.0> (172.18.0.6:52052 -> 172.18.0.3:5672): user 'admin' authenticated and granted access to vhost '/'
I am new to Docker. Any guidance would be very helpful.
Related
I'm trying to create a Django project with Celery and Redis for the messaging service using docker-compose. I'm getting Cannot connect to amqp://guest:**@127.0.0.1:5672. I'm not using guest as a user anywhere, nor 127.0.0.1:5672, and amqp is for RabbitMQ, which I'm not using. So I don't know whether my docker-compose volumes are set incorrectly and Celery can't find the settings, where it is getting amqp from, or whether the broker is misconfigured.
docker-compose.yml:
version: '3'
# network
networks:
data:
management:
volumes:
postgres-data:
redis-data:
services:
nginx:
image: nginx
ports:
- "7001:80"
volumes:
- ./nginx.conf:/etc/nginx/conf.d/default.conf:ro
- ../static:/static
command: [nginx-debug, '-g', 'daemon off;']
networks:
- management
depends_on:
- web
db:
image: postgres:14
restart: always
volumes:
- postgres-data:/var/lib/postgresql/data/
- ../data:/docker-entrypoint-initdb.d # import SQL dump
environment:
- POSTGRES_DB=link_checker_db
- POSTGRES_USER=link_checker
- POSTGRES_PASSWORD=passw0rd
networks:
- data
ports:
- "5432:5432"
web:
image: link_checker_backend
build:
context: .
dockerfile: Dockerfile
environment:
- DJANGO_LOG_LEVEL=ERROR
- INITIAL_YAML=/code/initial.yaml
volumes:
- ../:/code
- ../link_checker:/code/link_checker
- ../link_checker_django/:/code/link_checker_django
- ./settings.py:/code/link_checker_django/settings.py
working_dir: /code
command: >
sh -c "
python manage.py migrate --noinput &&
python manage.py collectstatic --no-input &&
python manage.py runserver 0.0.0.0:7000
"
networks:
- data
- management
depends_on:
- db
redis:
image: redis
volumes:
- redis-data:/data
networks:
- data
celery-default:
image: link_checker_backend
volumes:
- ../:/code
- ../link_checker:/code/link_checker
- ../link_checker_django/:/code/link_checker_django
- ./settings.py:/code/link_checker_django/settings.py
working_dir: /code/link_checker
command: celery -A celery worker --pool=prefork --concurrency=30 -l DEBUG
networks:
- data
depends_on:
- db
- redis
celery.py
import os

from celery import Celery
os.environ.setdefault("DJANGO_SETTINGS_MODULE", "link_checker_django.settings")
app = Celery("link_checker")
app.config_from_object("django.conf:settings")
app.conf.task_create_missing_queues = True
app.autodiscover_tasks()
settings.py
BROKER_URL = "redis://redis:6379/0"
CELERY_ACCEPT_CONTENT = ["json"]
CELERY_TASK_SERIALIZER = "json"
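A side note on where amqp comes from: Celery's built-in default broker is amqp://guest@localhost:5672//, so an error mentioning amqp://guest:**@127.0.0.1:5672 usually means the broker setting is not being picked up at all. For comparison, the Django integration shown in the Celery docs namespaces its settings; a hedged sketch of that variant (not necessarily the poster's intended layout):
# celery.py (sketch): namespaced variant of the module above
import os

from celery import Celery

os.environ.setdefault("DJANGO_SETTINGS_MODULE", "link_checker_django.settings")

app = Celery("link_checker")

# With namespace="CELERY", only settings prefixed with CELERY_ are read,
# e.g. CELERY_BROKER_URL = "redis://redis:6379/0" in settings.py.
app.config_from_object("django.conf:settings", namespace="CELERY")
app.autodiscover_tasks()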
File structure:
link_checker_django/
├── deploy/
│   └── docker-compose.yml
├── link_checker/
│   └── celery.py
├── link_checker_django/
│   └── settings.py
└── manage.py
Thanks for any help.
I have a Django app deployed in Docker containers.
I have 3 config environments: dev, preprod and prod.
dev is my local environment (localhost), while preprod and prod are remote Linux environments.
It works when using the "public" Redis server and the standard config.
But I need to use our own Redis, deployed in a Docker container on a remote server (192.168.xx.xx), with the container name redis_cont.
I do not really know how to configure this, or whether it is even possible.
I would appreciate some help.
docker-compose
version: '3.7'
services:
web:
restart: always
build:
context: ./app
dockerfile: Dockerfile.dev
restart: always
command: python manage.py runserver 0.0.0.0:8000
volumes:
- ./app:/usr/src/app
ports:
- 8000:8000
env_file:
- ./.env.dev
entrypoint: [ "/usr/src/app/entrypoint.dev.sh" ]
depends_on:
- redis
healthcheck:
test: ["CMD", "curl", "-f", "http://localhost:8000/"]
interval: 30s
timeout: 10s
retries: 50
redis:
container_name: redis_cont  # <= container running on a remote Linux server
image: "redis:alpine"
celery:
build:
context: ./app
dockerfile: Dockerfile.dev
command: celery -A core worker -l info
volumes:
- ./app:/usr/src/app
env_file:
- ./.env.dev
depends_on:
- web
- redis
celery-beat:
build:
context: ./app
dockerfile: Dockerfile.dev
command: celery -A core beat -l info
volumes:
- ./app:/usr/src/app
env_file:
- ./.env.dev
depends_on:
- web
- redis
settings.py
CELERY_BROKER_URL = 'redis://redis:6379'
CELERY_RESULT_BACKEND = 'redis://redis:6379'
CELERY_ACCEPT_CONTENT = ['application/json']
CELERY_TASK_SERIALIZER = 'json'
CELERY_RESULT_SERIALIZER = 'json'
from celery.schedules import crontab

CELERY_BEAT_SCHEDULE = {
'hello': {
'task': 'project.tasks.hello',
'schedule': crontab() # execute every minute
},
}
Since the containers are not created by the same docker-compose file, they won't share the same network; redis_cont simply doesn't exist to the services built in the isolated network of your docker-compose project.
If the Redis container is published on the remote host and is reachable at ip:port, you should be able to use that address directly in your settings.py. There is no need to add a new service to your compose file.
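As a rough sketch of that suggestion (the address below is the placeholder from the question, and the port assumes Redis is published on its default 6379):
# settings.py (sketch): point Celery at the Redis published on the remote host
CELERY_BROKER_URL = 'redis://192.168.xx.xx:6379/0'
CELERY_RESULT_BACKEND = 'redis://192.168.xx.xx:6379/0'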
Note
To establish communication between services in the same docker-compose project, use the service name (web, celery-beat, etc. in your case), not the container name.
When I run my application without Docker it works perfectly, but with docker-compose I get this error:
| Error: Invalid value for '-A' / '--app':
| Unable to load celery application.
| The module sampleproject was not found.
My docker-compose file:
app:
container_name: myapp
hostname: myapp
build:
context: .
dockerfile: Dockerfile
image: sampleproject
tty: true
command: >
bash -c "
python manage.py migrate &&
python manage.py runserver 0.0.0.0:8000
"
env_file: .env
ports:
- "8000:8000"
volumes:
- .:/project
depends_on:
- database
- redis
redis:
image: redis:alpine
celery:
build:
context: ./
dockerfile: Dockerfile
command: celery -A sampleproject worker -l info
depends_on:
- database
- redis
celery-beat:
build: .
command: celery -A sampleproject beat -l info
depends_on:
- database
- redis
- celery
My Dockerfile:
FROM python:3.8
COPY requirements.txt requirements.txt
RUN pip install -r requirements.txt --no-cache-dir \
&& rm -rf requirements.txt
RUN mkdir /project
WORKDIR /project
My folder structure is something like this:
I had the same problem, and we found the solution. Celery is right to complain that it cannot run, since it needs an instance of the application to load.
To fix this, you only need to add a volumes directive to the docker-compose.yaml file, pointing at the project folder, in this case for the celery and celery-beat services.
Example:
app:
container_name: myapp
hostname: myapp
build:
context: .
dockerfile: Dockerfile
image: sampleproject
tty: true
command: >
bash -c "
python manage.py migrate &&
python manage.py runserver 0.0.0.0:8000
"
env_file: .env
ports:
- "8000:8000"
volumes:
- .:/project
depends_on:
- database
- redis
redis:
image: redis:alpine
celery:
build:
context: ./
dockerfile: Dockerfile
command: celery -A sampleproject worker -l info
volumes:
- .:/project
depends_on:
- database
- redis
celery-beat:
build: .
command: celery -A sampleproject beat -l info
volumes:
- .:/project
depends_on:
- database
- redis
- celery
So when the celery container starts, it will find the project in the mounted volume and run it without any problems.
Regards
I have been working on a Django application that uses Redis, PostgreSQL, Celery and RabbitMQ.
I have written a docker-compose file to run each of these services in its own container.
Here's my docker-compose.yml
version: "3.2"
services:
app:
build:
context: .
image: &app app
ports:
- "8000:8000"
env_file: &envfile
- env.env
volumes:
- ./app:/app
environment:
- DB_HOST=db
command: >
sh -c "python manage.py wait_for_db &&
python manage.py migrate &&
python manage.py runserver 0.0.0.0:8000"
depends_on:
- db
- redis
- broker
redis:
restart: always
image: redis:latest
ports:
- "6379:6379"
db:
image: postgres:12-alpine
environment:
- "POSTGRES_HOST_AUTH_METHOD=trust"
worker:
build: .
image: *app
restart: always
env_file: *envfile
command: ["celery", "worker", "--app=worker.worker.app", "--concurrency=1", "--hostname=worker@%h", "--loglevel=INFO"]
volumes:
- ./app:/app
depends_on:
- broker
- redis
- db
broker:
image: rabbitmq:3
env_file: *envfile
ports:
- 5672:5672
flower:
image: zoomeranalytics/flower:0.9.1-4.0.2
restart: "no"
env_file: *envfile
ports:
- "5555:5555"
depends_on:
- broker
My application works just fine and the containers seem to be running; the problem arises when I push an async job to the worker container. The worker container picks up the job and starts processing, but when it tries to access the DB it gives me the following error:
Task [640127f3-7769-4757-8c33-8de9052ca92c] raised unexpected: OperationalError('could not connect to server: No such file or directory\n\tIs the server running locally and accepting\n\tconnections on Unix domain socket "/tmp/.s.PGSQL.5432"?\n')
I understand that my worker container is trying to reach a DB it cannot find. Can someone please help me access my DB container from the worker container? I'm quite stuck on this.
I tried depends_on and links, but nothing seems to work.
Got it, thanks to my teammate.
I had missed passing the environment variable to the worker container; as soon as I added it, voilà:
environment:
- DB_HOST=db
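In context, that means the worker service carries the same DB_HOST hint the app service already has; a sketch based on the compose file above (only the environment block is new, everything else is unchanged):
worker:
  build: .
  image: *app
  restart: always
  env_file: *envfile
  environment:
    - DB_HOST=db  # presumably read by settings.py; without it the DB client falls back to the local Unix socket
  command: ["celery", "worker", "--app=worker.worker.app", "--concurrency=1", "--hostname=worker@%h", "--loglevel=INFO"]
  volumes:
    - ./app:/app
  depends_on:
    - broker
    - redis
    - db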
docker-compose.yml
version: '3'
services:
# Django web server
web:
volumes:
- "./app/back:/app"
- "../front/public/static:/app/static"
- "./phantomjs-2.1.1:/app/phantomjs"
build:
context: .
dockerfile: dockerfile_django
#command: python manage.py runserver 0.0.0.0:8080
#command: ["uwsgi", "--ini", "/app/back/uwsgi.ini"]
ports:
- "8080:8080"
links:
- async
- ws_server
- mysql
- redis
async:
volumes:
- "./app/async_web:/app"
build:
context: .
dockerfile: dockerfile_async
ports:
- "8070:8070"
# Aiohtp web socket server
ws_server:
volumes:
- "./app/ws_server:/app"
build:
context: .
dockerfile: dockerfile_ws_server
ports:
- "8060:8060"
# MySQL db
mysql:
image: mysql/mysql-server:5.7
volumes:
- "./db_mysql:/var/lib/mysql"
- "./my.cnf:/etc/my.cnf"
environment:
MYSQL_ROOT_PASSWORD: root
MYSQL_USER: user_b520
MYSQL_PASSWORD: buzz_17KN
MYSQL_DATABASE: dev_NT_pr
MYSQL_PORT: 3306
ports:
- "3300:3306"
# Redis
redis:
image: redis:4.0.6
build:
context: .
dockerfile: dockerfile_redis
volumes:
- "./redis.conf:/usr/local/etc/redis/redis.conf"
ports:
- "6379:6379"
# Celery worker
celery:
build:
context: .
dockerfile: dockerfile_celery
command: celery -A backend worker -l info --concurrency=20
volumes:
- "./app/back:/app"
- "../front/public/static:/app/static"
links:
- redis
# Celery beat
beat:
build:
context: .
dockerfile: dockerfile_beat
command: celery -A backend beat
volumes:
- "./app/back:/app"
- "../front/public/static:/app/static"
links:
- redis
# Flower monitoring
flower:
build:
context: .
dockerfile: dockerfile_flower
command: celery -A backend flower
volumes:
- "./app/back:/app"
- "../front/public/static:/app/static"
ports:
- "5555:5555"
links:
- redis
dockerfile_django
FROM python:3.4
RUN mkdir /app
WORKDIR /app
ADD app/back/requirements.txt /app
RUN pip3 install -r requirements.txt
# Apply migrations
CMD ["python", "manage.py", "migrate"]
#CMD python manage.py runserver 0.0.0.0:8080 & cron && tail -f /var/log/cron.log
CMD ["uwsgi", "--ini", "/app/uwsgi.ini"]
In the web container the migrations are applied and everything works.
I also added CMD ["python", "manage.py", "migrate"] to dockerfile_celery, dockerfile_flower and dockerfile_beat, but the migrations are not applied there.
I restart the containers using the command:
docker-compose up --force-recreate
How can I make the rest of the containers see the migrations?
Log:
flower_1 | File "/usr/local/lib/python3.4/site-packages/MySQLdb/connections.py", line 292, in query
flower_1 | _mysql.connection.query(self, query)
flower_1 | django.db.utils.OperationalError: (1054, "Unknown column 'api_communities.is_closed' in 'field list'")