I am working on an IoT project with Django. I don't like doing tedious coding. The problem is that I have a model named Period, like this:
class Period(models.Model):
    # One timetable slot; `number` doubles as both the slot's label and the
    # primary key. (Presumably a school timetable — the project is
    # 'smartschool'; confirm.)
    number = models.PositiveIntegerField(primary_key=True)
    # Wall-clock bounds of the slot. end_time is what the Celery beat
    # schedule below keys its crontab entries on.
    start_time = models.TimeField()
    end_time = models.TimeField()
In addition, I want my Celery beat to do something at each Period.end_time, so I added this code in mysite/app/tasks.py:
#app.on_after_configure.connect
def setup_periodic_tasks(sender, **kwargs):
    """Register one beat entry per Period, firing at that period's end_time.

    Fix: each add_periodic_task entry is given a unique ``name``. Without
    one, entries can overwrite each other in the schedule, so only a single
    task survives — the "11 Period instances but only one task added"
    symptom described below.
    """
    for period in Period.objects.all():
        sender.add_periodic_task(
            crontab(hour=period.end_time.hour, minute=period.end_time.minute),
            do_some_thing.s(),
            name='do_some_thing_period_{}'.format(period.number),
        )
#task
def do_some_thing():
    """Placeholder task body.

    Fix: the original snippet's body was only a comment, which is a
    SyntaxError — a function body needs at least one statement.
    """
    pass
Here is the other files:
#mysite/mysite/celery.py
from __future__ import absolute_import
import os
from celery import Celery
# set the default Django settings module for the 'celery' program.
# Point Celery at the Django settings module before the app is created.
os.environ.setdefault('DJANGO_SETTINGS_MODULE', 'smartschool.settings')
app = Celery('smartschool')
# Using a string here means the worker will not have to
# pickle the object when using Windows.
app.config_from_object('django.conf:settings', namespace='CELERY')
# Pick up tasks.py modules from every installed Django app.
app.autodiscover_tasks()
#app.task(bind=True)
def debug_task(self):
    # Echo the incoming task request so the worker log shows what arrived.
    print(f'Request: {self.request!r}')
#mysite/mysite/__init__.py
from __future__ import absolute_import, unicode_literals
# This will make sure the app is always imported when
# Django starts so that shared_task will use this app.
from .celery import app as celery_app
# Explicitly export the Celery app as the package's public API.
__all__ = ['celery_app']
#mysite/mysite/settings.py ##Celery part.
CELERY_BROKER_URL = 'amqp://'
CELERY_RESULT_BACKEND = 'rpc://'
# Fixed typo: the setting is CELERY_ACCEPT_CONTENT (was CELERY_ACCEPT_COTENT,
# which Celery silently ignores).
CELERY_ACCEPT_CONTENT = ['application/json']
CELERY_RESULT_SERIALIZER = 'json'
CELERY_TASK_SERIALIZER = 'json'
CELERY_TIMEZONE = 'Asia/Ho_Chi_Minh'
# Fixed: the setting is CELERY_IMPORTS (plural), and a one-element tuple
# needs a trailing comma — ('timetable.tasks') is just a string.
CELERY_IMPORTS = ('timetable.tasks',)
# NOTE(review): crontab is used below but no import is visible in this
# snippet — `from celery.schedules import crontab` must be at the top of
# settings.py or this module fails at import time.
CELERY_BEAT_SCHEDULE = {
    #'test':
    #{
    #    'task': 'timetable.tasks.hello',
    #    'schedule': 10.0,
    #},
    'system_on': {
        'task': 'timetable.tasks.system_on',
        'schedule': crontab(hour=7, minute=0)
    },
    'system_off': {
        'task': 'timetable.tasks.system_off',
        'schedule': crontab(hour=17, minute=30)
    },
}
The periodic tasks added by hand to CELERY_BEAT_SCHEDULE worked fine, but the tasks added by the add_periodic_task function didn't. English is not my mother tongue; please excuse any errors in my post.
You can apply the #periodic_task decorator to your crontab task, and after your project is running you should run this command (-B embeds the beat scheduler in the worker): celery -A YOURPROJECTNAME worker -B -l info
Also another way to run crontab jobs like this.
#task
def my_task():
    """Placeholder task body.

    Fix: ``//your code`` is C/JS comment syntax, not Python's, and a
    function body needs at least one statement — use ``#`` and ``pass``.
    """
    # your code goes here
    pass
and your celery.py file
app.conf.beat_schedule = {
    'my_task': {
        # Dotted path to the task; replace 'Path' with your app/module name.
        'task': 'Path.tasks.my_task',
        # Run every five minutes.
        'schedule': crontab(minute='*/5'),
        'args': []
    },
}
I think I've figured it out. By changing #app.on_after_configure.connect to #app.on_after_finalize.connect, the add_periodic_task function worked; however, only one task was added, even though I have 11 Period instances!
Related
I am using Celery beat to perform a task that is supposed to be executed at on specific time. I was trying to excute it now by changing the time just to see if it works correctly. What I have noticed is it sends the task correctly when I run a fresh command that is celery -A jgs beat -l INFO but then suppose I change the time in the schedule section from two minutes or three minutes from now and then again run the above command, beat does not send the task. Then I noticed something strange. If I go to the admin area and delete all the other old tasks that were created in the crontab table, and then run the command again it sends the task again to the worker.
The tasks are being traced by the worker correctly and also the celery worker is working correctly. Below are the codes that I wrote to perform the task.
celery.py
from __future__ import absolute_import, unicode_literals
import os
from celery import Celery
from django.conf import settings
from celery.schedules import crontab
from django.utils import timezone
# NOTE(review): this re-import shadows django.utils.timezone imported on the
# line above; neither `timezone` name is used in this module — confirm which
# one was intended.
from datetime import timezone
# Set the default Django settings module for the 'celery' program.
os.environ.setdefault('DJANGO_SETTINGS_MODULE', 'jgs.settings')
app = Celery('jgs')
# Using a string here means the worker doesn't have to serialize
# the configuration object to child processes.
# - namespace='CELERY' means all celery-related configuration keys
# should have a `CELERY_` prefix.
app.conf.enable_utc = False
app.conf.update(timezone = 'Asia/Kolkata')
# app.conf.update(BROKER_URL=os.environ['REDIS_URL'],
# CELERY_RESULT_BACKEND=os.environ['REDIS_URL'])
# NOTE(review): config_from_object runs after the two lines above, so any
# CELERY_-prefixed settings in settings.py can override them — confirm order.
app.config_from_object('django.conf:settings', namespace='CELERY')
# Celery beat settings: one daily email task at 01:05 local time.
app.conf.beat_schedule = {
    'send-expiry-email-everyday': {
        'task': 'control.tasks.send_expiry_mail',
        'schedule': crontab(hour=1, minute=5),
    }
}
# Load task modules from all registered Django apps.
app.autodiscover_tasks()
#app.task(bind=True)
def debug_task(self):
    # Echo the task request so the worker log shows what arrived.
    print('Request: {0!r}'.format(self.request))
control/tasks.py
from celery import shared_task
from django.core.mail import message, send_mail, EmailMessage
from django.conf import settings
from django.template.loader import render_to_string
from datetime import datetime, timedelta
from account.models import CustomUser
from home.models import Contract
#shared_task
def send_expiry_mail():
    """Render the expiry email template and send it as an HTML message."""
    template = render_to_string('expiry_email.html')
    email = EmailMessage(
        'Registration Successfull', # subject
        template, # body
        settings.EMAIL_HOST_USER, # from address
        # NOTE(review): this positional argument is EmailMessage's `to`
        # (recipient) list — the original comment called it the sender.
        ['emaiid#gmail.com'],
        )
    email.fail_silently = False
    email.content_subtype = 'html' # WITHOUT THIS THE HTML WILL GET RENDERED AS PLAIN TEXT
    email.send()
    return "Done"
settings.py
############# CELERY SETTINGS #######################
# Local Redis broker.
CELERY_BROKER_URL = 'redis://127.0.0.1:6379'
# CELERY_BROKER_URL = os.environ['REDIS_URL']
CELERY_ACCEPT_CONTENT =['application/json']
CELERY_RESULT_SERIALIZER = 'json'
CELERY_TASK_SERIALIZER = 'json'
CELERY_TIMEZONE = 'Asia/Kolkata'
# Store task results in the database (django-celery-results).
CELERY_RESULT_BACKEND = 'django-db'
# CELERY BEAT CONFIGURATIONS
# Persist schedules in the DB (django-celery-beat). NOTE(review): with the
# DatabaseScheduler, changing a code-defined schedule does not replace rows
# already saved in the DB — stale crontab entries must be deleted there,
# which matches the behaviour described in the question above.
CELERY_BEAT_SCHEDULER = 'django_celery_beat.schedulers:DatabaseScheduler'
commands that I am using
for worker
celery -A jgs.celery worker --pool=solo -l info
for beat
celery -A jgs beat -l INFO
Please correct me where I am going wrong, or point out what I am writing wrong; I am a complete beginner with this async part.
I am really sorry if my sentences above were confusing.
I am working with celery and i am getting tasks status is pending, may be it is implementation problem. please check my code.
I am trying to save task info like id, name, status in my mongodb database, for this i am using a function which my task will call to save data in mongodb.
Am i getting my task pending because my function call is happening before return statement of task?
settings.py
# Broker and result backend both point at the local MongoDB instance.
CELERY_BROKER_URL = 'mongodb://localhost:27017/jobs'
CELERY_RESULT_BACKEND = "mongodb"
# Keep task results; otherwise no state would be stored at all.
CELERY_IGNORE_RESULT = False
# Record the STARTED state in addition to PENDING/SUCCESS/etc.
CELERY_TRACK_STARTED = True
CELERY_MONGODB_BACKEND_SETTINGS = {
    "host": "127.0.0.1",
    "port": 27017,
    "database": "jobs",
    "taskmeta_collection": "my_taskmeta_collection",
}
# NOTE(review): crontab is referenced below but no import is visible in this
# snippet — confirm `from celery.schedules import crontab` is at the top of
# settings.py.
CELERY_BEAT_SCHEDULE = {
    'add-every-minute-contrab': {
        # Matches the explicit task name set in tasks.py via
        # name='username_length_periodically'.
        'task': 'username_length_periodically',
        'schedule': crontab(minute='*/1'),
        #'args' : (2,3),
    },
}
CELERY_ACCEPT_CONTENT = ['application/json']
CELERY_TASK_SERIALIZER = 'json'
CELERY_RESULT_SERIALIZER = 'json'
# Reuses Django's TIME_ZONE setting defined elsewhere in this module.
CELERY_TIMEZONE = TIME_ZONE
celery.py
from __future__ import absolute_import, unicode_literals
import os, logging
from celery import Celery
from celery.schedules import crontab
# set the default Django settings module for the 'celery' program.
# Point Celery at the Django settings module before the app is created.
os.environ.setdefault('DJANGO_SETTINGS_MODULE', 'RestUserAPI.settings')
# NOTE(review): the app name 'UserAPI' differs from the settings module
# 'RestUserAPI' — harmless, but confirm it is intentional.
app = Celery('UserAPI')
# Using a string here means the worker don't have to serialize
# the configuration object to child processes.
# - namespace='CELERY' means all celery-related configuration keys
# should have a `CELERY_` prefix.
app.config_from_object('django.conf:settings', namespace='CELERY')
# Load task modules from all registered Django app configs.
app.autodiscover_tasks()
#app.task(bind=True)
def debug_task(self):
    # Echo the task request so the worker log shows what arrived.
    print('Request: %r' % (self.request,))
tasks.py
from __future__ import absolute_import, unicode_literals
from celery import task, current_task, result
from django.conf import settings
import datetime
from .models import TaskMetaData
#task(name='username_length_periodically', bind=True)
def get_username_length_periodically(self):
    """Periodic task: capture this run's metadata and persist it via store_metadata.

    Returns the metadata mapping (name, id, status, last_run).
    """
    last_run = datetime.datetime.now()
    # Renamed from `dict`, which shadowed the builtin type.
    metadata = {
        'name': self.name,
        'id': self.request.id,
        # NOTE: read from inside the still-running task, so this state can
        # never reflect the final outcome (it stays PENDING/STARTED).
        'status': self.AsyncResult(self.request.id).state,
        'last_run': last_run,
    }
    store_metadata(metadata)
    return metadata
def store_metadata(info):
    """Persist one task run's metadata to the TaskMetaData model.

    `info` must contain the keys 'id', 'name', 'status', and 'last_run'.
    (Parameter renamed from `dict`, which shadowed the builtin; the only
    visible caller passes it positionally, so the rename is safe.)
    """
    metadata = TaskMetaData()
    metadata.task_id = info['id']
    metadata.task_name = info['name']
    metadata.task_status = info['status']
    metadata.task_last_run = info['last_run']
    metadata.save()
I think this is just a plain old logic error. If you take a look at your call to check the status of the task using AsyncResult:
'status':self.AsyncResult(self.request.id).state,
You'll notice that you are checking the status of the task, while the task is running. That means that the task will always show state PENDING (unless you have track_task_started set) when you check the task because you are always checking the status of the task from inside the task and then never go back and update the status!
In order to update the status of the task, you should kick off a separate monitoring task that periodically checks the status of the task and records it to the database until the tasks is finished or errors out. e.g.,
#app.task(name='monitor')
def monitor(task_id):
    """Poll a task's state; record it once it settles, else re-check in 60s."""
    result = AsyncResult(task_id)
    # Fixed: READY_STATES lives in celery.states, not celery.results
    # (celery.result holds AsyncResult; the constant is in celery.states).
    if result.state in celery.states.READY_STATES:
        # update metadata table for the task_id
        ...
    else:
        monitor.apply_async(kwargs={ 'task_id': task_id }, countdown=60)
I have been trying for a while to create a task that creates a sample of a specimen every 5 hours. I have managed to configure Celery with Redis and to execute the example task from the documentation, but when I try something more complex that includes a queryset, it does not execute: the task disappears from the list when the queue is restarted.
this is the structure of the project:
proj:
Muestras:
-views.py
-tasks.py
-models.py
Servicios:
-models.py
proj:
-celery.py
-settings.py
In settings.py:
# Redis as both broker and result backend.
CELERY_BROKER_URL = 'redis://localhost:6379'
CELERY_RESULT_BACKEND = 'redis://localhost:6379'
CELERY_ACCEPT_CONTENT = ['application/json']
CELERY_TASK_SERIALIZER = 'json'
CELERY_RESULT_SERIALIZER = 'json'
CELERY_TIMEZONE = 'Europe/London'
CELERY_BEAT_SCHEDULE = {
    'generar-muestras': { # name of the scheduler entry
        # NOTE(review): this path ends in 'crear_muestras_tarea', but the
        # function defined in Muestras/tasks.py is 'crear_muestras_task';
        # that name mismatch by itself produces the
        # "Received unregistered task" error reported further down.
        'task': 'Muestras.tasks.crear_muestras_tarea',
        'schedule': 30.0, # period of running, in seconds
    },
}
This is a view that is within Muestras.views
from .models import Muestra
from backend.Servicios.models import Servicio
#this works in console
def generar_muestras():
    """Create and persist one Muestra for every existing Servicio."""
    # Fixed: querysets come from the `objects` manager — Servicio.models.all()
    # raises AttributeError.
    services = Servicio.objects.all()
    for i in services:
        muestra = Muestra(servicio_id=i.id)
        # Fixed: `muestra.save` without parentheses never called save(),
        # so no row was ever written.
        muestra.save()
In Muestras.tasks.py
from __future__ import absolute_import, unicode_literals
from celery import task
from .views import generar_muestras
#task
def crear_muestras_task():
    # NOTE(review): the beat schedule entry above points at
    # 'Muestras.tasks.crear_muestras_tarea', but this function registers as
    # 'crear_muestras_task' — the name mismatch alone explains the
    # "Received unregistered task" error shown below.
    print('hola esto tiene una funcion')
    # The real work is currently disabled:
    #generar_muestras()
this is what i have in celery.py:
from __future__ import absolute_import, unicode_literals
import os
from celery import Celery
from django.conf import setting
os.environ.setdefault('DJANGO_SETTINGS_MODULE', 'proj.settings')
app = Celery('proj')
app.config_from_object('django.conf:settings', namespace='CELERY')
app.autodiscover_tasks()
#app.task(bind=True)
def debug_task(self):
    # Echo the incoming task request for debugging.
    request_repr = repr(self.request)
    print('Request: ' + request_repr)
and when execute
celery -A proj worker -l info -B
everything works well and executes the task, but it breaks when I uncomment this line in Muestras/tasks.py and import the view from .views:
generar_muestras()
the task disappears from the list and i get this error:
[2018-11-04 22:31:37,734: INFO/MainProcess] celery#linux-z6z3 ready.
[2018-11-04 22:31:37,876: ERROR/MainProcess] Received unregistered task of
type 'Muestras.tasks.crear_muestras_tarea'.
The message has been ignored and discarded.
Did you remember to import the module containing this task?
Or maybe you're using relative imports?
Please see
http://docs.celeryq.org/en/latest/internals/protocol.html
for more information.
The full contents of the message body was:
b'[[], {}, {"callbacks": null, "errbacks": null, "chain": null, "chord":
null}]' (77b)
Traceback (most recent call last):
File "/home/wecbxxx/PycharmProjects/porj/venv/lib64/python3.6/site-
packages/celery/worker/consumer/consumer.py", line 558, in
on_task_received
strategy = strategies[type_]
KeyError: 'Muestras.tasks.crear_muestras_tarea'
You didn't share your settings.py or how you run the celery worker so I am taking a wild guess.
Your task should be listed under imports setting of celery. See here.
Your task should be decorated by #app.task(). See here
I suggest you go through celery's user guide. I think it can use some structural improvement but should be enough to understand the basics.
To expand on #gokhan's answer, there are two things that you should make sure of:
Decorate your task with #app.task
from __future__ import absolute_import, unicode_literals
# Bind the task to the project's Celery app so autodiscovery registers it.
from proj.celery import app
from .views import generar_muestras
#app.task
def crear_muestras_task():
    """Beat-scheduled task; currently only logs — the real work is commented out."""
    print('hola esto tiene una funcion')
    #generar_muestras()
Make sure that Muestras appears in settings.INSTALLED_APPS. This will allow the autodiscover to discover your tasks:
Next, a common practice for reusable apps is to define all tasks in a separate tasks.py module, and Celery does have a way to auto-discover these modules:
app.autodiscover_tasks()
With the line above Celery will automatically discover tasks from all of your installed apps, following the tasks.py convention:
settings.py
CELERY_BROKER_URL = 'redis://localhost:6379'
CELERY_RESULT_BACKEND = 'redis://localhost:6379'
CELERY_ACCEPT_CONTENT = ['application/json']
CELERY_RESULT_SERIALIZER = 'json'
CELERY_TASK_SERIALIZER = 'json'
CELERY_TIMEZONE = 'Asia/Makassar'
# Other Celery settings
# NOTE(review): crontab is used below but not imported in this snippet —
# confirm `from celery.schedules import crontab` at the top of settings.py.
CELERY_BEAT_SCHEDULE = {
    'add_task': {
        # Fixed: the entry key for the dotted task path must be 'task';
        # the original used 'add_task' again, so beat never knew which
        # task to run.
        'task': 'data_loader.tasks.add_task',
        'schedule': crontab(minute=55, hour=13),
    }
    # 'task-number-two': {
    #     'task': 'data_loader.tasks.demo',
    #     'schedule': crontab(minute=2, hour='18'),
    # }
}
celery.py
..............
from __future__ import absolute_import, unicode_literals
import os
from celery import Celery
# set the default Django settings module for the 'celery' program.
# Point Celery at the Django settings module before the app is created.
os.environ.setdefault('DJANGO_SETTINGS_MODULE', 'resolution_validator.settings')
app = Celery('resolution_validator')
# Using a string here means the worker doesn't have to serialize
# the configuration object to child processes.
# - namespace='CELERY' means all celery-related configuration keys
# should have a `CELERY_` prefix.
app.config_from_object('django.conf:settings', namespace='CELERY')
# Load task modules from all registered Django app configs.
app.autodiscover_tasks()
#app.task(bind=True)
def debug_task(self):
    # Build the log line first, then emit it.
    message = 'Request: {0!r}'.format(self.request)
    print(message)
tasks.py
............
from __future__ import absolute_import, unicode_literals
# NOTE(review): the celery.task module is the old-style API, deprecated and
# removed in Celery 5.x — prefer `from celery import shared_task`.
from celery.task import task
#task()
def add_task():
    """Demo beat task: print a greeting and report success."""
    print("hello world")
    a = 10  # unused local
    return True
init.py
...............
from __future__ import absolute_import, unicode_literals
# This will make sure the app is always imported when
# Django starts so that shared_task will use this app.
from .celery import app as celery_app
__all__ = ['celery_app']
I am also getting the following error
"RecursionError: maximum recursion depth exceeded" with the KeyError.
I am using Django==2.0,celery==4.2.0,python==3.5.2
Not able to get the solution .
There is a problem with your tasks.py. You should use #shared_task decorator instead of #task decorator
from __future__ import absolute_import, unicode_literals
from celery import shared_task
#shared_task
def add_task():
    """Beat-scheduled demo task: print a greeting and report success."""
    print("hello world")
    # Removed the unused local `a = 10` from the original snippet.
    return True
My settings.py
# NOTE(review): accepting 'pickle' and 'yaml' allows arbitrary code execution
# if untrusted clients can reach the broker — confirm this is intended.
CELERY_ACCEPT_CONTENT = ['json', 'msgpack', 'yaml', 'pickle', 'application/json']
CELERY_TASK_SERIALIZER = 'json'
CELERY_RESULT_SERIALIZER = 'json'
# Cache-based result backend from the old djcelery package.
CELERY_RESULT_BACKEND = 'djcelery.backends.cache:CacheBackend'
celery.py code
from __future__ import absolute_import
import os
from celery import Celery
# Point Celery at the Django settings module before importing settings.
os.environ.setdefault('DJANGO_SETTINGS_MODULE', 'webapp.settings')
from django.conf import settings
app = Celery('webapp')
# Old-style configuration (no namespace=): Celery reads the bare CELERY_*
# setting names directly from the Django settings module.
app.config_from_object('django.conf:settings')
# Old-style autodiscovery: explicitly enumerate the installed apps.
app.autodiscover_tasks(lambda: settings.INSTALLED_APPS)
#app.task(bind=True)
def debug_task(self):
    # Show the incoming task request in the worker log.
    req = self.request
    print('Request: {0!r}'.format(req))
tasks.py code
from __future__ import absolute_import
from celery.utils.log import get_task_logger
from celery import shared_task
import datetime
logger = get_task_logger(__name__)
#shared_task
def sample_code():
    """Log the current wall-clock time at info level; returns nothing."""
    stamp = datetime.datetime.now().strftime("%Y-%m-%d %H:%M")
    logger.info("Run time:" + str(stamp))
    return None
On shell I am importing and running as "sample_code.delay()"
Full error stack:
[2016-02-12 00:28:56,331: WARNING/MainProcess] Received and deleted unknown message. Wrong destination?!?
The full contents of the message body was: body: '\x80\x02}q\x01(U\x07expiresq\x02NU\x03utcq\x03\x88U\x04argsq\x04]q\x05U\x05chordq\x06NU\tcallbacksq\x07NU\x08errbacksq\x08NU\x07tasksetq\tNU\x02idq\nU$f02e662e-4eda-4180-9af4-2c8a1ceb57c4q\x0bU\x07retriesq\x0cK\x00U\x04taskq\rU$app.tasks.sample_codeq\x0eU\ttimelimitq\x0fNN\x86U\x03etaq\x10NU\x06kwargsq\x11}q\x12u.' (232b)
{content_type:u'application/x-python-serialize' content_encoding:u'binary'
delivery_info:{'consumer_tag': u'None4', 'redelivered': False, 'routing_key': u'celery', 'delivery_tag': 8, 'exchange': u'celery'} headers={}}
Please let me know where I am wrong
The way it was solved for me is change in command for running celery
It was giving issue for:
celery -A <app_path> worker --loglevel=DEBUG
But it's running without issue if we use:
celery -A <app_path> worker -l info
It may be helpful for other if they face same issue.