django standalone script: cannot import name 'Celery' from 'celery'

I am trying to run a standalone Django script:
import os, sys, django
proj_path = "/path/to/django-project"
import ipdb; ipdb.set_trace()
# This is so Django knows where to find stuff.
os.environ.setdefault("DJANGO_SETTINGS_MODULE", "boiler.settings")
sys.path.append(proj_path)
django.setup()
When I run it, it says:
ImportError: cannot import name 'Celery' from 'celery' (/path/to/django-project/boiler/celery.py)
My folder structure:
django-project
  -- boiler
     -- __init__.py
     -- settings.py
     -- celery.py
  -- manage.py
__init__.py
from .celery import app as celery_app
__all__ = ['celery_app']
celery.py
import os
from celery import Celery
import django
import sys
os.environ.setdefault('DJANGO_SETTINGS_MODULE', 'boiler.settings')
# This defines the celery app instance
redis = 'redis://:pass@localhost:6379/0'
app = Celery('boiler',
             broker=redis,
             backend=redis
             )
I am able to run celery using
activate virtualenv
cd to django-project
celery -A boiler worker --loglevel=debug
without any problems
But the standalone script is creating problems.

You have to name your celery.py something else, e.g. django_celery.py, otherwise it won't work. Celery on its own works fine with that file name, but you want to integrate it with Django and, as Santhosh said, the absolute import of itself is what is giving you issues: "from celery import Celery" inside your own celery.py ends up resolving to that same file instead of the installed celery package.
In your project's __init__.py you'll need something like:
from __future__ import absolute_import, unicode_literals
from your_path_to.django_celery import app as celery_app
__all__ = ('celery_app',)
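For reference, the standalone script from the question would then only need its import adjusted; a minimal sketch, assuming the rename above and the path and settings module from the question:
import os
import sys
import django

# Make the project importable before Django is configured.
proj_path = "/path/to/django-project"
sys.path.append(proj_path)

os.environ.setdefault("DJANGO_SETTINGS_MODULE", "boiler.settings")
django.setup()

# boiler/celery.py is now boiler/django_celery.py, so this import no longer
# shadows the installed celery package.
from boiler.django_celery import app as celery_app
print(celery_app)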

Related

Django Celery, App import error only on production

I have a file structure like this:
myapp
  artist_applications
    tasks.py
  tasks
    celery.py
# settings.py
INSTALLED_APPS = [
    'myapp.artist_application',
    ...
]
# celery.py
import os
from celery import Celery
os.environ.setdefault('DJANGO_SETTINGS_MODULE', 'myapp.settings.production')
app = Celery("tasks")
app.config_from_object('django.conf:settings', namespace="CELERY")
app.autodiscover_tasks()
# tasks.py
from tasks.celery import app as celery_myapp
from django.apps import apps
from django.conf import settings
import requests

@celery_myapp.task(name='sample_task')
def sample_task():
    print('TESTING CELERY')

@celery_myapp.task(name='publish_artist_task')
def publish_artist_task(payload, artist_id):
    r = requests.post(settings.PUBLISH_URL, json=payload)
    if r.status_code == 200:
        apps.get_model('artist_application', 'Artist').objects.filter(unique_id=artist_id).update(published=True)
    else:
        raise Exception("Error publishing artist with id: " + artist_id)
On development all is running fine when I start Celery with:
celery -A myapp.tasks worker -Q celery -l info
But on production I run the command (in a virtualenv) and I get the error:
django.core.exceptions.ImproperlyConfigured: Cannot import 'artist_application'. Check that 'myapp.artist_application.apps.ArtistApplication.name' is correct.
Any ideas where to look? I don't get how 'runserver' is loading the apps differently than wsgi does.
I had to put the full dotted path in apps.py:
from django.apps import AppConfig

class ArtistApplicationConfig(AppConfig):
    name = 'myapp.artist_application'
    verbose_name = "Artist Application"
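For reference, with AppConfig.name matching the package path, the INSTALLED_APPS entry can stay as the package path or point at the config class explicitly; a sketch (only the first entry comes from the question, and on older Django versions the explicit form may be needed to pick up the custom config):
# settings.py (sketch)
INSTALLED_APPS = [
    'myapp.artist_application',
    # or, explicitly:
    # 'myapp.artist_application.apps.ArtistApplicationConfig',
    ...
]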

Celery No hostname was supplied. Reverting to default 'localhost'

I have this in my /var/log/celery/w1.log
I'm following the steps for Celery here.
I have this in my celery.py
from __future__ import absolute_import, unicode_literals
import os
from celery import Celery
# Set the default Django settings module for the 'celery' program.
os.environ.setdefault('DJANGO_SETTINGS_MODULE', 'sample.settings')
app = Celery('sample2',
             broker='amqp://',
             include=['sample2.tasks'])
# Using a string here means the worker doesn't have to serialize
# the configuration object to child processes.
# - namespace='CELERY' means all celery-related configuration keys
# should have a `CELERY_` prefix.
app.config_from_object('django.conf:settings', namespace='CELERY')
if __name__ == '__main__':
    app.start()
What can I do to fix this? Thanks in advance.
Full document in this link.
You should just add the lines below to the __init__.py that sits next to settings.py:
from .celery import app as celery_app
__all__ = ['celery_app']
Project structure:
- proj/
  - manage.py
  - proj/
    - __init__.py
    - settings.py
    - urls.py

Can't import models to celery tasks.py file

In my tasks.py file I want to import models from the polls app, but when starting the worker I get django.core.exceptions.AppRegistryNotReady: Apps aren't loaded yet.
tasks.py
from __future__ import absolute_import
import sys, os
from polls.models import User
from .celery import app

@app.task
def add_user(user):
    # for user in users:
    print('urra')
    # user = User(user.first_name, user.last_name, user.email)
    # user.save()
celery.py:
from __future__ import absolute_import, unicode_literals
from celery import Celery
import os, sys
from task import celery_config
import dotenv
from os.path import dirname, join
app = Celery('task',
             broker='amqp://root:lusine_admin@localhost/task',
             backend='amqp://',
             include=['task.tasks'])
os.environ.setdefault("DJANGO_SETTINGS_MODULE", "task.settings")
app.config_from_object(celery_config)
# app.autodiscover_tasks(lambda: settings.INSTALLED_APPS)

if __name__ == '__main__':
    app.start()
Actually I first got an error that the polls module was not found, but then from bash I added it to PYTHONPATH and now I get this error.
Your error is with your config. If you want to connect Celery with your Django project, you have to initialize the Celery config from the Django settings. In your celery.py, replace this line:
app.config_from_object(celery_config)
with
app.config_from_object('django.conf:settings', namespace='CELERY')
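Put together, the celery.py would then follow the standard Django integration layout; a minimal sketch, assuming the broker and result backend move into Django settings as CELERY_BROKER_URL and CELERY_RESULT_BACKEND (which the CELERY namespace expects):
# celery.py (sketch)
from __future__ import absolute_import, unicode_literals
import os
from celery import Celery

# Must be set before Django settings are read.
os.environ.setdefault('DJANGO_SETTINGS_MODULE', 'task.settings')

app = Celery('task')

# Pull every CELERY_-prefixed key from Django's settings.
app.config_from_object('django.conf:settings', namespace='CELERY')

# Import tasks.py from each installed Django app (e.g. polls.tasks).
app.autodiscover_tasks()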

Celery doesn't see tasks

Here is my celery config:
config.celery.py
from __future__ import absolute_import, unicode_literals
import os
from celery import Celery
import sys
from django.conf import settings
os.environ.setdefault('DJANGO_SETTINGS_MODULE', 'config.settings')
app = Celery('config',
             backend=os.getenv('REDIS_URL'),
             broker=os.getenv('CLOUDAMQP_URL')
             )
app.conf.update(BROKER_URL=os.getenv('CLOUDAMQP_URL', 'redis://localhost'),
                CELERY_RESULT_BACKEND=os.getenv('REDIS_URL', 'redis://localhost'))
app.config_from_object('django.conf:settings', namespace='CELERY')
sys.path.append(os.path.join(os.getcwd(), "applications"))
app.autodiscover_tasks()
TASK_SERIALIZER = 'json'
Celery can't find tasks in the following structure:
project_name/
  apps/
    users/
      tasks.py
  config/
    celery.py
All my apps are registered in INSTALLED_APPS and I'm using app registration via apps.py files.
According to Celery's documentation, your file config/__init__.py should have something similar to this:
from __future__ import absolute_import, unicode_literals
# This will make sure the app is always imported when
# Django starts so that shared_task will use this app.
from .celery import app as celery_app
__all__ = ('celery_app',)
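With that in place, autodiscover_tasks() imports tasks.py from every installed app, so a task like the following hypothetical one in apps/users/tasks.py (assuming the users app is in INSTALLED_APPS) becomes visible to the worker:
# apps/users/tasks.py (hypothetical example)
from celery import shared_task

@shared_task
def deactivate_user(user_id):
    # shared_task binds to the app imported in config/__init__.py
    print('deactivating user %s' % user_id)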

Celery ImportError: No module named tasks

I'm creating a test scenario for Celery/RabbitMQ/Django. After browsing/reading the various posts similar to mine, I found this one, which is the closest, but it still does not help me. I'm getting the "ImportError: No module named tasks" error when executing celery worker.
Celery: 3.1.5 (not dj-celery)
Django: 1.5.5
Project structure:
testcele/ (project name)
  mycelery/ (myapp)
    __init__
    tasks
  testcele/
    __init__
    celery_task
    settings
testcele/testcele/celery_task:
from __future__ import absolute_import
import os
from celery import Celery, task, current_task
from django.conf import settings
os.environ.setdefault('DJANGO_SETTINGS_MODULE', 'testcele.settings')
app = Celery('testcele', backend='amqp', broker='amqp://guest@localhost//',
             include=['tasks'])

if __name__ == '__main__':
    app.start()

# Using a string here means the worker will not have to
# pickle the object when using Windows.
app.config_from_object('django.conf:settings')
app.autodiscover_tasks(lambda: settings.INSTALLED_APPS)
testcele/testcele/__init__.py:
from __future__ import absolute_import
from .celery_task import app as celery_app
mycelery/tasks.py:
from __future__ import absolute_import
from celery import Celery, task, current_task, shared_task
@shared_task()
def create_models():
    .
    .
    .
I'm running "celery worker -A testcele -l INFO" at the "testcele/" sub-dir. I have also tried running from the testcele/testcele sub-dir and from testcele/mycelery, and replacing "testcele" in the celery worker command with "tasks" or "mycelery". Obviously, this gives other errors.
What am I missing?
Thanks, Ricardo
Try adding an __init__.py file in your mycelery folder to make it a package. If that doesn't work, specify the tasks when defining your app, like so:
app = Celery('testcele', backend='amqp', broker='amqp://guest@localhost//',
             include=['mycelery.tasks'])