In my tasks.py file I want to import models from the polls app, but when I start the worker I get django.core.exceptions.AppRegistryNotReady: Apps aren't loaded yet.
tasks.py
from __future__ import absolute_import
import os
import sys

from polls.models import User
from .celery import app

@app.task
def add_user(user):
    # for user in users:
    print('urra')
    # user = User(user.first_name, user.last_name, user.email)
    # user.save()
celery.py:
from __future__ import absolute_import, unicode_literals
import os
import sys
from os.path import dirname, join

import dotenv
from celery import Celery

from task import celery_config

app = Celery('task',
             broker='amqp://root:lusine_admin@localhost/task',
             backend='amqp://',
             include=['task.tasks'])
os.environ.setdefault("DJANGO_SETTINGS_MODULE", "task.settings")
app.config_from_object(celery_config)
# app.autodiscover_tasks(lambda: settings.INSTALLED_APPS)

if __name__ == '__main__':
    app.start()
Actually, I first got the error that the polls module was not found; I added it to PYTHONPATH from bash, and now I get this error instead.
Your error is in your config. If you want to connect Celery with your Django project, you have to initialize the Celery config from the Django settings. In your celery.py, replace this line:
app.config_from_object(celery_config)
with
app.config_from_object('django.conf:settings', namespace='CELERY')
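For reference, a minimal Django-aware celery.py usually follows this shape (a sketch using the question's task project name; adjust the module names to your own project):

from __future__ import absolute_import, unicode_literals
import os

from celery import Celery

# Configure Django *before* the Celery app is created, so any task
# module that touches the ORM finds the app registry ready.
os.environ.setdefault('DJANGO_SETTINGS_MODULE', 'task.settings')

app = Celery('task')

# Pull every CELERY_-prefixed key from the Django settings.
app.config_from_object('django.conf:settings', namespace='CELERY')

# Register the tasks.py modules of all installed Django apps.
app.autodiscover_tasks()

With the CELERY namespace, the broker URL moves into settings.py as CELERY_BROKER_URL, so credentials no longer need to be hard-coded in celery.py.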
I have a problem with my Celery + RabbitMQ setup: only the first function I defined in celery.py, debug_task(), is executed.
send_user_mail(randomNumber, email) is not working, while debug_task works, so at least that one is registered.
This is the celery console
[2022-10-08 22:28:48,081: ERROR/MainProcess] Received unregistered task of type 'callservices.celery.send_proveedor_mail_new_orden'. The message has been ignored and discarded.
Did you remember to import the module containing this task? Or maybe you are using relative imports?
Why is it unregistered?
celery.py
from __future__ import absolute_import, unicode_literals
import os

from celery import Celery
from django.conf import settings
from django.core.mail import EmailMultiAlternatives, send_mail

os.environ.setdefault('DJANGO_SETTINGS_MODULE', 'callservices.settings')

app = Celery('tasks', broker='pyamqp://guest@localhost//')
app.config_from_object('django.conf:settings')
app.autodiscover_tasks(settings.INSTALLED_APPS)

@app.task()
def debug_task():
    print("hi all")

@app.task()
def send_user_mail(randomNumber, email):
    subject = 'email validation - ServicesYA'
    cuerpo = "Your number is: " + str(randomNumber)
    send_mail(subject, cuerpo, 'xxx.ssmtp@xxx.com', [email], fail_silently=False)
    return 1
This is __init__.py:
# This will make sure the app is always imported when
# Django starts so that shared_task will use this app.
from celery import app as celery_app
__all__ = ('celery_app',)
and in settings.py I added this line:
BROKER_URL = "amqp://guest:guest@localhost:5672//"
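One thing that stands out in the posted __init__.py is the import target: it pulls app from the installed celery package rather than from the project's own celery.py. A corrected sketch, assuming the file lives in the callservices package:

# callservices/__init__.py
from __future__ import absolute_import, unicode_literals

# Note the leading dot: import the project's Celery app, not the
# celery library, so the app and its tasks load when Django starts.
from .celery import app as celery_app

__all__ = ('celery_app',)

If the task still shows up as unregistered, the [tasks] banner the worker prints at startup lists exactly which task names were registered, which helps spot name mismatches like 'callservices.celery.send_proveedor_mail_new_orden'.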
I have an app, structured as below, that lets a user schedule tasks to query the FB API each morning, afternoon, etc. to pull page/post data (3am in this case).
The problem I am experiencing is that the scheduler executes each job twice, which is obviously undesirable. Oddly, the issue doesn't occur locally, only in production; I am hosting on Heroku with 1 web dyno and 1 worker dyno, each with only 1 process. I am therefore leaning towards Heroku being the issue.
I have a page that lists the currently scheduled jobs, and when I refresh it the same job instance flicks between two different values (screenshot below). It is as if two BackgroundScheduler instances are running.
I suspected this was down to the BackgroundScheduler being initiated twice (once in flasky.py and again in tasks.py), so I created a temporary fix to stop the double initialisation, but I still see the same issue and am now stuck. Any help would be much appreciated.
app/
    __init__.py
    decorators.py
    models.py
    tasks.py
    auth/
        __init__.py
        errors.py
        forms.py
        views.py
    main/
        __init__.py
        errors.py
        forms.py
        views.py
    static/
    templates/
    migrations/
config.py
flasky.py
Procfile
requirements.txt
app/flasky.py
from flask import Flask,render_template, session, redirect, url_for, flash
import os
from app import create_app,db
from app.models import User,Role
from datetime import datetime
from flask_migrate import Migrate,upgrade
app = create_app(os.getenv('FLASK_CONFIG') or 'default')
app/__init__.py
from flask import Flask, render_template
from flask_bootstrap import Bootstrap
from flask_moment import Moment
from flask_migrate import Migrate
from flask_sqlalchemy import SQLAlchemy
from config import config
from flask_session import Session
from apscheduler.schedulers.background import BackgroundScheduler
from apscheduler.jobstores.sqlalchemy import SQLAlchemyJobStore
import rq
from redis import Redis
from flask_login import LoginManager
from worker import conn
import os

bootstrap = Bootstrap()
moment = Moment()
db = SQLAlchemy()
scheduler = BackgroundScheduler()
migrate = Migrate()
session = Session()
login_manager = LoginManager()
login_manager.login_view = 'auth.login'

def create_app(config_name='default'):
    app = Flask(__name__)
    app.config.from_object(config[config_name])
    db.init_app(app)
    bootstrap.init_app(app)
    moment.init_app(app)
    migrate.init_app(app, db)
    session.init_app(app)
    login_manager.init_app(app)
    if not scheduler.running:
        scheduler.start()
        jobstore_url = os.environ.get('DATABASE_URL')
        scheduler.add_jobstore(SQLAlchemyJobStore(url=jobstore_url), 'sqlalchemy')
    from .main import main as main_blueprint
    from .auth import auth as auth_blueprint
    app.register_blueprint(main_blueprint)
    app.register_blueprint(auth_blueprint, url_prefix='/auth')
    app.task_queue = rq.Queue('flasky', connection=Redis.from_url(os.environ.get('REDIS_URL')))
    if app.config['SSL_REDIRECT']:
        from flask_sslify import SSLify
        sslify = SSLify(app)
    return app
app/tasks.py
from . import create_app,db
from .models import User,Tokens,Files
from .decorators import token_getter
from flask_login import current_user
import requests
import datetime as dt
import urllib
import os
app = create_app(os.getenv('FLASK_CONFIG') or 'default')
<FUNCTIONS HERE>
app/auth/views.py
from flask import render_template,url_for,redirect,request,flash,session,current_app,Response
from .. import db,scheduler
from . import auth
from ..models import User,Role,Tokens,Files
from flask_login import login_user, logout_user, login_required
from ..decorators import admin_required, token_setter, token_getter,permission_required
import requests
import urllib
from .forms import LoginForm, SubmitConnection, ScheduleJobForm
from app.tasks import refreshed_google_client,test_context
app/main/views.py
from flask import render_template, session, redirect, url_for, flash,current_app,request
from datetime import datetime
from . import main
from .. import db,scheduler
from ..models import User,Tokens
from .forms import NameForm,AnalyticsForm
from flask_login import login_required,current_user
from ..decorators import admin_required,permission_required
import requests
import rq
from redis import Redis
from app.tasks import refreshed_google_client,load_analytics
@main.route('/ig_sync', methods=['GET', 'POST'])
@login_required
@permission_required(4)
def ig_sync():
    form = IGAnalyticsForm()
    if request.method == 'POST':
        from app.tasks import load_ig_sync
        if form.validate_on_submit():
            if form.submit_analytics_schedule.data:
                # GET VARIABLES FROM FORM
                scheduler.add_job(func=load_ig_sync, args=[#VARIABLES HERE], trigger='cron', hour=3, id=f'SYNC_IG_{page_name}', jobstore='sqlalchemy')
                return redirect(url_for('main.job_schedule'))
    return render_template('ig_sync.html', form=form)
app/Procfile
web: gunicorn flasky:app
worker: rq worker -u $REDIS_URL flasky
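A pattern that side-steps the double execution on Heroku is to keep BackgroundScheduler out of create_app() entirely and give the scheduler its own single process, so it exists exactly once no matter how many processes gunicorn forks. A minimal sketch under that assumption (clock.py is a hypothetical new module, not part of the original app):

# clock.py -- run the scheduler in a dedicated dyno so only one instance exists
import os

from apscheduler.jobstores.sqlalchemy import SQLAlchemyJobStore
from apscheduler.schedulers.blocking import BlockingScheduler

scheduler = BlockingScheduler()
scheduler.add_jobstore(
    SQLAlchemyJobStore(url=os.environ['DATABASE_URL']), 'sqlalchemy'
)

if __name__ == '__main__':
    # BlockingScheduler keeps this process alive; the web dyno only
    # adds jobs to the shared 'sqlalchemy' jobstore and never starts
    # a scheduler of its own.
    scheduler.start()

with a matching Procfile entry such as clock: python clock.py. One caveat: APScheduler 3.x is not notified immediately when another process adds a job to a shared jobstore, so this split suits coarse schedules like the 3am cron job here.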
I can't import my model into my celery.py file to use it in a scheduled task - I always get django.core.exceptions.AppRegistryNotReady: Apps aren't loaded yet.
I'm tearing my hair out - a few people seem to have hit the same error, but none in the same circumstances, and I've tried all their fixes and nothing works.
My celery.py file in my main Django app:
from __future__ import absolute_import, unicode_literals
import requests
import os
from celery import Celery
from WeatherData.models import LondonWeather

# set the default Django settings module for the 'celery' program.
os.environ.setdefault('DJANGO_SETTINGS_MODULE', 'Weather.settings')
from .settings import OPEN_WEATHER_API_KEY, OPEN_WEATHER_API_URL

app = Celery('Weather')
app.config_from_object('django.conf:settings', namespace='CELERY')
# Load task modules from all registered Django app configs.
app.autodiscover_tasks()

@app.on_after_configure.connect
def setup_periodic_tasks(sender, **kwargs):
    # Gets London weather every hour.
    sender.add_periodic_task(10.0, get_weather_task.s(), name='london_weather_test')
    sender.add_periodic_task(3600.0, get_weather_task.s(), name='london_weather')

@app.task()
def get_weather_task():
    querystring = {"q": "London,UK"}
    headers = {
        'x-api-key': OPEN_WEATHER_API_KEY,
    }
    res = requests.get(OPEN_WEATHER_API_URL, headers=headers, params=querystring).json()
    LondonWeather.objects.create(
        longitude=res.get('coord', {}).get('lon', 0),
        latitude=res.get('coord', {}).get('lat', 0),
        main_weather=res.get('weather', [{}])[0].get('main', 'Rain'),
        description=res.get('weather', [{}])[0].get('description', 'No data'),
        temperature=res.get('main', {}).get('temp', 0),
        pressure=res.get('main', {}).get('pressure', 0),
        humidity=res.get('main', {}).get('humidity', 0),
        min_temp=res.get('main', {}).get('temp_min', 0),
        max_temp=res.get('main', {}).get('temp_max', 0),
        wind_speed=res.get('wind', {}).get('speed', 0),
        wind_direction=res.get('wind', {}).get('deg', 0),
        clouds=res.get('clouds', {}).get('all', 0),
    )
    return res
The __init__.py of my main app looks like this because of Celery:
from __future__ import absolute_import, unicode_literals
# This will make sure the app is always imported when
# Django starts so that shared_task will use this app.
from .celery import app as celery_app
__all__ = ('celery_app',)
Is this messing with Django's imports?
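The traceback is consistent with the module-level from WeatherData.models import LondonWeather in celery.py: that line runs before django.setup() has populated the app registry. A common restructuring (a sketch keeping the question's names) moves the task into WeatherData/tasks.py, uses shared_task, and defers the model import into the function body:

# WeatherData/tasks.py -- sketch: import models lazily, inside the task
import requests
from celery import shared_task

@shared_task
def get_weather_task():
    # Imported here, after Django has finished loading all apps.
    from django.conf import settings
    from WeatherData.models import LondonWeather

    res = requests.get(
        settings.OPEN_WEATHER_API_URL,
        headers={'x-api-key': settings.OPEN_WEATHER_API_KEY},
        params={'q': 'London,UK'},
    ).json()
    LondonWeather.objects.create(
        temperature=res.get('main', {}).get('temp', 0),
        # ...remaining fields exactly as in the original create() call
    )
    return res

celery.py then keeps only the app definition, config_from_object and autodiscover_tasks(), and the periodic schedule can reference the task by name, so no model is imported at load time.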
I want to display the current exchange rate of the USD/Bitcoin price pair on my website.
For this I set up Celery and a small periodic_task.
I'm currently not really able to understand how to call this periodic_task or display the JSON data it returns.
This is how my Celery setup looks:
__init__.py
from __future__ import absolute_import, unicode_literals
from .celery import app as celery_app
__all__ = ('celery_app',)
celery.py
from __future__ import absolute_import, unicode_literals
from celery import Celery
import os

os.environ.setdefault('DJANGO_SETTINGS_MODULE', 'myproject.settings')
app = Celery('myproject')
app.config_from_object('django.conf:settings', namespace='CELERY')
app.autodiscover_tasks()

@app.task(bind=True)
def debug_task(self):
    print('Request: {0!r}'.format(self.request))
tasks.py
from celery import Celery
from celery.schedules import crontab
from celery.task import periodic_task
from celery.utils.log import get_task_logger
import requests

logger = get_task_logger(__name__)
app = Celery('tasks', broker='redis://127.0.0.1')

@app.task
def test():
    return "Test Successful"

@periodic_task(run_every=(crontab(minute='*/15')), name="get_btc_exchange_rate", ignore_result=True)
def get_exchange_rate():
    api_url = "https://api.coinmarketcap.com/v1/ticker/?limit=1"
    try:
        exchange_rate = requests.get(api_url).json()
        logger.info("BTC Exchange rate updated.")
    except Exception as e:
        print(e)
        exchange_rate = dict()
    return exchange_rate
I'm currently starting Celery with this script:
https://gist.github.com/psych0der/44a8994495abee1b4e832420c1c2974d
So my question is: how can I trigger that periodic_task and display the returned JSON field "price_usd" in a template?
Thanks in advance.
You'll need to start a celerybeat instance. It will schedule and send off events that you can set on an interval.
http://docs.celeryproject.org/en/latest/userguide/periodic-tasks.html
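Concretely (a sketch assuming the question's myproject layout), beat is a second process that runs next to the worker. Newer Celery versions drop the @periodic_task decorator in favour of a beat_schedule entry in celery.py, referencing the task by its registered name:

# myproject/celery.py -- schedule the task via beat_schedule
app.conf.beat_schedule = {
    'get_btc_exchange_rate': {
        'task': 'get_btc_exchange_rate',  # the name= given to the task above
        'schedule': 15 * 60.0,            # every 15 minutes, in seconds
    },
}

Then run celery -A myproject beat -l info alongside the worker. To show price_usd in a template, don't call the task from the view: have the task persist the latest rate (a model row or a cache key) and let an ordinary Django view read that value and pass it to the template.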
Here is my celery config:
config/celery.py
from __future__ import absolute_import, unicode_literals
import os
from celery import Celery
import sys
from django.conf import settings
os.environ.setdefault('DJANGO_SETTINGS_MODULE', 'config.settings')
app = Celery('config',
             backend=os.getenv('REDIS_URL'),
             broker=os.getenv('CLOUDAMQP_URL'))
app.conf.update(BROKER_URL=os.getenv('CLOUDAMQP_URL', 'redis://localhost'),
                CELERY_RESULT_BACKEND=os.getenv('REDIS_URL', 'redis://localhost'))
app.config_from_object('django.conf:settings', namespace='CELERY')
sys.path.append(os.path.join(os.getcwd(), "applications"))
app.autodiscover_tasks()

TASK_SERIALIZER = 'json'
Celery can't find the tasks in the following structure:
project_name/
    apps/
        users/
            tasks.py
    config/
        celery.py
All my apps are registered in INSTALLED_APPS, and I register them via their apps.py files.
According to Celery's documentation, your file config/__init__.py should have something similar to this:
from __future__ import absolute_import, unicode_literals
# This will make sure the app is always imported when
# Django starts so that shared_task will use this app.
from .celery import app as celery_app
__all__ = ('celery_app',)
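It also matters that the tasks themselves are declared so autodiscovery can register them. A sketch of apps/users/tasks.py (the task name here is hypothetical) using shared_task:

# apps/users/tasks.py
from celery import shared_task

@shared_task
def sync_users():
    # shared_task binds to whatever app __init__.py exposes as
    # celery_app, so this module needs no import from config.celery.
    return 'ok'

Provided the directory appended to sys.path matches the actual apps/ folder name, INSTALLED_APPS entries like 'users' resolve, and app.autodiscover_tasks() then imports each app's tasks.py and registers anything decorated with shared_task.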