I have to allow the admin to set the cron time from the admin view in Django.
I have a configuration model where the admin can enter each time as a record:
2 am (record 1)
4 pm (record 2)
For each record I have to run the cron job.
But the cron schedule is defined in settings.py:
CRONJOBS = [
    ('*/5 * * * *', 'myapp.cron.my_scheduled_job')
]
https://pypi.org/project/django-crontab/
How can I make this setting configurable from the admin?
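For context, the kind of configuration model described above might look roughly like this (the model and field names are only illustrative, not from the actual project):
from django.db import models

class CronConfig(models.Model):
    # Hypothetical sketch: one record per time of day the admin wants the job to run.
    run_at = models.TimeField(help_text="Time of day to run the job, e.g. 02:00 or 16:00")

    def __str__(self):
        return self.run_at.strftime('%H:%M')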
I don't see a nice way of doing it, because vanilla django_crontab only populates the crontab from settings. You would be better off finding another package that supports what you want. But if you have no choice, I think the following will work:
my_crontab.py
from django_crontab.app_settings import Settings
from django_crontab.crontab import Crontab
from django.conf import settings

# The function needs to return the crontab
# in the same format as settings.py
def populate_from_db():
    # some db magic
    return [('*/5 * * * *', 'myapp.cron.my_scheduled_job')]

class DBCronSettings(Settings):
    def __init__(self, settings):
        super().__init__(settings)
        self.CRONJOBS = populate_from_db()

class DBCrontab(Crontab):
    def __init__(self, **options):
        super().__init__(**options)
        self.settings = DBCronSettings(settings)
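The "# some db magic" part is deliberately left open. As one possible sketch, populate_from_db could read the admin-entered times from a hypothetical CronConfig model like the one sketched in the question above (the model name, field, and job path are assumptions):
def populate_from_db():
    # Assumption: CronConfig is the admin-editable model holding one run time per record.
    from myapp.models import CronConfig
    jobs = []
    for config in CronConfig.objects.all():
        # cron fields: minute hour day-of-month month day-of-week
        schedule = '%d %d * * *' % (config.run_at.minute, config.run_at.hour)
        jobs.append((schedule, 'myapp.cron.my_scheduled_job'))
    return jobs
The import is done inside the function so the model is only loaded once Django's app registry is ready.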
You need to subclass Crontab and Settings. Make DBCronSettings read your cron jobs from the database and then use these settings in your custom DBCrontab.
Then write your own crontab management command. Its handle method is exactly the same as in the base command, but it uses your DBCrontab class instead of the original.
command.py
from django_crontab.management.commands.crontab import Command as CrontabCommand
from my_crontab import DBCrontab as Crontab

class Command(CrontabCommand):
    def handle(self, *args, **options):
        """
        Dispatches by given subcommand
        """
        if options['subcommand'] == 'add':  # add command
            # initialize a Crontab class with any specified options
            with Crontab(**options) as crontab:
                crontab.remove_jobs()  # remove all jobs specified in settings from the crontab
                crontab.add_jobs()     # and add them back
        elif options['subcommand'] == 'show':  # show command
            # initialize a readonly Crontab class with any specified options
            with Crontab(readonly=True, **options) as crontab:
                crontab.show_jobs()  # list all currently active jobs from crontab
        elif options['subcommand'] == 'remove':  # remove command
            # initialize a Crontab class with any specified options
            with Crontab(**options) as crontab:
                crontab.remove_jobs()  # remove all jobs specified in settings from the crontab
        elif options['subcommand'] == 'run':  # run command
            Crontab().run_job(options['jobhash'])  # run the job with the specified hash
        else:
            # output the help string if the user entered something not specified above
            print(self.help)
Also don't forget to remove django_crontab from INSTALLED_APPS if you plan to name your command 'crontab'.
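Once the custom command is in place, you use it exactly like the stock django_crontab command, for example:
python manage.py crontab add
python manage.py crontab show
python manage.py crontab remove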
Related
Hello guys, I'm trying to use django_crontab in my Django project and it's not working. Does anyone know anything about this? I'm using Linux CentOS 8. I want to schedule a task that adds some data to my database. Can someone help me?
The steps that I have taken are:
1) pip install django-crontab
2) add it to INSTALLED_APPS
3) build my cron function:
from django.core.management.base import BaseCommand
from backups.models import Backups
from devices.models import Devices
from datetime import datetime
from jnpr.junos import Device
from jnpr.junos.exception import ConnectError
from lxml import etree
from django.http import HttpResponse
from django.core.files import File

class Command(BaseCommand):
    def handle(self, *args, **kwargs):
        devices = Devices.objects.all()
        for x in devices:
            devid = Devices.objects.get(pk=x.id)
            ip = x.ip_address
            username = x.username
            password = x.password
            print(devid, ip, username, password)
            dev1 = Device(host=ip, user=username, passwd=password)
            try:
                dev1.open()
                stype = "success"
                dataset = dev1.rpc.get_config(options={'format': 'set'})
                datatext = dev1.rpc.get_config(options={'format': 'text'})
                result = (etree.tostring(dataset, encoding='unicode'))
                file_name = f'{ip}_{datetime.now().date()}.txt'
                print(file_name)
                with open("media/" + file_name, 'w') as f:
                    f.write(etree.tostring(dataset, encoding='unicode'))
                    f.write(etree.tostring(datatext, encoding='unicode'))
                backup = Backups(device_id=devid, host=ip, savetype=stype, time=datetime.now(), backuptext=file_name)
                print(backup)
                backup.save()
            except ConnectError as err:
                print("Cannot connect to device: {0}".format(err))
                print("----- Failed ----------")
                stype = "Cannot connect to device: {0}".format(err)
                backup = Backups(device_id=devid, host=ip, savetype=stype, time=datetime.now())
                backup.save()
4) add my cron job to my settings.py file:
CRONJOBS = [ ('*/5 * * * *', 'django.core.management.call_command', ['backup-dev']), ]
5) python manage.py crontab add
6) python manage.py crontab show
Currently active jobs in crontab:
0662c1224789b131740fddef54f273c1 -> ('* * * * *', 'django.core.management.call_command', ['backup-dev'])
And it is still not working. Any ideas?
When I run the command "python manage.py backup-dev" manually, my task works perfectly.
I also tried adding the management command directly to the CentOS machine's crontab with
crontab -e
and still nothing. Any ideas?
I added settings for django_crontab
INSTALLED_APPS = [
    ....
    'django_crontab',
    ....
]
and a setting for cron:
CRONJOBS = [
    ('* * * * *', 'app.cron.task'),
]
added a task method to app.cron:
from .views import add_to_cache_table

def task():
    add_to_cache_table()
and created the method add_to_cache_table() in app.views:
from django.core.cache import cache

def add_to_cache_table():
    cache.add('key', 'value')
But when I try to get the value from the cache by that key, I get None.
Can I use the database cache in a cron job and write to it?
Thanks!
The hosting service (PythonAnywhere) doesn't support crontab for users.
https://github.com/kraiz/django-crontab/issues/14
Running a small db on PythonAnywhere, and I'm trying to set up a scheduled .backup of my sqlite3 database. Is there any way, in the command line, to add a time/date stamp to the filename so that it doesn't overwrite the previous day's backup?
Here's the code I'm using, if it matters:
sqlite3 db.sqlite3
.backup dbbackup.sqlite3
.quit
Running every 24 hours. The previous day's backup gets overwritten, though. I'd love to just be able to save it as dbbackup.timestamp.sqlite3 or something, so I could have multiple backups available.
Thanks!
I suggest you handle this case with a management command and a cron job.
This is an example of how to do it; save the file as e.g. yourapp/management/commands/dbackup.py, and don't forget to add the __init__.py files:
yourapp/management/__init__.py
yourapp/management/commands/__init__.py
yourapp/management/commands/dbackup.py
But first, add these lines to your settings.py:
USERNAME_SUPERUSER = 'yourname'
PASSWORD_SUPERUSER = 'yourpassword'
EMAIL_SUPERUSER = 'youremail@domain.com'
DATABASE_NAME = 'db.sqlite3'
The important project tree paths, if you are deploying at PythonAnywhere:
/home/yourusername/yourproject/manage.py
/home/yourusername/yourproject/db.sqlite3
/home/yourusername/yourproject/yourproject/settings.py
/home/yourusername/yourproject/yourapp/management/commands/dbackup.py
Add the script below to yourapp/management/commands/dbackup.py; you can also customize it as you need.
import os
import time

from django.conf import settings
from django.contrib.auth.models import User
from django.core.management.base import BaseCommand, CommandError

USERNAME_SUPERUSER = settings.USERNAME_SUPERUSER
PASSWORD_SUPERUSER = settings.PASSWORD_SUPERUSER
EMAIL_SUPERUSER = settings.EMAIL_SUPERUSER
DATABASE_NAME = settings.DATABASE_NAME  # eg: 'db.sqlite3'

class Command(BaseCommand):
    help = 'Command to deploy and backup the latest database.'

    def add_arguments(self, parser):
        parser.add_argument(
            '-b', '--backup', action='store_true',
            help='Just backup command confirmation.'
        )

    def success_info(self, info):
        return self.stdout.write(self.style.SUCCESS(info))

    def error_info(self, info):
        return self.stdout.write(self.style.ERROR(info))

    def handle(self, *args, **options):
        backup = options['backup']
        if backup == False:
            return self.print_help('manage.py', 'dbackup')

        # Removing media files, if you need to remove all media files
        # os.system('rm -rf media/images/')
        # self.success_info("[+] Removed media files at `media/images/`")

        # Removing database `db.sqlite3`
        if os.path.isfile(DATABASE_NAME):
            # backup the latest database, eg to: `db.2017-02-03.sqlite3`
            backup_database = 'db.%s.sqlite3' % time.strftime('%Y-%m-%d')
            os.rename(DATABASE_NAME, backup_database)
            self.success_info("[+] Backup the database `%s` to %s" % (DATABASE_NAME, backup_database))

            # remove the latest database
            # (os.rename above has already moved the original file away)
            self.success_info("[+] Removed %s" % DATABASE_NAME)

        # Removing all migration files for `yourapp`
        def remove_migrations(path):
            exclude_files = ['__init__.py', '.gitignore']
            path = os.path.join(settings.BASE_DIR, path)
            filelist = [
                f for f in os.listdir(path)
                if f.endswith('.py') and f not in exclude_files
            ]
            for f in filelist:
                os.remove(path + f)
            self.success_info('[+] Removed migration files for {}'.format(path))

        # do remove all migration files
        remove_migrations('yourapp/migrations/')

        # Removing all `.pyc` files
        os.system("find . -name '*.pyc' -delete")
        self.success_info('[+] Removed all *.pyc files.')

        # Creating database migrations
        # These commands should re-generate the new database, eg: `db.sqlite3`
        os.system('python manage.py makemigrations')
        os.system('python manage.py migrate')
        self.success_info('[+] Created database migrations.')

        # Creating a superuser
        user = User.objects.create_superuser(
            username=USERNAME_SUPERUSER,
            password=PASSWORD_SUPERUSER,
            email=EMAIL_SUPERUSER
        )
        user.save()
        self.success_info('[+] Created a superuser for `{}`'.format(USERNAME_SUPERUSER))
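With the command saved, you can test it by hand before wiring it into cron:
python manage.py dbackup -b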
Set up this command with crontab:
$ sudo crontab -e
And add the following line:
# [minute] [hour] [day of month] [month] [day of week]
59 23 * * * source ~/path/to/yourenv/bin/activate && cd ~/path/to/yourenv/yourproject/ && ./manage.py dbackup -b
But if you deploy at PythonAnywhere, you only need to add a scheduled task:
Daily at [hour]:[minute] UTC ... fill in hour=23 and minute=59
source /home/yourusername/.virtualenvs/yourenv/bin/activate && cd /home/yourusername/yourproject/ && ./manage.py dbackup -b
Update 1
I suggest you replace the commands that execute manage.py through os.system, such as os.system('python manage.py makemigrations'), with the call_command function:
from django.core.management import call_command
call_command('collectstatic', verbosity=3, interactive=False)
call_command('migrate', 'myapp', verbosity=3, interactive=False)
...which is equal to the following commands typed in a terminal:
$ ./manage.py collectstatic --noinput -v 3
$ ./manage.py migrate myapp --noinput -v 3
See "Running management commands from your code" in the Django docs.
Update 2
The previous setup is for when you need to re-deploy your project with a fresh database. If you only want to back up the database by copying it, you can use shutil.copyfile:
import os
import time
import shutil

from django.conf import settings
from django.core.management.base import BaseCommand, CommandError

DATABASE_NAME = settings.DATABASE_NAME  # eg: 'db.sqlite3'

class Command(BaseCommand):
    help = 'Command to deploy and backup the latest database.'

    def add_arguments(self, parser):
        parser.add_argument(
            '-b', '--backup', action='store_true',
            help='Just backup command confirmation.'
        )

    def success_info(self, info):
        return self.stdout.write(self.style.SUCCESS(info))

    def error_info(self, info):
        return self.stdout.write(self.style.ERROR(info))

    def handle(self, *args, **options):
        backup = options['backup']
        if backup == False:
            return self.print_help('manage.py', 'dbackup')

        if os.path.isfile(DATABASE_NAME):
            # backup the latest database, eg to: `db.2017-02-29.sqlite3`
            backup_database = 'db.%s.sqlite3' % time.strftime('%Y-%m-%d')
            shutil.copyfile(DATABASE_NAME, backup_database)
            self.success_info("[+] Backup the database `%s` to %s" % (DATABASE_NAME, backup_database))
I am a newbie to Celery. I created a project following the instructions in the Celery 4.1 docs. Below are my project folders and files:
mycelery/
    test_celery/
        celery_app.py
        tasks.py
        __init__.py
1-celery_app.py
from __future__ import absolute_import
import os
from celery import Celery
from kombu import Queue, Exchange
from celery.schedules import crontab
import datetime

app = Celery('test_celery',
             broker='amqp://jimmy:jimmy123@localhost/jimmy_v_host',
             backend='rpc://',
             include=['test_celery.tasks'])

# Optional configuration, see the application user guide.
app.conf.update(
    result_expires=3600,
)

if __name__ == '__main__':
    app.start()

app.name
2-tasks.py
from __future__ import absolute_import
from test_celery.celery_app import app
import time
from kombu import Queue, Exchange
from celery.schedules import crontab
import datetime

app.conf.beat_schedule = {
    'planner_1': {
        'task': 'test_celery.tasks.printTask',
        'schedule': crontab(minute='*/1'),
    },
}

@app.task
def longtime_add(x, y):
    print 'long time task begins'
    # sleep 5 seconds
    time.sleep(5)
    print 'long time task finished'
    return x + y

@app.task
def printTask():
    print 'Hello i am running'
    time = str(datetime.datetime.now())
    file = open('/home/hub9/mycelery/data.log', 'ab')
    file.write(time)
    file.close()
I copied the celeryd and celerybeat files from the Celery GitHub project to /etc/init.d/ and made them executable. Then I created celeryd and celerybeat files in /etc/default/.
I- /etc/default/celeryd
# Names of nodes to start
# most will only start one node:
#CELERYD_NODES="worker1"
# but you can also start multiple and configure settings
# for each in CELERYD_OPTS (see `celery multi --help` for examples).
CELERYD_NODES="worker1 worker2 worker3"
# Absolute or relative path to the 'celery' command:
CELERY_BIN="/usr/local/bin/celery"
#CELERY_BIN="/virtualenvs/def/bin/celery"
# Where to chdir at start. path to folder containing task
CELERYD_CHDIR="/home/hub9/mycelery/test_celery/"
# App instance to use
# comment out this line if you don't use an app
#CELERY_APP = "file/locatin/of/app"
# or fully qualified:
CELERY_APP="test_celery.celery_app:app"
# Extra command-line arguments to the worker
CELERYD_OPTS="--time-limit=3000 --concurrency=3 --config=celeryconfig"
# %N will be replaced with the first part of the nodename.
CELERYD_LOG_FILE="/var/log/celery/%N.log"
CELERYD_PID_FILE="/var/run/celery/%N.pid"
# Workers should run as an unprivileged user.
# You need to create this user manually (or you can choose
# a user/group combination that already exists, e.g. nobody).
CELERYD_USER="celery"
CELERYD_GROUP="celery"
# If enabled pid and log directories will be created if missing,
# and owned by the userid/group configured.
CELERY_CREATE_DIRS=1
II- /etc/default/celerybeat
# Names of nodes to start
# most will only start one node:
#CELERYD_NODES="worker1"
# but you can also start multiple and configure settings
# for each in CELERYD_OPTS (see `celery multi --help` for examples).
CELERYD_NODES="worker1 worker2 worker3"
# Absolute or relative path to the 'celery' command:
CELERY_BIN="/usr/local/bin/celery"
#CELERY_BIN="/virtualenvs/def/bin/celery"
# Where to chdir at start. path to folder containing task
CELERYD_CHDIR="/home/hub9/mycelery/test_celery/"
# App instance to use
# comment out this line if you don't use an app
#CELERY_APP = "file/locatin/of/app"
# or fully qualified:
CELERY_APP="test_celery.celery_app:app"
# Extra command-line arguments to the worker
CELERYD_OPTS="--time-limit=3000 --concurrency=3 --config=celeryconfig"
# %N will be replaced with the first part of the nodename.
CELERYD_LOG_FILE="/var/log/celery/%N.log"
CELERYD_PID_FILE="/var/run/celery/%N.pid"
# Workers should run as an unprivileged user.
# You need to create this user manually (or you can choose
# a user/group combination that already exists, e.g. nobody).
CELERYD_USER="celery"
CELERYD_GROUP="celery"
# If enabled pid and log directories will be created if missing,
# and owned by the userid/group configured.
CELERY_CREATE_DIRS=1
After that I created the celery user and group.
Here is my problem: I can successfully run this project with the command celery -A test_celery.celery_app worker -l info --beat, but when I start my project using sudo service celeryd start OR sudo service celerybeat start,
it gives me an import error: no module named test_celery.celery_app.
Please give me a hint about what I am doing wrong.
I use Pylons in my job, but I'm new to Django. I'm making an RSS filtering application, and so I'd like to have two backend processes that run on a schedule: one to crawl RSS feeds for each user, and another to determine the relevance of individual posts relative to users' past preferences. In Pylons, I'd just write paster commands to update the db with that data. Is there an equivalent in Django? E.g., is there a way to run the equivalent of python manage.py shell in a non-interactive mode?
I think that's what Custom Management Commands are there for.
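As a rough sketch (the app, file, and command names below are made up for illustration), such a command lives in yourapp/management/commands/ and looks like this:
# yourapp/management/commands/crawl_feeds.py  (names are illustrative)
from django.core.management.base import BaseCommand

class Command(BaseCommand):
    help = "Crawl the RSS feeds for every user."

    def handle(self, *args, **options):
        # the real crawling / relevance logic goes here
        self.stdout.write("crawling feeds...\n")
Cron (or any scheduler) can then run it non-interactively with python manage.py crawl_feeds, which plays the same role as the paster commands mentioned in the question.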
Yes, this is actually how I run my cron backup scripts. You just need to load your virtualenv (if you're using virtual environments) and your project settings.
I hope you can follow this; after the line # manage.py shell, you can write your code just as if you were in manage.py shell.
You can import your virtualenv like so:
import site
site.addsitedir(VIRTUALENV_PATH + '/lib/python2.6/site-packages')
You can then add the django project to the path
import sys
sys.path.append(DJANGO_ROOT)
sys.path.append(PROJECT_PATH)
Next you load the django settings and chdir to the django project
import os
from django.core.management import setup_environ
from myproject import settings
setup_environ(settings)
os.chdir(PROJECT_PATH)
After this point your environment will be set just like if you started with manage.py shell
You can then run anything just as if you were in the interactive shell.
from application.models import MyModel

for element in MyModel.objects.all():
    element.delete()
Here is my backup file in full. I've abstracted the process out into functions. This would be named daily_backup and be put into the cron.daily folder to be run daily. You can see how to set up the environment and modify the functionality as needed.
#!/usr/bin/env python
import sys
import os
import site
import logging
from datetime import datetime
PROJECT_NAME = 'myproject'
DJANGO_ROOT = '/var/www/django'
PROJECT_PATH = DJANGO_ROOT + '/' + PROJECT_NAME
VIRTUALENV_PATH = '/var/www/envs/'+ PROJECT_NAME
BACKUP_DIR = '/var/www/backups/%s/daily' % (PROJECT_NAME)
TODAY = datetime.now().strftime('%Y%m%d-%H%M%S')
FILE_NAME = PROJECT_NAME + '_' + TODAY
site.addsitedir(VIRTUALENV_PATH + '/lib/python2.6/site-packages')
sys.path.append(DJANGO_ROOT)
sys.path.append(PROJECT_PATH)
from django.core.management import setup_environ
from myproject import settings
setup_environ(settings)
os.chdir(PROJECT_PATH)
# manage.py shell
from django.conf import settings
logging.basicConfig(level=logging.WARN)
def _setup():
    if not os.path.exists(BACKUP_DIR):
        logging.debug('Creating backup directory ' + BACKUP_DIR)
        os.mkdir(BACKUP_DIR)
        os.mkdir(BACKUP_DIR + '/databases')
    else:
        logging.debug('Using backup directory ' + BACKUP_DIR)

def _remove_old():
    logging.debug('Cleaning out old backups')
    # keep past 7 days
    command = "find %s* -name '%s*' -mtime +7 -exec rm {} \\;" % (BACKUP_DIR, PROJECT_NAME)
    os.system(command)

def _create_backup():
    logging.debug('Backup database')
    if settings.DATABASE_ENGINE == 'mysql':
        command = 'mysqldump -u %s --password=%s %s > %s/databases/%s.sql' % (settings.DATABASE_USER, settings.DATABASE_PASSWORD, settings.DATABASE_NAME, BACKUP_DIR, FILE_NAME)
    else:
        command = '%s/bin/python %s/manage.py dumpdata --indent=4 > %s/databases/%s.json' % (VIRTUALENV_PATH, PROJECT_PATH, BACKUP_DIR, FILE_NAME)
    os.system(command)

    logging.debug('Backup project')
    command = 'tar -czf %s/%s.tgz -C %s %s/' % (BACKUP_DIR, FILE_NAME, DJANGO_ROOT, PROJECT_NAME)
    os.system(command)

if __name__ == '__main__':
    _setup()
    _remove_old()
    _create_backup()
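One detail to double-check if you drop this into cron.daily: the file has to be executable or cron will skip it, e.g.:
chmod +x /etc/cron.daily/daily_backup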
Sounds like you need some twod.wsgi in your life: http://packages.python.org/twod.wsgi/