I have a User model, and I want its id to start from 10000 and then auto-increment like:
10001, 10002, 10003, 10004...
My User class:
class User(AbstractUser):
    username = models.CharField(max_length=64)
    ...
Is it possible to achieve this?
EDIT-1
Before asking this question, I read this link: Is there a way to set the id value of new Django objects to start at a certain value?
But I don't think those answers are good, so what I'm asking is whether Django has a configuration setting for achieving this.
The way to do this is the same as doing a data migration with raw SQL. Create an empty migration, replacing APPNAME with your app's name:
python manage.py makemigrations APPNAME --empty
Inside the created file:
operations = [
    migrations.RunSQL(
        'ALTER SEQUENCE APPNAME_USER_id_seq RESTART WITH 10000;'
    )
]
The solution is to set an explicit auto-increment field like:
user_id = models.AutoField(primary_key=True)
After this, you can run this command on the database side:
ALTER SEQUENCE user_id RESTART WITH 10000;
Alternatively, you can run it from Python by using signals:
from django.db.models.signals import post_syncdb
from django.db import connection, transaction


def auto_increment_start(sender, **kwargs):
    cursor = connection.cursor()
    cursor.execute("ALTER SEQUENCE user_id RESTART WITH 10000;")
    transaction.commit_unless_managed()


# app_models here refers to your application's models module
post_syncdb.connect(auto_increment_start, sender=app_models)
In Django, a model can't have more than one AutoField, and this one is used to set a primary key different from the default key.
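For completeness, a minimal sketch of how that explicit field could sit on the User model from the question (only the user_id line is new; the rest mirrors the question's model):

from django.contrib.auth.models import AbstractUser
from django.db import models


class User(AbstractUser):
    # explicit auto-increment primary key, replacing Django's implicit "id"
    user_id = models.AutoField(primary_key=True)
    username = models.CharField(max_length=64)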
My solution is to do it manually:
$ ./manage.py shell
Python 3.6.5 (default, Apr 1 2018, 05:46:30)
Type 'copyright', 'credits' or 'license' for more information
IPython 6.4.0 -- An enhanced Interactive Python. Type '?' for help.
In [1]: from django.contrib.auth.models import User
In [2]: u = User.objects.create_user('name', '', '')
In [3]: User.objects.filter(id=u.id).update(id=10000-1)
Out[3]: 1
In [4]: u.delete()
Out[4]:
(0,
{'admin.LogEntry': 0,
'auth.User_groups': 0,
'auth.User_user_permissions': 0,
'auth.User': 0})
In [5]: uu = User.objects.create_user('user', '', '')
In [6]: uu.id
Out[6]: 10000
For MySQL, add this in your migration file. Replace TABLE_NAME and START_VALUE with your table's name and the value you want to start with:
operations = [
    migrations.RunSQL('ALTER TABLE TABLE_NAME AUTO_INCREMENT=START_VALUE;')
]
I'm new to Django Rest Framework. I have this model:
class Post(models.Model):
    created = models.DateTimeField(auto_now_add=True)
    description = models.CharField(verbose_name='description', db_index=True, max_length=64)
In this model, I want to add a field "isActual" whose value is True or False.
Its value must be False if the Post was created more than 1 month ago (based on the "created" field).
I don't know how to create it. Is it possible?
First, create a BooleanField() and check it against the current timezone as in this answer, and try it.
Hope this helps!
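A minimal sketch of that idea, assuming a 30-day cutoff and exposing the flag as a read-only model property rather than a stored column (names other than created/description are my own):

from datetime import timedelta

from django.db import models
from django.utils import timezone


class Post(models.Model):
    created = models.DateTimeField(auto_now_add=True)
    description = models.CharField(verbose_name='description', db_index=True, max_length=64)

    @property
    def is_actual(self):
        # True while the post is younger than roughly one month (30 days)
        return self.created >= timezone.now() - timedelta(days=30)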
You can annotate your queryset like:
from django.utils.timezone import now
from dateutil.relativedelta import relativedelta
from django.db.models import BooleanField, ExpressionWrapper, Q

Post.objects.annotate(
    is_actual=ExpressionWrapper(
        Q(created__gte=now() - relativedelta(months=1)),
        output_field=BooleanField()
    )
)
The Post objects that arise from this will carry an extra attribute is_actual that is True if the post is younger than one month, and False otherwise.
You need to install the python-dateutil package [PyPI] first in order to import relativedelta [readthedocs.io], with:
pip install python-dateutil
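If you would rather not add python-dateutil, a plain timedelta is a close approximation (30 days instead of a calendar month); a sketch under that assumption:

from datetime import timedelta

from django.db.models import BooleanField, ExpressionWrapper, Q
from django.utils.timezone import now

Post.objects.annotate(
    is_actual=ExpressionWrapper(
        Q(created__gte=now() - timedelta(days=30)),
        output_field=BooleanField()
    )
)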
I added a unique attribute uid to my MyModel model:
class MyModel(db.Model):
    ...
    uid = db.Column(db.String(50), nullable=False)
    ...
    __table_args__ = (UniqueConstraint('uid', name='unique_uid'),)
I have a migration:
def upgrade():
    op.add_column('mymodel', sa.Column('uid', sa.String(length=50), nullable=True))
    mymodel = table('mymodel', column('uid'))
    op.execute(mymodel.update().values(uid=generate_uid()))
    op.create_unique_constraint('unique_uid', 'mymodel', ['uid'])
    op.alter_column(
        table_name='mymodel',
        column_name='uid',
        nullable=False
    )
On running db upgrade I get an error:
...
psycopg2.IntegrityError: could not create unique index "unique_uid"
DETAIL: Key (uid)=(c92U6txA2) is duplicated.
How do I set a unique value for each row in op.execute(mymodel.update().values(uid=generate_uid()))?
$ pip freeze
alembic==0.8.6
Flask==0.10.1
Flask-Fixtures==0.3.3
Flask-Login==0.3.2
Flask-Migrate==1.8.0
Flask-Script==2.0.5
Flask-SQLAlchemy==2.1
itsdangerous==0.24
Jinja2==2.8
Mako==1.0.4
MarkupSafe==0.23
psycopg2==2.6.1
python-editor==1.0
requests==2.10.0
SQLAlchemy==1.0.13
Werkzeug==0.11.9
A possible solution:
from sqlalchemy.orm import Session
from alembic import op
import sqlalchemy as sa


def upgrade():
    conn = op.get_bind()
    session = Session(bind=conn)
    op.add_column('mymodel', sa.Column('uid', sa.String(length=50), nullable=True))
    for item in session.query(MyModel).filter_by(uid=None):
        item.uid = generate_uid()
    session.commit()
    op.create_unique_constraint('unique_uid', 'mymodel', ['uid'])
    op.alter_column(
        table_name='mymodel',
        column_name='uid',
        nullable=False
    )
The migration script that you wrote puts the same uid on all the rows: the generate_uid() function is called once, and its result is then written into every row. So when the unique index is created you get a duplicate key error.
Depending on what your uids are and which database you use, you may be able to write a single SQL statement that creates unique ids for all your rows, but the safe bet is to loop and update each row separately.
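For example, on PostgreSQL (which the psycopg2 error suggests you are using), one way to generate a per-row value in a single statement is to hash volatile functions, since the expression is re-evaluated for every row. This is only a sketch, it assumes an md5-style uid is acceptable, and it is not a guaranteed collision-free scheme:

op.execute("UPDATE mymodel SET uid = md5(random()::text || clock_timestamp()::text)")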
I have a simple Django model:
class Directory(models.Model):
    id = models.AutoField(primary_key=True)
    path = models.TextField(unique=True)

    def __unicode__(self):
        return self.path

    class Meta:
        db_table = u'directories'
However, there seems to be some problem saving a Directory instance to the db:
>>> from cygapp.models import Directory
>>> d = Directory()
>>> d.path = '/usr'
>>> d.id
>>> d.save()
>>> d.id
4
>>> d
<Directory: /usr>
While the ID field is assigned correctly (the next free value), it is not stored in the db:
sqlite> select * from directories;
1|/bin
2|/system/lib
3|/system/bin
|/usr
What am I missing here?
Everything you're doing to save the record is correct, and an auto id should be assigned.
Remove your custom id field (id = models.AutoField(primary_key=True)), since Django adds this automatically for you. Then drop your 'directories' table and run python manage.py syncdb. This will ensure you have a correct id field on your model.
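A sketch of the model with the explicit field removed; Django then adds the implicit auto-incrementing id primary key on its own:

from django.db import models


class Directory(models.Model):
    path = models.TextField(unique=True)

    def __unicode__(self):
        return self.path

    class Meta:
        db_table = u'directories'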
I am trying to forward migrate a model with existing data. The model has a new field with constraints unique=True and null=False.
When I do
./manage.py schemamigration myapp --auto
South lets me specify a default value for the new field by asking:
Specify a one-off value to use for existing columns now
Usually I set this to None but since this field needs to be unique I was wondering if it is possible to pass South a unique value via:
>>> import uuid; uuid.uuid1().hex[0:35]
This gives me an error message
! Invalid input: invalid syntax
Any ideas if it is possible to pass South random unique default values when migrating via the commandline?
Thanks.
Unfortunately only the datetime module is available for use as a one-off value in a schemamigration.
However, you can achieve the same effect by splitting this up into three migrations:
1. add the new field to the model without constraints (with null=True, unique=False)
2. use a datamigration to add the UUID to the new field (sketched below the tutorial link)
3. add the constraints on the new field (with null=False, unique=True)
Tutorial on data migrations: http://south.readthedocs.org/en/0.7.6/tutorial/part3.html#data-migrations
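A rough sketch of what step 2 could look like as a South data migration, reusing the uuid expression from the question (app, model and field names are placeholders; the surrounding scaffolding comes from ./manage.py datamigration as shown in the tutorial):

import uuid

from south.v2 import DataMigration


class Migration(DataMigration):

    def forwards(self, orm):
        # assign a unique value to every existing row before the constraint is added
        for obj in orm['myapp.MyModel'].objects.all():
            obj.uuid = uuid.uuid1().hex[0:35]
            obj.save()

    def backwards(self, orm):
        pass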
In Django 1.7+ you can do the following. It first adds the field with no index and no unique constraint. It then assigns the unique values (I based them on the name and used a slugify method, which you need to create), and finally alters the field again to add the index and unique attributes.
from django.db import migrations, models
import re
import django.contrib.postgres.fields
from common.utils import slugify
import django.core.validators


def set_slugs(apps, schema_editor):
    categories = apps.get_model("myapp", "Category").objects.all()
    for category in categories:
        category.slug = slugify(category.name)
        category.save()


class Migration(migrations.Migration):

    dependencies = [
        ('myapp', '0034_auto_20150906_1936'),
    ]

    operations = [
        migrations.AddField(
            model_name='category',
            name='slug',
            field=models.CharField(max_length=30, validators=[django.core.validators.MinLengthValidator(2), django.core.validators.RegexValidator(re.compile('^[0-9a-z-]+$'), 'Enter a valid slug.', 'invalid')], help_text='Required. 2 to 30 characters and can only contain a-z, 0-9, and the dash (-)', unique=False, db_index=False, null=True),
            preserve_default=False,
        ),
        migrations.RunPython(set_slugs),
        migrations.AlterField(
            model_name='category',
            name='slug',
            field=models.CharField(help_text='Required. 2 to 30 characters and can only contain a-z, 0-9, and the dash (-)', unique=True, max_length=30, db_index=True, validators=[django.core.validators.MinLengthValidator(2), django.core.validators.RegexValidator(re.compile('^[0-9a-z-]+$'), 'Enter a valid slug.', 'invalid')]),
        ),
    ]
Here is Django's official how-to on migrating unique fields.
Migrations that add unique fields
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
Applying a "plain" migration that adds a unique non-nullable field to a table
with existing rows will raise an error because the value used to populate
existing rows is generated only once, thus breaking the unique constraint.
Therefore, the following steps should be taken. In this example, we'll add a
non-nullable :class:`~django.db.models.UUIDField` with a default value. Modify
the respective field according to your needs.
* Add the field on your model with ``default=...`` and ``unique=True``
arguments. In the example, we use ``uuid.uuid4`` for the default.
* Run the :djadmin:`makemigrations` command.
* Edit the created migration file.
The generated migration class should look similar to this::
    class Migration(migrations.Migration):

        dependencies = [
            ('myapp', '0003_auto_20150129_1705'),
        ]

        operations = [
            migrations.AddField(
                model_name='mymodel',
                name='uuid',
                field=models.UUIDField(max_length=32, unique=True, default=uuid.uuid4),
            ),
        ]
You will need to make three changes:
* Add a second :class:`~django.db.migrations.operations.AddField` operation
copied from the generated one and change it to
:class:`~django.db.migrations.operations.AlterField`.
* On the first operation (``AddField``), change ``unique=True`` to
``null=True`` -- this will create the intermediary null field.
* Between the two operations, add a
:class:`~django.db.migrations.operations.RunPython` or
:class:`~django.db.migrations.operations.RunSQL` operation to generate a
unique value (UUID in the example) for each existing row.
The resulting migration should look similar to this::
    # -*- coding: utf-8 -*-
    from __future__ import unicode_literals

    from django.db import migrations, models
    import uuid


    def gen_uuid(apps, schema_editor):
        MyModel = apps.get_model('myapp', 'MyModel')
        for row in MyModel.objects.all():
            row.uuid = uuid.uuid4()
            row.save()


    class Migration(migrations.Migration):

        dependencies = [
            ('myapp', '0003_auto_20150129_1705'),
        ]

        operations = [
            migrations.AddField(
                model_name='mymodel',
                name='uuid',
                field=models.UUIDField(default=uuid.uuid4, null=True),
            ),
            # omit reverse_code=... if you don't want the migration to be reversible.
            migrations.RunPython(gen_uuid, reverse_code=migrations.RunPython.noop),
            migrations.AlterField(
                model_name='mymodel',
                name='uuid',
                field=models.UUIDField(default=uuid.uuid4, unique=True),
            ),
        ]
* Now you can apply the migration as usual with the :djadmin:`migrate` command.
Note there is a race condition if you allow objects to be created while this
migration is running. Objects created after the ``AddField`` and before
``RunPython`` will have their original ``uuid``’s overwritten.
You can manually edit your migration file:
I needed to add a random character to some field, so I imported random and string:
import random
import string
and changed the value of default to
default=random.choice(string.lowercase)
It worked. (Note that string.lowercase is Python 2; on Python 3 the equivalent is string.ascii_lowercase.)
There is a way to set a unique value for each row with South.
Define the slug in models.py as:
class Foo(models.Model):
    slug = models.SlugField(unique=True, default='')
    ....
Create a new migration by running python manage.py schemamigration --auto foo.
Open the new migration file and edit it:
# Change add_column to this:
db.add_column(u'foo_foo', 'slug',
              self.gf('django.db.models.fields.SlugField')(default='',
                                                           unique=False,
                                                           max_length=50),
              keep_default=False)

# Right above the create_unique call below, add this Python code:
foos = orm['foo.Foo'].objects.all()
for foo in foos:
    foo.slug = slugify(foo.name)
    foo.save()

# Mark slug as a unique field
db.create_unique(u'foo_foo', ['slug'])
P.S. Mark this migration with no_dry_run = True.
P.P.S. Do not forget to import the slugify function: from django.template.defaultfilters import slugify
I have been working on an application in Django. To begin with, for simplicity, I had been using sqlite3 for the database.
However, once I moved to PostgreSQL, I've run into a bit of a problem: the primary key does not reset once I clear out a table.
This app is a game that is played over a long time period (weeks). As such, every time a new game starts, all of the data is cleared out of the database and then new, randomized data is added.
I'd like to be able to "start over" with primary keys starting at 1 each time I clean/rebuild the game.
The code still works as-is, but integers are a pretty natural way for describing the objects in my game. I'd like to have each new game start at 1 rather than wherever the last game left off.
How can I reset the primary key counter in PostgreSQL? Keep in mind that I don't need to preserve the data in the table since I am wiping it out anyway.
In your app directory try this:
python manage.py help sqlsequencereset
Pipe it into psql like this to actually run the reset:
python manage.py sqlsequencereset myapp1 myapp2 | psql
Edit: here's an example of the output from this command on one of my tables:
BEGIN;
SELECT setval('"project_row_id_seq"', coalesce(max("id"), 1), max("id") IS NOT null) FROM "project_row";
COMMIT;
As suggested by "Van Gale", you can get the commands to solve your problem by running sqlsequencereset.
Alternatively, you can execute the SQL query generated by sqlsequencereset from within Python in this way (using the default database):
from django.core.management.color import no_style
from django.db import connection

from myapps.models import MyModel1, MyModel2

sequence_sql = connection.ops.sequence_reset_sql(no_style(), [MyModel1, MyModel2])
with connection.cursor() as cursor:
    for sql in sequence_sql:
        cursor.execute(sql)
I tested this code with Python 3.6, Django 2.0 and PostgreSQL 10.
If you perform raw SQL, you can do this:
ALTER SEQUENCE youApp_id_seq RESTART WITH 1;
docs:
http://www.postgresql.org/docs/8.2/static/sql-altersequence.html
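If you prefer to issue that statement from Django rather than psql, a minimal sketch (the sequence name myapp_mymodel_id_seq is a placeholder; check the real name in your database first):

from django.db import connection

with connection.cursor() as cursor:
    cursor.execute('ALTER SEQUENCE myapp_mymodel_id_seq RESTART WITH 1;')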
I view auto-increment primary keys as purely internal identifiers for database records, and I don't like exposing them to users. Granted, it's a common design to use them as part of URLs, but even there slugs or other identifiers feel more appropriate.
If you do not want to have to manually specify the apps you need, or if you have a series of different databases, this command will dynamically gather all connections from settings.py and reset their sequences.
To run it, use: python manage.py reset_sequences
import psycopg2
from django.conf import settings
from django.core.management.base import BaseCommand
from django.db import connections


def dictfetchall(cursor):
    """Return all rows from a cursor as a dict"""
    columns = [col[0] for col in cursor.description]
    return [
        dict(zip(columns, row))
        for row in cursor.fetchall()
    ]


class Command(BaseCommand):
    help = "Resets sequencing errors in Postgres which normally occur due to importing/restoring a DB"

    def handle(self, *args, **options):
        # loop over all databases in system to figure out the tables that need to be reset
        for name_to_use_for_connection, connection_settings in settings.DATABASES.items():
            db_name = connection_settings['NAME']
            host = connection_settings['HOST']
            user = connection_settings['USER']
            port = connection_settings['PORT']
            password = connection_settings['PASSWORD']

            # connect to this specific DB
            conn_str = f"host={host} port={port} user={user} password={password}"
            conn = psycopg2.connect(conn_str)
            conn.autocommit = True

            select_all_table_statement = f"""SELECT *
                                             FROM information_schema.tables
                                             WHERE table_schema = 'public'
                                             ORDER BY table_name;
                                          """

            # just a visual representation of where we are
            print('-' * 20, db_name)

            try:
                not_reset_tables = list()

                # use the specific name for the DB
                with connections[name_to_use_for_connection].cursor() as cursor:
                    # using the current db as the cursor connection
                    cursor.execute(select_all_table_statement)
                    rows = dictfetchall(cursor)

                    # will loop over table names in the connected DB
                    for row in rows:
                        find_pk_statement = f"""
                            SELECT k.COLUMN_NAME
                            FROM information_schema.table_constraints t
                            LEFT JOIN information_schema.key_column_usage k
                            USING(constraint_name,table_schema,table_name)
                            WHERE t.constraint_type='PRIMARY KEY'
                              AND t.table_name='{row['table_name']}';
                        """
                        cursor.execute(find_pk_statement)
                        pk_column_names = dictfetchall(cursor)

                        for pk_dict in pk_column_names:
                            column_name = pk_dict['column_name']

                            # time to build the reset sequence command for each table
                            # taken from django: https://docs.djangoproject.com/en/3.0/ref/django-admin/#sqlsequencereset
                            # example: SELECT setval(pg_get_serial_sequence('"[TABLE]"','id'), coalesce(max("id"), 1), max("id") IS NOT null) FROM "[TABLE]";
                            try:
                                reset_statement = f"""SELECT setval(pg_get_serial_sequence('"{row['table_name']}"','{column_name}'),
                                                      coalesce(max("{column_name}"), 1), max("{column_name}") IS NOT null) FROM "{row['table_name']}" """
                                cursor.execute(reset_statement)
                                return_values = dictfetchall(cursor)

                                # will be 1 row
                                for value in return_values:
                                    print(f"Sequence reset to {value['setval']} for {row['table_name']}")
                            except Exception as ex:
                                # will only fail if PK is not an integer...
                                # currently in my system this is from django.contrib.sessions
                                not_reset_tables.append(f"{row['table_name']} not reset")
            except psycopg2.Error as ex:
                raise SystemExit(f'Error: {ex}')

            conn.close()

            print('-' * 5, ' ALL ERRORS ', '-' * 5)
            for item_statement in not_reset_tables:
                # shows which tables produced errors, so far I have only
                # seen this with PK's that are not integers because of the MAX() method
                print(item_statement)

            # just a visual representation of where we are
            print('-' * 20, db_name)
You need to truncate the table.
See http://www.postgresql.org/docs/8.1/static/sql-truncate.html
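On PostgreSQL 8.4 and later, TRUNCATE can also restart the owned sequences in the same statement; a sketch run through a Django cursor (the table name myapp_mymodel is a placeholder, and CASCADE additionally truncates tables that reference it):

from django.db import connection

with connection.cursor() as cursor:
    cursor.execute('TRUNCATE TABLE myapp_mymodel RESTART IDENTITY CASCADE;')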