Django "ValueError: Can't bulk create a multi-table inherited model" - django

Problem
I am using the django-model-utils InheritanceManager. I have a Notification(models.Model) superclass which I use to create many notification subclasses such as PostNotification(Notification), CommentNotification(Notification), etc. When trying to run CommentNotification.objects.bulk_create(list_of_comment_notification_objects), I get the following traceback:
File "/home/me/.virtualenvs/project/local/lib/python2.7/site-packages/django/db/models/query.py", line 429, in bulk_create
raise ValueError("Can't bulk create a multi-table inherited model")
ValueError: Can't bulk create a multi-table inherited model
Upon inspecting query.py, this is what raises the error:
for parent in self.model._meta.get_parent_list():
    if parent._meta.concrete_model is not self.model._meta.concrete_model:
        raise ValueError("Can't bulk create a multi-table inherited model")
Environment
Django Model Utils version: 3.1.1
Django version: 1.11.7
Python version: 2.7.3
Example
PostNotification.objects.bulk_create(
    [PostNotification(related_user=user, post=instance) for user in users]
)
throws the above exception
What I tried, and originally thought was a success:
I thought that simply running
BaseClass.objects.bulk_create(list_of_SubClass_objects) instead of SubClass.objects.bulk_create(list_of_SubClass_objects) would work and return a list of SubClass values, but subsequently running SubClass.objects.all() returned an empty result. The bulk_create() only created a Notification base class object for each item in the list.
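For example (a minimal illustration of the symptom, using the question's model names):
# Creates rows only in the Notification (parent) table:
Notification.objects.bulk_create(
    [PostNotification(related_user=user, post=instance) for user in users]
)
PostNotification.objects.all()  # empty queryset; no child-table rows were written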

Found a hacky solution. I hope it works in your case. The trick is to dynamically create a model (which is not an inherited one) that has some Meta options (db_table) set, and use this dynamic model to create Child objects in bulk (in other words, write into Child's DB table).
class Parent(models.Model):
    name = models.CharField(max_length=10)

class Child(Parent):
    phone = models.CharField(max_length=12)

# just an example. Should be expanded to work properly.
field_type_mapping = {
    'OneToOneField': models.IntegerField,
    'CharField': models.CharField,
}
def create_model(Model, app_label='children', module='', options=None):
    """
    Create the specified model dynamically.
    """
    model_name = Model.__name__

    class Meta:
        managed = False
        db_table = Model._meta.db_table

    if app_label:
        # app_label must be set using the Meta inner class
        setattr(Meta, 'app_label', app_label)

    # Update Meta with any options that were provided
    if options is not None:
        for key, value in options.items():
            setattr(Meta, key, value)

    # Set up a dictionary to simulate declarations within a class
    attrs = {'__module__': module, 'Meta': Meta}

    # Add in any fields that were provided
    fields = dict()
    for field in Model._meta.fields:
        if field.attname == 'id':
            continue
        if field.model.__name__ == model_name:
            field_class_name = type(field).__name__
            print(field.attname)
            fields[field.attname] = field_type_mapping[field_class_name]()

    # Create the class, which automatically triggers ModelBase processing
    attrs.update(fields)
    model = type(f'{model_name}Shadow', (models.Model,), attrs)
    return model
mod = create_model(Child)
parents = [Parent(name=str(i)) for i in range(15)]
parents = Parent.objects.bulk_create(parents)
children = [mod(phone=parent.name, parent_ptr_id=parent.id) for parent in parents]
mod.objects.bulk_create(children)
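If the shadow model wrote into the right table, the regular child manager should now see the rows (a quick sanity check, assuming the models above):
assert Child.objects.count() == len(children)
print(Child.objects.values_list('phone', flat=True))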

I've done a custom implementation of bulk_create that seems to be working for my case (only one parent relationship and a primary key that is not auto-incremented):
from django.db import models, transaction

class MultiTableChildQueryset(models.QuerySet):
    def bulk_create(self, objs, batch_size=None):
        assert batch_size is None or batch_size > 0
        if not objs:
            return objs

        self._for_write = True
        objs = list(objs)

        parent_model = self.model._meta.pk.related_model

        parent_objs = []
        for obj in objs:
            parent_values = {}
            for field in [f for f in parent_model._meta.fields if hasattr(obj, f.name)]:
                parent_values[field.name] = getattr(obj, field.name)
            parent_objs.append(parent_model(**parent_values))
            setattr(obj, self.model._meta.pk.attname, obj.id)

        parent_model.objects.bulk_create(parent_objs, batch_size=batch_size)

        with transaction.atomic(using=self.db, savepoint=False):
            self._batched_insert(objs, self.model._meta.local_fields, batch_size)

        return objs
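One way to wire this up is to attach the queryset to the child model with QuerySet.as_manager(). A sketch, reusing the Parent/Child models from the first answer; per the answer's caveat, the objects' primary keys must already be set before calling bulk_create, so the example assigns them explicitly:
class Child(Parent):
    phone = models.CharField(max_length=12)

    objects = MultiTableChildQueryset.as_manager()

# bulk_create now writes the parent rows first, then the child rows:
Child.objects.bulk_create([Child(id=i, name=str(i), phone=str(i)) for i in range(1, 6)])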

A slightly easier-to-read version of Moises' answer:
from typing import List, TypeVar

from django.db import transaction
from django.db.models import Model, QuerySet

M = TypeVar('M', bound=Model)

def multi_inheritance_table_bulk_insert(data: List[M]) -> None:
    """
    Bulk insert data into a multi-table inheritance model.
    """
    if not data:
        return

    model = data[0].__class__
    local_fields = model._meta.local_fields
    parent_model = model._meta.pk.related_model
    parent_fields = parent_model._meta.local_fields

    parent_objects = [
        parent_model(**{field.name: getattr(obj, field.name) for field in parent_fields})
        for obj in data
    ]
    parent_model.objects.bulk_create(parent_objects)

    for parent, obj in zip(parent_objects, data):
        obj.pk = parent.pk

    queryset = QuerySet(model)
    queryset._for_write = True

    with transaction.atomic(using=queryset.db, savepoint=False):
        queryset._batched_insert(
            data,
            local_fields,
            batch_size=None,
        )
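Usage would look roughly like this (a sketch, assuming the Parent/Child models from the earlier answer; note that the parent pks must be available after bulk_create, i.e. pre-assigned or on a backend such as PostgreSQL that returns them):
children = [Child(name=str(i), phone=str(i)) for i in range(20)]
multi_inheritance_table_bulk_insert(children)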

Related

Django unique_together with nullable ForeignKey

I'm using Django 1.8.4 on my dev machine with SQLite and I have these models:
class ModelA(Model):
    field_a = CharField(verbose_name='a', max_length=20)
    field_b = CharField(verbose_name='b', max_length=20)

    class Meta:
        unique_together = ('field_a', 'field_b',)

class ModelB(Model):
    field_c = CharField(verbose_name='c', max_length=20)
    field_d = ForeignKey(ModelA, verbose_name='d', null=True, blank=True)

    class Meta:
        unique_together = ('field_c', 'field_d',)
I've run the proper migrations and registered them in the Django admin. Using the admin, I've done these tests:
I'm able to create ModelA records, and Django prohibits me from creating duplicate records - as expected!
I'm not able to create identical ModelB records when field_d is not empty.
But I am able to create identical ModelB records when field_d is empty.
My question is: How do I apply unique_together for nullable ForeignKey?
The most recent answer I found for this problem is 5 years old... I do think Django has evolved and the issue may not be the same.
Django 2.2 added a new constraints API which makes addressing this case much easier within the database.
You will need two constraints:
The existing tuple constraint; and
The remaining keys minus the nullable key, with a condition
If you have multiple nullable fields, I guess you will need to handle the permutations.
Here's an example with three fields that must be unique together, where only one of them is nullable:
from django.db import models
from django.db.models import Q
from django.db.models.constraints import UniqueConstraint
class Badger(models.Model):
    required = models.ForeignKey(Required, ...)
    optional = models.ForeignKey(Optional, null=True, ...)
    key = models.CharField(db_index=True, ...)

    class Meta:
        constraints = [
            UniqueConstraint(fields=['required', 'optional', 'key'],
                             name='unique_with_optional'),
            UniqueConstraint(fields=['required', 'key'],
                             condition=Q(optional=None),
                             name='unique_without_optional'),
        ]
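Following up on the permutation remark above, a hedged sketch of what two nullable foreign keys would require (reusing the imports above; the model and field names are made up): one constraint per combination of NULL / NOT NULL columns.
class Thing(models.Model):
    key = models.CharField(max_length=50)
    opt_a = models.ForeignKey('OptionA', null=True, on_delete=models.CASCADE)
    opt_b = models.ForeignKey('OptionB', null=True, on_delete=models.CASCADE)

    class Meta:
        constraints = [
            UniqueConstraint(fields=['key', 'opt_a', 'opt_b'],
                             name='unique_key_a_b'),
            UniqueConstraint(fields=['key', 'opt_a'],
                             condition=Q(opt_b=None),
                             name='unique_key_a_no_b'),
            UniqueConstraint(fields=['key', 'opt_b'],
                             condition=Q(opt_a=None),
                             name='unique_key_b_no_a'),
            UniqueConstraint(fields=['key'],
                             condition=Q(opt_a=None, opt_b=None),
                             name='unique_key_only'),
        ]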
UPDATE: the previous version of my answer was functional but badly designed; this one takes into account some of the comments and other answers.
In SQL NULL does not equal NULL. This means if you have two objects where field_d == None and field_c == "somestring" they are not equal, so you can create both.
You can override Model.validate_unique to add your check:
class ModelB(Model):
    # ...

    def validate_unique(self, exclude=None):
        if ModelB.objects.exclude(id=self.id).filter(
                field_c=self.field_c, field_d__isnull=True).exists():
            raise ValidationError("Duplicate ModelB")
        super(ModelB, self).validate_unique(exclude)
If used outside of forms you have to call full_clean or validate_unique.
Take care to handle the race condition though.
@ivan, I don't think there's a simple way for Django to manage this situation. You need to think of all the creation and update operations that don't always come from a form. Also, you should think of race conditions...
And because you don't enforce this logic at the DB level, it's possible that there will actually be duplicate records, and you should check for that when querying results.
And about your solution: it can be good for forms, but I wouldn't expect the save method to raise ValidationError.
If possible, it's better to delegate this logic to the DB. In this particular case, you can use two partial indexes. There's a similar question on StackOverflow - Create unique constraint with null columns
So you can create a Django migration that adds two partial indexes to your DB.
Example:
# Assume that the app name is just `example`
from django.db import migrations

CREATE_TWO_PARTIAL_INDEX = """
CREATE UNIQUE INDEX model_b_2col_uni_idx ON example_model_b (field_c, field_d)
WHERE field_d IS NOT NULL;
CREATE UNIQUE INDEX model_b_1col_uni_idx ON example_model_b (field_c)
WHERE field_d IS NULL;
"""

DROP_TWO_PARTIAL_INDEX = """
DROP INDEX model_b_2col_uni_idx;
DROP INDEX model_b_1col_uni_idx;
"""

class Migration(migrations.Migration):
    dependencies = [
        ('example', 'PREVIOUS MIGRATION NAME'),
    ]

    operations = [
        migrations.RunSQL(CREATE_TWO_PARTIAL_INDEX, DROP_TWO_PARTIAL_INDEX)
    ]
Add a clean method to your model - see below:
def clean(self):
    if Variants.objects.filter("""Your filter """).exclude(pk=self.pk).exists():
        raise ValidationError("This variation is duplicated.")
I think this is a clearer way to do it for Django 1.2+.
In forms it will be raised as a non_field_error with no 500 error; in other cases, like DRF, you have to check this case manually, because it will be a 500 error.
But it will always check for unique_together!
class BaseModelExt(models.Model):
    is_cleaned = False

    def clean(self):
        for field_tuple in self._meta.unique_together[:]:
            unique_filter = {}
            unique_fields = []
            null_found = False
            for field_name in field_tuple:
                field_value = getattr(self, field_name)
                if getattr(self, field_name) is None:
                    unique_filter['%s__isnull' % field_name] = True
                    null_found = True
                else:
                    unique_filter['%s' % field_name] = field_value
                    unique_fields.append(field_name)
            if null_found:
                unique_queryset = self.__class__.objects.filter(**unique_filter)
                if self.pk:
                    unique_queryset = unique_queryset.exclude(pk=self.pk)
                if unique_queryset.exists():
                    msg = self.unique_error_message(self.__class__, tuple(unique_fields))
                    raise ValidationError(msg)
        self.is_cleaned = True

    def save(self, *args, **kwargs):
        if not self.is_cleaned:
            self.clean()
        super().save(*args, **kwargs)
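A hypothetical usage sketch, reusing the question's ModelA and field names: inherit from BaseModelExt instead of models.Model. Note that for it to act purely as a base class, BaseModelExt would probably also need class Meta: abstract = True.
from django.db import models

class ModelB(BaseModelExt):
    field_c = models.CharField(max_length=20)
    field_d = models.ForeignKey(ModelA, null=True, blank=True,
                                on_delete=models.CASCADE)

    class Meta:
        unique_together = ('field_c', 'field_d')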
One possible workaround not mentioned yet is to create a dummy ModelA object to serve as your NULL value. Then you can rely on the database to enforce the uniqueness constraint.
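A minimal sketch of that sentinel-object idea (the sentinel values here are invented):
# Create (once) a ModelA row that stands in for "no value"...
sentinel, _ = ModelA.objects.get_or_create(field_a='__none__', field_b='__none__')

# ...and point field_d at it instead of NULL, so unique_together applies normally.
ModelB.objects.create(field_c='somestring', field_d=sentinel)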

Convert a subclass model instance to another subclass model instance in django?

I have a ModelBase, and ModelA and ModelB which inherit from it.
I want to change a ModelA instance into a ModelB instance. (I can handle the difference in the attributes they have.)
I've seen related questions, but they don't quite work for me:
How can I create an inherited django model instance from an existing base model instance?
Change class of child on django models
EDIT
When you have a Place - Restaurant/Bar relationship,
I think it's quite reasonable to be able to switch a restaurant to a bar.
I had to deal with the same problem; both yuvi's and arctelix's answers did not work for me. yuvi's solution gives an error and arctelix's solution creates a new object with a new pk.
The goal here is to change the subclass model while keeping the original superclass as it is, with the old pk.
First: delete the old subclass and keep the superclass. Check the Django documentation.
Second: add the new subclass with its fields and pass the superclass to it.
Check this question.
Example: a place could be a restaurant or a caffe, and you want to change a restaurant place to a caffe, as follows:
class Place(models.Model):
    name = models.CharField(max_length=50)
    address = models.CharField(max_length=80)

class Caffe(Place):
    serves_hot_dogs = models.BooleanField(default=False)
    serves_pizza = models.BooleanField(default=False)

class Restaurant(Place):
    serves_tea = models.BooleanField(default=False)
    serves_coffee = models.BooleanField(default=False)

# get the object to be changed
rest = Restaurant.objects.get(pk=1)  # arbitrary number
# delete the subclass while keeping the parent
rest.delete(keep_parents=True)
place = Place.objects.get(pk=1)  # the primary key must be the same as the deleted restaurant
# create a Caffe and pass the original place
caffe = Caffe(place_ptr_id=place.pk)  # this will leave the parent fields empty
# update parent fields
caffe.__dict__.update(place.__dict__)
# add other fields
# ...
# save the caffe
caffe.save()
I would create an entirely new instance of the second model with the same values of their shared attributes, then delete the old one. Seems like the cleanest way to me.
If ModelBase is abstract:
instance = ModelA.objects.get(pk=1)  # arbitrary

# find parent class fields:
fields = [f.name for f in ModelBase._meta.fields]
# get the values from the ModelA instance
values = dict([(x, getattr(instance, x)) for x in fields])

# assign the same values to a new instance of the second model
new_instance = ModelB(**values)

# add any additional information to the new instance here
new_instance.save()  # save the new one
instance.delete()  # remove the old one
If ModelBase is not abstract, however, you'll have to do an extra workaround:
fields = [f.name for f in ModelBase._meta.fields if f.name != 'id']
# ... other parts are the same ...
new_instance.modelbase_ptr = instance.modelbase_ptr  # re-assign the related parent
instance.delete()  # delete this first!
new_instance.save()
In yuvi's answer, manually assigning modelbase_ptr and saving fails, since instance.modelbase_ptr is deleted prior to the save.
Building on yuvi's answer, here is a more explicit example that works generically for abstract and non-abstract conversions of:
ModelBase -> ModelChild
ModelChild -> ModelBase
ModelChild -> ModelChild
It optionally preserves the original id, and it follows the methodology recommended in the Django docs.
ex_model = ModelA
new_model = ModelB
ex_instance = ex_model.objects.get(pk=1)  # arbitrary

# find fields required for new_model:
new_fields = [f.name for f in new_model._meta.fields]

# make a new dict of existing field: value
new_fields_dict = dict([(x, getattr(ex_instance, x, None)) for x in new_fields])

# Save a temp copy as new_model with a new id
# (modelbase_ptr will be created automatically as required)
new_fields_dict.pop('modelbase_ptr', None)
temp_instance = new_model(**new_fields_dict)
temp_instance.pk = None
temp_instance.id = None
temp_instance.save()

# you must set all your related fields here
temp_instance.copy_related(ex_instance)

ex_instance.delete()

# (optional) Save the final copy as new_model with the original id
final_instance = new_model(**new_fields_dict)
final_instance.save()
final_instance.copy_related(temp_instance)
temp_instance.delete()

# here are the removed fields, handle as required
removed_fields = [f.name for f in ex_model._meta.fields if f.name not in new_fields_dict.keys()]
removed_fields_dict = dict([(x, getattr(ex_instance, x, None)) for x in removed_fields])
In class ModelBase:
def copy_related(self, source):
    # include all your related fields here
    self.related_field = source.related_field.all()
    self.related_field_a = source.related_field_a.all()

Django: Building a QuerySet Mixin for a model and a related model

My question is about creating a QuerySet Mixin which provides identical QuerySet methods for both a model and a related model. Here is example code, and the first class ByPositionMixin is what I am focused on:
from django.db import models
from django.db.models.query import QuerySet
from django.core.exceptions import FieldError
class ByPositionMixin(object):
    def batters(self):
        try:
            return self.exclude(positions=1)
        except FieldError:
            return self.exclude(position=1)

class PlayerQuerySet(QuerySet, ByPositionMixin):
    pass

class PlayerPositionQuerySet(QuerySet, ByPositionMixin):
    pass

class PlayerManager(models.Manager):
    def get_query_set(self):
        return PlayerQuerySet(self.model, using=self._db)

class PlayerPositionManager(models.Manager):
    def get_query_set(self):
        return PlayerPositionQuerySet(self.model, using=self._db)

class Position(models.Model):
    # pos_list in order ('P', 'C', '1B', '2B', '3B', 'SS', 'LF', 'CF', 'RF')
    # pos id / pk correspond to index value of pos_list(pos)
    pos = models.CharField(max_length=2)

class Player(models.Model):
    name = models.CharField(max_length=100)
    positions = models.ManyToManyField(Position, through='PlayerPosition')
    objects = PlayerManager()

class PlayerPosition(models.Model):
    player = models.ForeignKey(Player)
    position = models.ForeignKey(Position)
    primary = models.BooleanField()
    objects = PlayerPositionManager()
Inside ByPositionMixin, I try exclude(positions=1), which queries against PlayerQuerySet, and if that raises a FieldError, I try exclude(position=1), which queries against PlayerPositionQuerySet. The difference in field names is intentional: a Player() has positions, but a PlayerPosition() has only one position. So the difference in the exclude() query is 'positions' vs. 'position'. Since I will have many custom queries (e.g. batters(), pitchers(), by_position(), etc.), do I have to write out try / except code for each one?
Or is there a different approach which would let me write custom queries without having to try against one model and then against the other one?
UPDATE: basically, I have decided to write a kwarg helper function which provides the correct kwargs for both Player and PlayerPosition. It's a little elaborate (and perhaps completely unnecessary), but it should simplify the code for several custom queries.
class ByPositionMixin(object):
    def pkw(self, **kwargs):
        # returns appropriate kwargs; at the moment, only handles one kwarg
        key = list(kwargs.keys())[0]  # e.g. 'positions__in'
        value = kwargs[key]
        key_args = key.split('__')
        if self.model.__name__ == 'Player':
            first_arg = 'positions'
        elif self.model.__name__ == 'PlayerPosition':
            first_arg = 'position'
        else:
            first_arg = key_args[0]
        key = '__'.join([first_arg] + key_args[1:])
        return {key: value}

    def batters(self):  # shows how pkw() is used
        return self.exclude(**self.pkw(positions=1))
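With this in place, both querysets expose the same custom queries (a hypothetical usage sketch, assuming the managers defined above):
non_pitchers = Player.objects.all().batters()
non_pitcher_rows = PlayerPosition.objects.all().batters()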

Django: Get list of model fields?

I've defined a User class which (ultimately) inherits from models.Model. I want to get a list of all the fields defined for this model. For example, phone_number = CharField(max_length=20). Basically, I want to retrieve anything that inherits from the Field class.
I thought I'd be able to retrieve these by taking advantage of inspect.getmembers(model), but the list it returns doesn't contain any of these fields. It looks like Django has already gotten a hold of the class and added all its magic attributes and stripped out what's actually been defined. So... how can I get these fields? They probably have a function for retrieving them for their own internal purposes?
Django versions 1.8 and later:
You should use get_fields():
[f.name for f in MyModel._meta.get_fields()]
The get_all_field_names() method is deprecated starting from Django
1.8 and will be removed in 1.10.
The documentation page linked above provides a fully backwards-compatible implementation of get_all_field_names(), but for most purposes the previous example should work just fine.
Django versions before 1.8:
model._meta.get_all_field_names()
That should do the trick.
That requires an actual model instance. If all you have is a subclass of django.db.models.Model, then you should call myproject.myapp.models.MyModel._meta.get_all_field_names()
As most of the answers are outdated, I'll try to update this for Django 2.2.
Here posts is your app (posts, blog, shop, etc.).
1) From the model. Link: https://docs.djangoproject.com/en/stable/ref/models/meta/
from posts.models import BlogPost

all_fields = BlogPost._meta.fields
# or
all_fields = BlogPost._meta.get_fields()
Note that:
all_fields = BlogPost._meta.get_fields()
will also return some relationships which, for example, you cannot display in a view.
As in my case:
Organisation._meta.fields
(<django.db.models.fields.AutoField: id>, <django.db.models.fields.DateField: created>...
and
Organisation._meta.get_fields()
(<ManyToOneRel: crm.activity>, <django.db.models.fields.AutoField: id>, <django.db.models.fields.DateField: created>...
2) From an instance
from posts.models import BlogPost

bp = BlogPost()
all_fields = bp._meta.fields
3) From the parent model
Let's suppose we have Post as the parent model, you want to see all the fields in a list, and you want the parent fields to be read-only in edit mode.
from django.contrib import admin
from posts.models import BlogPost

@admin.register(BlogPost)
class BlogPostAdmin(admin.ModelAdmin):
    all_fields = [f.name for f in BlogPost._meta.fields]
    parent_fields = BlogPost.get_deferred_fields(BlogPost)
    list_display = all_fields
    readonly_fields = parent_fields
The get_all_field_names() method mentioned herein has been deprecated since 1.8. From now on it's get_fields():
>>> from django.contrib.auth.models import User
>>> User._meta.get_fields()
I find adding this to django models quite helpful:
def __iter__(self):
    for field_name in self._meta.get_all_field_names():
        value = getattr(self, field_name, None)
        yield (field_name, value)
This lets you do:
for field, val in object:
    print field, val
This does the trick. I only tested it in Django 1.7.
your_fields = YourModel._meta.local_fields
your_field_names = [f.name for f in your_fields]
Model._meta.local_fields does not contain many-to-many fields. You should get them using Model._meta.local_many_to_many.
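For example, to collect both in one list (a small sketch using the names above):
your_model_fields = list(YourModel._meta.local_fields) + list(YourModel._meta.local_many_to_many)
your_field_names = [f.name for f in your_model_fields]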
It is not clear whether you have an instance of the class or the class itself and are trying to retrieve the fields, but either way, consider the following code.
Using an instance
instance = User.objects.get(username="foo")
instance.__dict__  # returns a dictionary with all fields and their values
instance.__dict__.keys()  # returns a view of the field names
list(instance.__dict__.keys())  # returns a list of the field names
Using a class
User._meta.__dict__.get("fields") # returns the fields
# to get the field names consider looping over the fields and calling __str__()
for field in User._meta.__dict__.get("fields"):
    field.__str__()  # e.g. 'auth.User.id'
def __iter__(self):
    field_names = [f.name for f in self._meta.fields]
    for field_name in field_names:
        value = getattr(self, field_name, None)
        yield (field_name, value)
This worked for me in django==1.11.8
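Since __iter__ yields (name, value) pairs, you can also turn an instance straight into a dict (a small usage sketch; MyModel stands in for your own model):
instance = MyModel.objects.first()
as_dict = dict(instance)  # {'id': 1, 'name': '...', ...}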
A detail not mentioned by others:
[f.name for f in MyModel._meta.get_fields()]
gives, for example,
['id', 'name', 'occupation']
and
[f.get_attname() for f in MyModel._meta.get_fields()]
gives
['id', 'name', 'occupation_id']
If
reg = MyModel.objects.first()
then
reg.occupation
gives, for example,
<Occupation: Dev>
and
reg.occupation_id
gives
1
MyModel._meta.get_all_field_names() was deprecated several versions back and removed in Django 1.10.
Here's the backwards-compatible suggestion from the docs:
from itertools import chain

list(set(chain.from_iterable(
    (field.name, field.attname) if hasattr(field, 'attname') else (field.name,)
    for field in MyModel._meta.get_fields()
    # For complete backwards compatibility, you may want to exclude
    # GenericForeignKey from the results.
    if not (field.many_to_one and field.related_model is None)
)))
Just to add: I am using the self object, and this worked for me:
[f.name for f in self.model._meta.get_fields()]
At least with Django 1.9.9 (the version I'm currently using), note that .get_fields() also "considers" any reverse foreign key relation as a field, which may be problematic. Say you have:
class Parent(models.Model):
    id = models.UUIDField(primary_key=True)

class Child(models.Model):
    parent = models.ForeignKey(Parent)
It follows that
>>> map(lambda field: field.name, Parent._meta.get_fields())
['id', 'child']
while, as shown by @Rockallite,
>>> map(lambda field: field.name, Parent._meta.local_fields)
['id']
Before I found this post, I had successfully found this to work:
Model._meta.fields
It works the same as
Model._meta.get_fields()
I'm not sure what the difference is in the results, if there is one. I ran this loop and got the same output:
for field in Model._meta.fields:
    print(field.name)
Sometimes we need the DB column names as well:
def get_db_field_names(instance):
    your_fields = instance._meta.local_fields
    db_field_names = [f.name + '_id' if f.related_model is not None else f.name for f in your_fields]
    model_field_names = [f.name for f in your_fields]
    return db_field_names, model_field_names
Call the method to get the fields:
db_field_names, model_field_names = get_db_field_names(Mymodel)
I combined multiple answers from this thread (thanks!) and came up with the following generic solution:
from django.contrib.admin import ModelAdmin

class ReadOnlyBaseModelAdmin(ModelAdmin):
    def has_add_permission(self, request):
        return request.user.is_superuser

    def has_delete_permission(self, request, obj=None):
        return request.user.is_superuser

    def get_readonly_fields(self, request, obj=None):
        return [f.name for f in self.model._meta.get_fields()]
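It can then be reused per model, e.g. (a hypothetical registration; MyModel stands in for one of your models):
from django.contrib import admin

@admin.register(MyModel)
class MyModelAdmin(ReadOnlyBaseModelAdmin):
    pass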
Why not just use this:
manage.py inspectdb
Example output:
class GuardianUserobjectpermission(models.Model):
    id = models.IntegerField(primary_key=True)  # AutoField?
    object_pk = models.CharField(max_length=255)
    content_type = models.ForeignKey(DjangoContentType, models.DO_NOTHING)
    permission = models.ForeignKey(AuthPermission, models.DO_NOTHING)
    user = models.ForeignKey(CustomUsers, models.DO_NOTHING)

    class Meta:
        managed = False
        db_table = 'guardian_userobjectpermission'
        unique_together = (('user', 'permission', 'object_pk'),)

Django SELECT (1) AS [a] FROM [my_table] WHERE ([my_table].[id] = ? AND NOT ([my_table].[id] = ? )) (1, 1)

Why is Django executing statements such as this:
SELECT (1) AS [a] FROM [my_table]
WHERE ([my_table].[id] = ?
AND NOT ([my_table].[id] = ? )) (1, 1)
This happens when calling is_valid() on a formset created the following way:
MyFormSet = modelformset_factory(Table, fields=['my_field'], extra=0)
my_form_set = MyFormSet(request.POST,
                        queryset=Table.objects.all())
where Table and MyForm are as simple as, say:
class Table(models.Model):
    my_field = models.CharField(max_length=10)

class MyForm(forms.ModelForm):
    class Meta:
        model = Table
Hint: I looked at the call stack and the code responsible for it (in django/forms/models.py) is below:
def _perform_unique_checks(self, unique_checks):
    import pdb; pdb.set_trace()
    bad_fields = set()
    form_errors = []

    for unique_check in unique_checks:
        # Try to look up an existing object with the same values as this
        # object's values for all the unique field.

        lookup_kwargs = {}
        for field_name in unique_check:
            lookup_value = self.cleaned_data[field_name]
            # ModelChoiceField will return an object instance rather than
            # a raw primary key value, so convert it to a pk value before
            # using it in a lookup.
            if isinstance(self.fields[field_name], ModelChoiceField):
                lookup_value = lookup_value.pk
            lookup_kwargs[str(field_name)] = lookup_value

        qs = self.instance.__class__._default_manager.filter(**lookup_kwargs)

        # Exclude the current object from the query if we are editing an
        # instance (as opposed to creating a new one)
        if self.instance.pk is not None:
            qs = qs.exclude(pk=self.instance.pk)
Basically, the pk is both included in the uniqueness check and then excluded from it. It looks like Django could be smarter and avoid this inefficiency.
I haven't looked at it in detail, but I think you are right that Django could shortcut this query. Please file a ticket at http://code.djangoproject.com/.
Looks like this has been fixed already in trunk (by adding new functionality that also fixes this particular problem)