I am trying to implement multiple database support for my django (version 1.11) app. For that purpose I have included in my settings.py:
# Two independent PostgreSQL servers: 'default' holds everything except
# comments; 'comment' holds the todo_comment table (routed by CommentRouter).
DATABASES = {
    'default': {
        'ENGINE': 'django.db.backends.postgresql_psycopg2',
        'NAME': 'labrin_dbase',
        'USER': 'labrin_admin',
        # NOTE(review): credentials are hard-coded; prefer environment variables.
        'PASSWORD': 'ndzwwZHv63STuvAF?C_$L#j#*#epZXaX',
        'HOST': 'localhost',
        'PORT': '5432',
    },
    'comment': {
        'ENGINE': 'django.db.backends.postgresql_psycopg2',
        'NAME': 'labrin_dbase_comments',
        'USER': 'labrin_admin_comments',
        'PASSWORD': 'adzwaTHv63STuvAF!C_$L#j#*#epZXaY',
        'HOST': 'localhost',
        'PORT': '5433',  # second postgres container on its own port
    }
}
# Routers are consulted in the listed order for every read/write/migrate decision.
DATABASE_ROUTERS = [
    'labrin_task.comment_router.CommentRouter',
]
And my database router is configured as below:
class CommentRouter(object):
    """Route the todo_comment table to the 'comment' database alias."""

    COMMENT_TABLE = 'todo_comment'
    COMMENT_DB = 'comment'

    def _route(self, model):
        # Shared read/write decision: comment rows live in their own database.
        if model._meta.db_table == self.COMMENT_TABLE:
            return self.COMMENT_DB
        return None

    def db_for_read(self, model, **hints):
        return self._route(model)

    def db_for_write(self, model, **hints):
        return self._route(model)

    def allow_relation(self, obj1, obj2, **hints):
        # Permit relations regardless of database; this only silences Django's
        # own check — the SQL-level FK constraint is a separate matter.
        return True

    def allow_migrate(self, db, app_label, model_name=None, **hints):
        if model_name == 'comment':
            return db == self.COMMENT_DB
        return None
Models in my "todo" app(which is only app in project):
from django.db import models
from django.contrib.auth import get_user_model

# Resolve the active user model (settings.AUTH_USER_MODEL) once at import time.
UserModel = get_user_model()
class Todo(models.Model):
    """A task with a deadline, owned by its author."""
    name = models.CharField(max_length=64)
    description = models.TextField()
    author = models.ForeignKey(UserModel, on_delete=models.CASCADE)  # FK into auth_user
    deadline = models.DateTimeField()
    created_at = models.DateTimeField(auto_now_add=True)  # set once on insert
class Comment(models.Model):
    """A comment on a Todo.

    NOTE(review): both FKs reference tables stored in the 'default' database
    while the router sends todo_comment to the 'comment' database; PostgreSQL
    cannot enforce a cross-database FK, which matches the IntegrityError the
    question reports.
    """
    todo = models.ForeignKey(Todo, on_delete=models.CASCADE)
    author = models.ForeignKey(UserModel, on_delete=models.CASCADE)
    text = models.CharField(max_length=256)
    created_at = models.DateTimeField(auto_now_add=True)
class ShareTodo(models.Model):
    """Grants another user access to a Todo, optionally allowing comments."""
    todo = models.ForeignKey(Todo, on_delete=models.CASCADE)
    with_user = models.ForeignKey(UserModel, on_delete=models.CASCADE)
    comment_allowed = models.BooleanField(default=False)  # commenting is opt-in
When I remove comment database and DATABASE_ROUTERS from settings.py, my app is working normally. After adding mentioned to settings.py, my app returns an error when I create Comment object. The error says:
Exception inside application: insert or update on table "todo_comment" violates foreign key constraint "todo_comment_author_id_bb272a3e_fk_auth_user_id"
DETAIL: Key (author_id)=(1) is not present in table "auth_user". What am I doing wrong?
Note: I am starting two postgres servers as separate docker containers and after running containers, I run python manage.py migrate and python manage.py migrate --database=comment for making all migrations.
Sorry, but cross-database relations are not possible in Django. You can find the full explanation in the Django documentation on the limitations of multiple databases (cross-database relations).
Furthermore, you cannot even do cross-database relations in PostgreSQL so even trying to hack it or to achieve it outside of Django won't be possible. Maybe for other database engines it is possible, you can do your own research.
Related
I tried multiple databases in django. So, I set databases like that
# settings.py

# Routers are consulted in the listed order for every routing decision.
DATABASE_ROUTERS = [
    'stage1.routers.MultiDBRouter',
]

# NOTE(review): DATABASE_APPS_MAPPING is not a built-in Django setting; it has
# an effect only if custom code reads it (MultiDBRouter here does not).
DATABASE_APPS_MAPPING = {
    'stage1': 'stage1',
}

DATABASES = {
    'default': {
        'ENGINE': 'django.db.backends.sqlite3',
        'NAME': BASE_DIR / 'db.sqlite3',
    },
    'stage1': {
        'ENGINE': 'django.db.backends.mysql',
        'NAME': 'stage1',
        'HOST': 'localhost',
        'USER': 'root',
        'PASSWORD': '######',  # redacted placeholder
        'PORT': 3306,
        # NOTE(review): 'CHARSET' is not a standard key for the MySQL backend;
        # charset normally goes under OPTIONS — confirm it is actually applied.
        'CHARSET': 'utf8mb4',
    },
}
# stage1.routers.py
class MultiDBRouter(object):
    """Send every model whose app_label is in model_list to the same-named DB."""

    def __init__(self):
        # App labels that own a dedicated database with an identical alias.
        self.model_list = ['stage1']

    def db_for_read(self, model, **hints):
        label = model._meta.app_label
        return label if label in self.model_list else None

    def db_for_write(self, model, **hints):
        return 'stage1' if model._meta.app_label == 'stage1' else None

    def allow_relation(self, obj1, obj2, **hints):
        # Allow the relation as soon as either side belongs to a routed app.
        labels = {obj1._meta.app_label, obj2._meta.app_label}
        if labels.intersection(self.model_list):
            return True
        return None

    def allow_migrate(self, db, app_label, model_name=None, **hints):
        if app_label == 'stage1':
            return db == 'stage1'
        return None
# stage1.models.py
from django.db import models
class Room(models.Model):
    """A room record stored in the 'stage1' database (per MultiDBRouter)."""
    name = models.CharField(max_length=100)
    sort = models.CharField(max_length=100)

    class Meta:
        app_label = "stage1"  # the label the router keys its decisions on
I ran manage.py migrate --database=stage1 and it worked. But there is something wrong that I didn't intend.
I just wanted stage1 database has only one table room. But it has all tables that are basically set like auth_group, django_session ...
How can I do to make only one table room in stage1 database?
Please help me.
I have two postgres db in my project, one for every app: app1:Store, app2:Warehouse. Both of them have Order model which django names: store_order & warehouse_order (I am fine with that part);
But after migrations there is a problem, both store_order and warehouse_order got in warehouse_db, also all these django tables I want to be only in store_db:
github repo
settings.py
DATABASES = {
    # Deliberately empty: every model must be routed to an explicit alias.
    'default': {},
    'store': {
        'ENGINE': 'django.db.backends.postgresql',
        'NAME': 'postgres',
        'USER': 'postgres',
        'HOST': 'store_db',  # docker-compose service name
        'PORT': 5432,
    },
    'warehouse': {
        'ENGINE': 'django.db.backends.postgresql',
        'NAME': 'postgres',
        'USER': 'postgres',
        'HOST': 'warehouse_db',  # docker-compose service name
        'PORT': 5432,
    }
}
# Order matters: WarehouseRouter is consulted first, StoreRouter second.
DATABASE_ROUTERS = ['warehouse.router.WarehouseRouter', 'store.router.StoreRouter']
warehouse/router.py
class WarehouseRouter:
    """
    A router to control operations in warehouse app.
    Pins every 'warehouse' model to the 'warehouse' alias; stays neutral
    (returns None) for everything else so later routers can decide.
    """

    APP = 'warehouse'

    def _owns(self, model):
        return model._meta.app_label == self.APP

    def db_for_read(self, model, **hints):
        return self.APP if self._owns(model) else None

    def db_for_write(self, model, **hints):
        return self.APP if self._owns(model) else None

    def allow_migrate(self, db, app_label, model_name=None, **hints):
        if app_label == self.APP:
            return db == self.APP
        return None
store/router.py
class StoreRouter:
    """
    A router to control operations in store app.

    Catch-all router (runs after WarehouseRouter per DATABASE_ROUTERS):
    everything that is not a warehouse model — including Django's built-in
    apps (auth, sessions, ...) — lives in the 'store' database.
    """

    def db_for_read(self, model, **hints):
        return 'store'

    def db_for_write(self, model, **hints):
        return 'store'

    def allow_migrate(self, db, app_label, model_name=None, **hints):
        # BUG FIX: the original returned True unconditionally, allowing every
        # app to migrate onto EVERY alias — which is exactly why store/django
        # tables were also created in the warehouse database.
        if app_label == 'warehouse':
            return None  # defer to WarehouseRouter
        return db == 'store'  # all other apps migrate only onto 'store'
warehouse/models.py also same as store/models.py except of app_label
class Order(models.Model):
    """An order; identical model exists in the store app under its own label."""
    # NOTE(review): uuid.uuid1 embeds host MAC address and timestamp; uuid4
    # is the usual choice when opacity matters — confirm intent.
    id = models.CharField(max_length=128, null=False, unique=True, primary_key=True, default=uuid.uuid1)
    amount = models.IntegerField(null=False)
    # NOTE(review): FloatField for money is lossy; DecimalField is customary.
    price = models.FloatField(null=False)
    comment = models.TextField(null=True)
    created_at = models.DateTimeField(null=False, auto_now_add=True)  # set on insert
    updated_at = models.DateTimeField(auto_now=True)  # refreshed on every save

    def __str__(self):
        return f'{self.id}: {self.created_at}'

    class Meta:
        app_label = 'warehouse'  # the label WarehouseRouter keys on
I tried to be as close as possible to this example
and my migrations script
#!/bin/sh -
# Generate migration files, then apply them once per database alias.
# 'migrate' must be run separately for each database; the routers'
# allow_migrate() decides which operations apply to each alias.
docker-compose run crestopher python manage.py makemigrations
docker-compose run crestopher python manage.py migrate --database=warehouse
docker-compose run crestopher python manage.py migrate --database=store
I also tried to do makemigrations and migrate with one db and then with other, but result was the same.
EDIT
I just run it again on the next day and got:
I don't understand why. I changed migrations script to:
#!/bin/sh -
# Per-app variant: generate each app's migrations explicitly, then apply
# them to that app's own database alias.
docker-compose run crestopher python manage.py makemigrations warehouse
docker-compose run crestopher python manage.py migrate --database=warehouse
docker-compose run crestopher python manage.py makemigrations store
docker-compose run crestopher python manage.py migrate --database=store
but on the first day I run it about 1000 times without effect. I'll try to understand what is going on, also these default django tables still should be only in store db. I'll put an answer here if I'll find one.
I have two databases which I have been running on a single machine. My question is: how can I add more database servers, and how can I use Django sharding to split the data from both databases across them per user — for example, user 1's data from db1 and db2 on the first server, and user 2's data on another server?
You need to define a router in settings:
# Single router: routers/routers.py must define ModelDatabaseRouter.
DATABASE_ROUTERS = ['routers.routers.ModelDatabaseRouter']
Then, in a package named routers, create a file routers.py:
class ModelDatabaseRouter(object):
    """Allows each model to set its own destiny.

    Models opt in by declaring ``in_db = '<alias>'`` on their Meta class;
    models without it are pinned to the 'default' database.
    """

    def db_for_read(self, model, **hints):
        # Specify target database with field in_db in model's Meta class.
        if hasattr(model._meta, 'in_db'):
            return model._meta.in_db
        return None

    def db_for_write(self, model, **hints):
        # Specify target database with field in_db in model's Meta class.
        if hasattr(model._meta, 'in_db'):
            return model._meta.in_db
        return None

    def allow_syncdb(self, db, model):
        # Pre-Django-1.7 hook; kept for old installs, superseded below.
        if hasattr(model._meta, 'in_db'):
            return model._meta.in_db == db
        # Models that don't specify a database can only go to 'default'.
        return db == 'default'

    def allow_migrate(self, db, app_label, model_name=None, **hints):
        # BUG FIX: allow_syncdb was removed in Django 1.7; without this
        # method modern Django would ignore the router entirely when
        # migrating. The concrete model class is only available via hints.
        model = hints.get('model')
        if model is not None and hasattr(model._meta, 'in_db'):
            return model._meta.in_db == db
        return db == 'default'
(from https://djangosnippets.org/snippets/2687/)
Now you can define the DB in your models like this:
class YourClass(models.Model):
    """Example model pinned to the 'api' database via Meta.in_db."""
    name = models.CharField(primary_key=True, max_length=50)
    creation_time = models.DateTimeField()

    class Meta:
        # BUG FIX: the original line ended with bare text "<-- THIS", which is
        # a SyntaxError; the marker belongs in a comment.
        # NOTE(review): Django validates Meta attributes and may reject
        # non-standard ones like in_db — confirm against your Django version.
        in_db = 'api'  # <-- THIS: custom attribute read by ModelDatabaseRouter
The db must be defined in settings:
# Both aliases point at the same local MySQL server, different schemas.
DATABASES = {
    'default': {
        'ENGINE': 'django.db.backends.mysql',
        'NAME': 'XXXXX',
        'USER': 'XXXXX',
        'PASSWORD': 'XXXXXX',  # redacted placeholder
        'HOST': '127.0.0.1',
        'PORT': '3306',
    },
    'api': {
        'ENGINE': 'django.db.backends.mysql',
        'NAME': 'xxxx',
        'USER': 'xxxx',
        'PASSWORD': 'xxxxxx',  # redacted placeholder
        'HOST': '127.0.0.1',
        'PORT': '3306',
    }
}
I have a hard time with creating data migrations. I use two databases for my apps. I configured databases in settings.py and also created a router like in Django docs.
# settings.py
# Shared host for both MySQL databases.
DB_HOST = 'localhost'

DATABASES = {
    'default': {
        'ENGINE': 'django.db.backends.mysql',
        'NAME': 'helios',
        'HOST': DB_HOST,
        'OPTIONS': {
            # Credentials live in a .cnf file next to this settings module.
            'read_default_file': join(dirname(__file__), 'default.cnf'),
        },
    },
    'other': {
        'ENGINE': 'django.db.backends.mysql',
        'NAME': 'gala_pol',
        'HOST': DB_HOST,
        'OPTIONS': {
            'read_default_file': join(dirname(__file__), 'other.cnf'),
        },
    },
}
# BUG FIX: the closing brace above was missing in the original, so
# DATABASE_APPS_MAPPING was parsed inside the DATABASES literal (SyntaxError).

# App label -> database alias; consumed by DatabaseAppsRouter.
DATABASE_APPS_MAPPING = {
    'contenttypes': 'default',
    'auth': 'default',
    'admin': 'default',
    'sessions': 'default',
    'messages': 'default',
    'staticfiles': 'default',
    'woodsmen': 'default',
    'helios': 'default',
    'hush': 'default',
    'hunt': 'other',
    'meat': 'other',
    'beast': 'other',
}
# routers.py
class DatabaseAppsRouter(object):
    """Route each app to the alias named in settings.DATABASE_APPS_MAPPING.

    NOTE(review): relies on `from django.conf import settings` being imported
    at the top of this module (not shown in the snippet) — confirm.
    """

    def db_for_read(self, model, **hints):
        if model._meta.app_label in settings.DATABASE_APPS_MAPPING:
            return settings.DATABASE_APPS_MAPPING[model._meta.app_label]
        return None

    def db_for_write(self, model, **hints):
        # BUG FIX: the original line was truncated ("in settings." followed by
        # a newline), which is a SyntaxError; restored the mapping lookup to
        # mirror db_for_read.
        if model._meta.app_label in settings.DATABASE_APPS_MAPPING:
            return settings.DATABASE_APPS_MAPPING[model._meta.app_label]
        return None

    def allow_relation(self, obj1, obj2, **hints):
        # Only allow relations when both sides map to the same database.
        db1 = settings.DATABASE_APPS_MAPPING.get(obj1._meta.app_label)
        db2 = settings.DATABASE_APPS_MAPPING.get(obj2._meta.app_label)
        if db1 and db2:
            return db1 == db2
        return None

    def allow_migrate(self, db, app_label, model_name=None, **hints):
        if db in settings.DATABASE_APPS_MAPPING.values():
            # A mapped database only takes apps explicitly mapped to it.
            return settings.DATABASE_APPS_MAPPING.get(app_label) == db
        elif app_label in settings.DATABASE_APPS_MAPPING:
            # A mapped app never migrates onto an unmapped database.
            return False
        return None  # no opinion otherwise (implicit in the original)
Here is the model and migrations of one of those apps:
# hunt.models.py
class Dish(models.Model):
    """
    Investigation case
    """
    display_name = models.CharField(max_length=64, unique=True)
    # NOTE(review): Kitchen, CASE_TYPE_CHOICES and DEF_CASE_TYPE are defined
    # elsewhere in the app and are not visible in this snippet.
    department = models.ForeignKey(Kitchen, null=True)
    case_type = models.PositiveSmallIntegerField(choices=CASE_TYPE_CHOICES, default=DEF_CASE_TYPE)
    created_at = models.DateTimeField(blank=True, null=True)
    comment = models.CharField(max_length=256, blank=True, null=True)

    class Meta:
        verbose_name = 'case'
        app_label = 'hunt'  # label used by the router's mapping

    def __unicode__(self):
        # Python 2-style representation: "display_name (created_at)".
        return (u'%s (%s)' % (self.display_name, self.created_at)).strip()
# hunt.migrations.0001_initial.py
class Migration(migrations.Migration):
    """Initial schema migration: creates the Dish table for the 'hunt' app."""

    # NOTE(review): 'app_label' is not a standard Migration attribute — the app
    # is normally inferred from the migration's package; confirm intent.
    app_label = 'hunt'

    dependencies = [
    ]

    operations = [
        migrations.CreateModel(
            name='Dish',
            fields=[
                ('id', models.AutoField(verbose_name='ID', auto_created=True, primary_key=True, serialize=False)),
                ('display_name', models.CharField(max_length=64, unique=True)),
                # NOTE(review): the model also declares a 'department' FK that
                # is absent here — presumably added by a later migration; verify.
                ('case_type', models.PositiveSmallIntegerField(default=0, choices=[(0, 'Unknown'), (1, 'General'), (2, 'Terror'), (3, 'Narco'), (4, 'Fraud'), (5, 'Slavery'), (6, 'Traffic'), (7, 'RICO'), (8, 'War'), (9, 'Cyber'), (20, 'Other')])),
                ('created_at', models.DateTimeField(null=True, blank=True)),
                ('comment', models.CharField(max_length=256, null=True, blank=True)),
            ],
            options={
                'verbose_name': 'case',
            },
        ),
    ]
# hunt.migrations.0002_add_hunts.py
def create_initial_hunts(apps, schema_editor):
    # Data-migration callback for RunPython; currently only logs in DEBUG.
    # NOTE(review): relies on `settings` being imported in this module.
    if settings.DEBUG:
        print('\nContent added')
class Migration(migrations.Migration):
    """Data migration: seeds initial hunt rows after the schema migration."""

    dependencies = [
        ('hunt', '0001_initial'),
    ]

    operations = [
        # Hints are forwarded to the router's allow_migrate().
        # NOTE(review): 'schema_editor' looks odd as a routing-hint key — a
        # custom key such as 'use_db' is the usual pattern; confirm the router
        # actually inspects this hint.
        migrations.RunPython(create_initial_hunts, hints={'schema_editor': 'other'}),
    ]
The problem is:
When I run the "migrate" command, only the applications connected to the default database are migrated. The migrations in the rest of the apps are never run. If I launch migrate for such an app with the --database option, it works fine.
How can I specify the database per migration? Isn't the router supposed to manage exactly this? Or I missed something else?
You have to run migrate once for each database, specifying the target with --database. Each time it will consult your router to see which migrations to actually perform on that database.
I'm guessing it was designed this way to favor explicitness over implicitness. For example, your workflow might require you to migrate the different databases at different times.
Note, though, that you won't be able to tell from the output which migrations were actually performed, since:
If allow_migrate() returns False, any migration operations for the model_name will be silently skipped when running migrate on the db.
Using these nice helpers you can run Python/SQL migrations on specific Database
[Helpers]
from django.db.migrations import RunPython, RunSQL
def run_python_specific_db_migration(migration_func, use_db):
    """Wrap a RunPython operation so it targets only the given database."""
    routing_hints = {'use_db': use_db}
    return RunPython(migration_func, hints=routing_hints)
def run_sql_specific_db_migration(sql_commands, use_db):
    """Wrap a RunSQL operation (one command or a list) for one database only."""
    routing_hints = {'use_db': use_db}
    return RunSQL(sql_commands, hints=routing_hints)
# ** Your specific db_helpers for your DB_KEY **
def run_sql_your_db_migration(sql_commands):
    """Project shortcut: run SQL only on the DB_KEY alias."""
    return run_sql_specific_db_migration(sql_commands, use_db=DB_KEY)
def run_python_your_db_migration(migration_func):
    """Project shortcut: run a Python migration only on the DB_KEY alias."""
    return run_python_specific_db_migration(migration_func, use_db=DB_KEY)
[Usage]
def data_migration(apps, schema_editor):
    # Illustrative placeholder from the answer — replace with real logic.
    ...your data migration logic..better to wrap with #atomic...

class Migration(migrations.Migration):
    # The helper routes this RunPython to the DB whose alias equals DB_KEY.
    operations = [ run_python_your_db_migration(data_migration) ]
Is that possible to have model with foreign key fields on different databases?
example:
class MultiBDModel(models.Model):
    # NOTE(review): as the answer below states, one model's fields must all
    # live in a single database — Django cannot point each FK at a different DB.
    db1_user = models.ForeignKey(User) # here suppose to be foreign key on `db1`
    db2_user = models.ForeignKey(User) # and here on `db2`
Maybe I could somehow copy User and attach a custom manager to the copy that returns querysets with using='db1'.
in settings.py:
DATABASES = {
    'default': {
        'ENGINE': 'django.db.backends.sqlite3',  # or postgresql_psycopg2 / mysql / oracle
        'NAME': 'db1',       # path to the database file when using sqlite3
        'USER': '',          # not used with sqlite3
        'PASSWORD': '',      # not used with sqlite3
        'HOST': '',          # empty string means localhost; not used with sqlite3
        'PORT': '',          # empty string means the default port; not used with sqlite3
    },
    'website': {
        'ENGINE': 'django.db.backends.sqlite3',  # same backend options as above
        'NAME': 'db2',       # second sqlite3 file
        'USER': '',          # not used with sqlite3
        'PASSWORD': '',      # not used with sqlite3
        'HOST': '',          # not used with sqlite3
        'PORT': '',          # not used with sqlite3
    }
}
No. As written by #ignacio-vazquez-abrams, one model must have all fields in the same database.
BUT
As an alternative, you could use a proxy model to link between models from two different databases.
Aim
One model should provide the fields from db1 and db2 at the same time
General trick
You have the model ContactMessage from db1, that we will name legacy_db. We suppose you don't want to touch to this model since it comes from another project.
Create a proxy model ProxyContactMessage; it has the same attributes as ContactMessage.
Use a database router to tell Django where to look in legacy_db for ProxyContactMessage objects.
Add a new model ExtendedContactMessage with the fields you would like to add. Declare a OneToOneField with ProxyContactMessage. This data will be saved to your db2 django_db.
Your proxy model cannot hold the new fields, since a proxy model has no table of its own, but it can have methods that ask the related ExtendedContactMessage object (if any). Add the callables you want.
Example
In your legacy_app/models.py, the model on db1 legacy_db is:
class ContactMessage(models.Model):
    """Legacy message row; the table already exists in the legacy database."""
    subject = models.CharField(max_length=255)
    message = models.TextField()
    created_at = models.DateTimeField()
    created_by = models.CharField(max_length=255)

    class Meta:
        managed = False  # Django must not create or alter this table
        db_table = 'contact_message'  # exact legacy table name

    def __unicode__(self):
        # Python 2-style representation used by the admin list display.
        return self.subject
Therefore you create in myapp/models.py:
class ProxyContactMessage(ContactMessage):
    """Proxy over the legacy model so this app can attach behavior to it."""

    class Meta:
        proxy = True  # no new table; reuses ContactMessage's legacy table
        verbose_name = 'Contact message'
        verbose_name_plural = 'Contact messages'

    def add_extension(self):
        # Create the one-to-one extension row (stored in the app's own DB).
        e = ExtendedContactMessage(contact_message=self)
        e.save()
        return e

    def mark_as_processed(self):
        # Lazily create the extension row on first use, then flag it.
        try:
            e = self.extendedcontactmessage
        except ExtendedContactMessage.DoesNotExist:
            e = self.add_extension()
        e.mark_as_processed()

    def processed(self):
        # Raises DoesNotExist if the extension row was never created.
        return self.extendedcontactmessage.processed

    def processed_at(self):
        return self.extendedcontactmessage.processed_at
class ExtendedContactMessage(models.Model):
    """Extra fields for a legacy message, stored in this app's own database."""
    contact_message = models.OneToOneField(ProxyContactMessage)
    processed = models.BooleanField(default=False, editable=False)
    processed_at = models.DateTimeField(null=True, default=None, editable=False)

    def mark_as_processed(self):
        # Stamp the processing time and persist immediately.
        self.processed = True
        self.processed_at = timezone.now()
        self.save()
Note that only the concrete model ExtendedContactMessage will be saved in db2, since ProxyContactMessage is a proxy model with no table of its own.
In settings.py, set DATABASE_ROUTERS with the class
class LegacyRouter(object):
    """
    A router to control all database operations on models in the
    legacy database.
    """

    LEGACY_MODELS = ('ProxyContactMessage',)

    def _is_legacy(self, model):
        # Match by class name so the proxy is routed without importing it here.
        return model.__name__ in self.LEGACY_MODELS

    def db_for_read(self, model, **hints):
        return 'legacy_db' if self._is_legacy(model) else None

    def db_for_write(self, model, **hints):
        """Attempts to write in legacy DB for ContactMessage."""
        return 'legacy_db' if self._is_legacy(model) else None
Your default router sends everything to db2.
Finally you may have an admin class like:
def mark_as_processed(modeladmin, request, queryset):
    """Admin action: flag every selected message as processed."""
    for message in queryset:
        message.mark_as_processed()
mark_as_processed.short_description = "Mark as processed"
class ProxyContactMessageAdmin(admin.ModelAdmin):
    """Admin for the proxy model; mixes legacy columns with extension fields."""
    list_display = (
        'subject',
        'message',
        'created_at',
        'created_by',
        'processed',      # served by ProxyContactMessage.processed()
        'processed_at',   # served by ProxyContactMessage.processed_at()
    )
    actions = (mark_as_processed,)

# Register the proxy (not the legacy base model) with its admin class.
admin.site.register(ProxyContactMessage, ProxyContactMessageAdmin)
Related:
Use a router for the proxy class
"Hack" the app_name in Meta
Catch the queryset
No. The ORM cannot do anything the database engine isn't capable of.