I have two databases, which I have been running on a single machine. My question is: how can I add more database servers, and how can I use sharding in Django to split the data in both databases by user? For example, user 1's data (from db1 and db2) would live on the first server, and user 2's data on another.
You need to define a router in settings:
DATABASE_ROUTERS = ['routers.routers.ModelDatabaseRouter']
Then create a routers.py file inside a routers package (matching the dotted path above):
class ModelDatabaseRouter(object):
    """Allows each model to set its own destiny"""

    def db_for_read(self, model, **hints):
        # Specify target database with field in_db in model's Meta class
        if hasattr(model._meta, 'in_db'):
            return model._meta.in_db
        return None

    def db_for_write(self, model, **hints):
        # Specify target database with field in_db in model's Meta class
        if hasattr(model._meta, 'in_db'):
            return model._meta.in_db
        return None

    # Note: on Django >= 1.7 this hook is named allow_migrate(db, app_label,
    # model_name=None, **hints); allow_syncdb is the pre-1.7 name.
    def allow_syncdb(self, db, model):
        # Specify target database with field in_db in model's Meta class
        if hasattr(model._meta, 'in_db'):
            return model._meta.in_db == db
        # Models that don't specify a database can only go to 'default'
        return db == 'default'
(from https://djangosnippets.org/snippets/2687/)
Now you can define the DB in your models like this:
class YourClass(models.Model):
    name = models.CharField(primary_key=True, max_length=50)
    creation_time = models.DateTimeField()

    class Meta:
        in_db = 'api'  # <-- THIS
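A caveat (an addition, not part of the snippet): modern Django validates Meta and raises TypeError: 'class Meta' got invalid attribute(s): in_db for options it does not know. A safe variant keeps the marker on the model class itself and reads it with getattr in the router:

class YourClass(models.Model):
    in_db = 'api'  # plain class attribute instead of a Meta option
    name = models.CharField(primary_key=True, max_length=50)
    creation_time = models.DateTimeField()

# ...and in each router method, replace the Meta lookup with:
#     return getattr(model, 'in_db', None)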
The db must be defined in settings:
DATABASES = {
    'default': {
        'ENGINE': 'django.db.backends.mysql',
        'NAME': 'XXXXX',
        'USER': 'XXXXX',
        'PASSWORD': 'XXXXXX',
        'HOST': '127.0.0.1',
        'PORT': '3306',
    },
    'api': {
        'ENGINE': 'django.db.backends.mysql',
        'NAME': 'xxxx',
        'USER': 'xxxx',
        'PASSWORD': 'xxxxxx',
        'HOST': '127.0.0.1',
        'PORT': '3306',
    }
}
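Note that the router above picks a database per model, not per user. For the per-user sharding in the question you still have to choose an alias at query time; here is a minimal sketch (the shard aliases and the Order model are hypothetical, not from the answer):

# Assumes DATABASES also defines 'shard0' and 'shard1', pointing at the
# two MySQL servers, each holding the same schema.
NUM_SHARDS = 2

def shard_for_user(user_id):
    # Stable mapping: user 1 -> 'shard1', user 2 -> 'shard0', and so on.
    return 'shard%d' % (user_id % NUM_SHARDS)

# Route one user's rows to their shard explicitly:
alias = shard_for_user(user.pk)
orders = Order.objects.using(alias).filter(user_id=user.pk)
order = Order(user_id=user.pk, total=42)
order.save(using=alias)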
I am trying to use multiple databases in Django. I set up my databases like this:
# settings.py
DATABASE_ROUTERS = [
    'stage1.routers.MultiDBRouter',
]

DATABASE_APPS_MAPPING = {
    'stage1': 'stage1',
}

DATABASES = {
    'default': {
        'ENGINE': 'django.db.backends.sqlite3',
        'NAME': BASE_DIR / 'db.sqlite3',
    },
    'stage1': {
        'ENGINE': 'django.db.backends.mysql',
        'NAME': 'stage1',
        'HOST': 'localhost',
        'USER': 'root',
        'PASSWORD': '######',
        'PORT': 3306,
        'CHARSET': 'utf8mb4',
    },
}
# stage1/routers.py
class MultiDBRouter(object):
    def __init__(self):
        self.model_list = ['stage1']

    def db_for_read(self, model, **hints):
        if model._meta.app_label in self.model_list:
            return model._meta.app_label
        return None

    def db_for_write(self, model, **hints):
        if model._meta.app_label == 'stage1':
            return 'stage1'
        return None

    def allow_relation(self, obj1, obj2, **hints):
        if (obj1._meta.app_label in self.model_list or
                obj2._meta.app_label in self.model_list):
            return True
        return None

    def allow_migrate(self, db, app_label, model_name=None, **hints):
        if app_label == 'stage1':
            return db == 'stage1'
        return None
# stage1/models.py
from django.db import models

class Room(models.Model):
    name = models.CharField(max_length=100)
    sort = models.CharField(max_length=100)

    class Meta:
        app_label = "stage1"
I ran manage.py migrate --database=stage1 and it worked, but something happened that I didn't intend.
I wanted the stage1 database to contain only one table, room. Instead it also got all the default tables, such as auth_group, django_session, and so on.
How can I make the stage1 database contain only the room table?
Please help me.
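A likely cause: allow_migrate returns None for every app other than stage1, and Django treats None as "no opinion", so migrate is still allowed to create auth_group, django_session and the rest inside stage1. A sketch of a stricter allow_migrate (assuming the rest of MultiDBRouter stays as above):

def allow_migrate(self, db, app_label, model_name=None, **hints):
    if app_label == 'stage1':
        # stage1 models live only in the stage1 database.
        return db == 'stage1'
    # Pin every other app (auth, sessions, admin, ...) to 'default'
    # instead of returning None, which would let it into stage1 too.
    return db == 'default'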
For example, I have these two databases. When creating a model class, how do I tell Django to create that model's table in the 'movie' database and not in the 'default' one?
DATABASES = {
    'default': {
        'ENGINE': 'django.db.backends.mysql',
        'NAME': 'projectInfo',
        'USER': 'root',
        'PASSWORD': '123#abc',
    },
    'movie': {
        'ENGINE': 'django.db.backends.mysql',
        'NAME': 'movieDB',
        'USER': 'root',
        'PASSWORD': '123#abc',
    }
}
You can construct a database router [Django-doc]. Based on the model, this specifies which database the query should be routed to. For example, if you want to route the movie.Movie model to the movie database, you use:
class MyRouter:
    # Keys are lowercased model labels ('app_label.modelname').
    special_routing = {
        'movie.movie': 'movie'
    }

    def db_for_read(self, model, **hints):
        return self.special_routing.get(model._meta.label_lower)

    def db_for_write(self, model, **hints):
        return self.special_routing.get(model._meta.label_lower)

    def allow_relation(self, obj1, obj2, **hints):
        return self.special_routing.get(obj1._meta.label_lower) == self.special_routing.get(obj2._meta.label_lower)

    def allow_migrate(self, db, app_label, model_name=None, **hints):
        return db == self.special_routing.get(f'{app_label}.{model_name}', 'default')
Then we can add this to the DATABASE_ROUTERS setting [Django-doc]:
DATABASE_ROUTERS = ['path.to.MyRouter']
You can of course write a more sophisticated router that hints in more detail at which database to read from and write to.
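With the router registered, ordinary ORM calls on movie.Movie are routed transparently. One step that is easy to miss (a note added here, not part of the original answer): migrations still have to be applied once per database alias:

python manage.py migrate                    # applies migrations to 'default'
python manage.py migrate --database=movie   # applies them to 'movie'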
I am trying to implement multiple-database support for my Django (version 1.11) app. For that purpose I have included this in my settings.py:
DATABASES = {
    'default': {
        'ENGINE': 'django.db.backends.postgresql_psycopg2',
        'NAME': 'labrin_dbase',
        'USER': 'labrin_admin',
        'PASSWORD': 'ndzwwZHv63STuvAF?C_$L#j#*#epZXaX',
        'HOST': 'localhost',
        'PORT': '5432',
    },
    'comment': {
        'ENGINE': 'django.db.backends.postgresql_psycopg2',
        'NAME': 'labrin_dbase_comments',
        'USER': 'labrin_admin_comments',
        'PASSWORD': 'adzwaTHv63STuvAF!C_$L#j#*#epZXaY',
        'HOST': 'localhost',
        'PORT': '5433',
    }
}
DATABASE_ROUTERS = [
    'labrin_task.comment_router.CommentRouter',
]
And my database router is configured as below:
class CommentRouter(object):
    def db_for_read(self, model, **hints):
        if model._meta.db_table == 'todo_comment':
            return 'comment'
        return None

    def db_for_write(self, model, **hints):
        if model._meta.db_table == 'todo_comment':
            return 'comment'
        return None

    def allow_relation(self, obj1, obj2, **hints):
        return True

    def allow_migrate(self, db, app_label, model_name=None, **hints):
        if model_name == 'comment':
            return db == 'comment'
        return None
Models in my "todo" app (which is the only app in the project):
from django.db import models
from django.contrib.auth import get_user_model

UserModel = get_user_model()


class Todo(models.Model):
    name = models.CharField(max_length=64)
    description = models.TextField()
    author = models.ForeignKey(UserModel, on_delete=models.CASCADE)
    deadline = models.DateTimeField()
    created_at = models.DateTimeField(auto_now_add=True)


class Comment(models.Model):
    todo = models.ForeignKey(Todo, on_delete=models.CASCADE)
    author = models.ForeignKey(UserModel, on_delete=models.CASCADE)
    text = models.CharField(max_length=256)
    created_at = models.DateTimeField(auto_now_add=True)


class ShareTodo(models.Model):
    todo = models.ForeignKey(Todo, on_delete=models.CASCADE)
    with_user = models.ForeignKey(UserModel, on_delete=models.CASCADE)
    comment_allowed = models.BooleanField(default=False)
When I remove the comment database and DATABASE_ROUTERS from settings.py, my app works normally. After adding them back, the app returns an error when I create a Comment object. The error says:
Exception inside application: insert or update on table "todo_comment" violates foreign key constraint "todo_comment_author_id_bb272a3e_fk_auth_user_id"
DETAIL: Key (author_id)=(1) is not present in table "auth_user". What am I doing wrong?
Note: I am starting two Postgres servers as separate Docker containers, and after running the containers I run python manage.py migrate and python manage.py migrate --database=comment to apply all migrations.
Sorry, but cross-database relations are not possible in Django. You can find the full explanation in the Django docs.
Furthermore, PostgreSQL itself does not support cross-database foreign keys, so trying to hack around this outside of Django won't work either. It may be possible with other database engines; you can do your own research.
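If the comments really must live in their own database, a common workaround (a suggestion, not part of the answer above) is to drop the cross-database ForeignKeys, store plain integer ids in the comment database, and resolve them manually with a second query. The price is that you lose referential integrity and JOINs across the databases:

from django.contrib.auth import get_user_model
from django.db import models

UserModel = get_user_model()


class Comment(models.Model):
    # Plain ids instead of ForeignKeys: no cross-database constraint is
    # created, so this table can live in the 'comment' database.
    todo_id = models.IntegerField()
    author_id = models.IntegerField()
    text = models.CharField(max_length=256)
    created_at = models.DateTimeField(auto_now_add=True)

    @property
    def author(self):
        # Explicit second query against the database that holds the users.
        return UserModel.objects.using('default').get(pk=self.author_id)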
I have implemented automatic DB routing in Django, using AWS Aurora with replication for the database. I have found a minor replication lag in my database that hampers the flow. The issue occurs when, say, a queryset is read via 'slave' and a value is then updated through that queryset: an error appears along the lines of 'read-only access for that table', meaning the update should have been routed to the master DB.
Here is my DB Settings for Multiple DB:
DATABASES = {
    'master': {
        'ENGINE': 'django.db.backends.mysql',
        'STORAGE_ENGINE': 'MyISAM / INNODB / ETC',
        'NAME': 'db',
        'USER': 'master',
        'PASSWORD': 'master',
        'HOST': 'localhost',
        'PORT': '3306',
    },
    'slave': {
        'ENGINE': 'django.db.backends.mysql',
        'STORAGE_ENGINE': 'MyISAM / INNODB / ETC',
        'NAME': 'db',
        'USER': 'name',
        'PASSWORD': 'pass',
        'HOST': 'localhost',
        'PORT': '3306',
    },
    'slave2': {
        'ENGINE': 'django.db.backends.mysql',
        'STORAGE_ENGINE': 'MyISAM / INNODB / ETC',
        'NAME': 'db',
        'USER': 'name',
        'PASSWORD': 'pass',
        'HOST': 'localhost',
        'PORT': '3306',
    }
}
DATABASE_ROUTERS = ['path.to.AuthRouter']
Please suggest the best way to route between multiple databases automatically in Django.
"""
DB Router core class which auto selects required database configuration
"""
class AuthRouter:
    def db_for_read(self, model, **hints):
        """
        Reads go to a replica.
        """
        print('db_for_read')
        print(model)
        return 'slave'

    def db_for_write(self, model, **hints):
        """
        Writes always go to master, i.e. default.
        """
        print('db_for_write')
        print(model)
        return 'master'

    def allow_relation(self, obj1, obj2, **hints):
        """
        Relations between objects are allowed if both objects are
        in the master/replica pool.
        """
        db_list = ('master', 'slave')
        if obj1._state.db in db_list and obj2._state.db in db_list:
            return True
        return None

    def allow_migrate(self, db, app_label, model_name=None, **hints):
        """
        Migrations are allowed on every database.
        """
        return True
You can split the read operations and isolate the writes to the master DB, like below.
The AuthRouter routes auth reads to the master DB, since that holds the freshest information about users:
class AuthRouter:
    """
    Routes the auth app to master, since that holds the freshest
    information about users; everything else is left to the next router.
    """
    def db_for_read(self, model, **hints):
        """
        Auth reads go to master, so they never see replica lag.
        """
        if model._meta.app_label == 'auth':
            return 'master'
        return None

    def db_for_write(self, model, **hints):
        """
        Auth writes always go to master.
        """
        if model._meta.app_label == 'auth':
            return 'master'
        return None

    def allow_relation(self, obj1, obj2, **hints):
        """
        Relations between objects are allowed if both objects are
        in the master/replica pool.
        """
        db_list = ('master', 'slave', 'slave2')
        if obj1._state.db in db_list and obj2._state.db in db_list:
            return True
        return None

    def allow_migrate(self, db, app_label, model_name=None, **hints):
        """
        Auth models migrate on master only; no opinion otherwise.
        """
        if app_label == 'auth':
            return db == 'master'
        return None
class PrimaryReplicaRouter(object):
    """
    A router to control all read/write database operations.
    """
    def db_for_read(self, model, **hints):
        """
        Reads go to a randomly-chosen database (master or a replica).
        """
        return select_rand_db()

    def db_for_write(self, model, **hints):
        """
        Writes always go to the primary ('master').
        """
        return 'master'

    def allow_relation(self, obj1, obj2, **hints):
        """
        Relations between objects are allowed if both objects are
        in the primary/replica pool.
        """
        return True

    def allow_migrate(self, db, app_label, model_name=None, **hints):
        """
        Migrations are allowed on every database.
        """
        return True
from random import choice


def select_rand_db():
    """
    Return a randomly chosen database alias for reads: 'master' or
    'slave', with equal probability.
    """
    # stdlib choice is enough for a 50/50 pick (the original used numpy).
    return choice(['master', 'slave'])
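Two wiring notes (additions, not from the original answer). First, routers are consulted in the order they appear in DATABASE_ROUTERS, and the first method that returns something other than None wins, so AuthRouter has to come first:

DATABASE_ROUTERS = [
    'path.to.AuthRouter',
    'path.to.PrimaryReplicaRouter',
]

Second, for the replication-lag error in the question, you can pin a read-modify-write sequence to the master explicitly (Article is a hypothetical model):

from django.db import transaction

with transaction.atomic(using='master'):
    # Read the row from 'master' before updating it, so the update
    # never operates on stale replica data.
    article = Article.objects.using('master').select_for_update().get(pk=42)
    article.title = 'updated'
    article.save(using='master')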
Is it possible to have a model with foreign key fields that point into different databases?
For example:

class MultiBDModel(models.Model):
    db1_user = models.ForeignKey(User)  # here supposed to be a foreign key into `db1`
    db2_user = models.ForeignKey(User)  # and here into `db2`

Maybe I could somehow copy User, and give it a custom manager that returns querysets with using='db1'?
in settings.py:

DATABASES = {
    'default': {
        'ENGINE': 'django.db.backends.sqlite3',  # Or 'postgresql_psycopg2', 'mysql', 'oracle'.
        'NAME': 'db1',   # Or path to database file if using sqlite3.
        'USER': '',      # Not used with sqlite3.
        'PASSWORD': '',  # Not used with sqlite3.
        'HOST': '',      # Empty string for localhost. Not used with sqlite3.
        'PORT': '',      # Empty string for default. Not used with sqlite3.
    },
    'website': {
        'ENGINE': 'django.db.backends.sqlite3',
        'NAME': 'db2',
        'USER': '',
        'PASSWORD': '',
        'HOST': '',
        'PORT': '',
    }
}
No. As written by @ignacio-vazquez-abrams, one model must have all its fields in the same database.
BUT
As an alternative, you can use a proxy model to link models from two different databases.
Aim
One model should provide the fields from db1 and db2 at the same time.
General trick
1. You have the model ContactMessage from db1, which we will call legacy_db. We assume you don't want to touch this model, since it comes from another project.
2. Create a proxy model ProxyContactMessage; it has the same attributes as ContactMessage.
3. Use a database router to tell Django to look in legacy_db for ProxyContactMessage objects.
4. Add a new model ExtendedContactMessage with the fields you would like to add, and declare a OneToOneField to ProxyContactMessage. This data will be saved to your db2, django_db.
5. The proxy model cannot hold the new fields, since proxy models cannot add fields, but it can have methods that fetch the related ExtendedContactMessage object (if any). Add the callables you want.
Example
In your legacy_app/models.py, the model on db1 legacy_db is:
class ContactMessage(models.Model):
    subject = models.CharField(max_length=255)
    message = models.TextField()
    created_at = models.DateTimeField()
    created_by = models.CharField(max_length=255)

    class Meta:
        managed = False
        db_table = 'contact_message'

    def __str__(self):
        return self.subject
Therefore you create in myapp/models.py:
from django.db import models
from django.utils import timezone

from legacy_app.models import ContactMessage


class ProxyContactMessage(ContactMessage):
    class Meta:
        proxy = True
        verbose_name = 'Contact message'
        verbose_name_plural = 'Contact messages'

    def add_extension(self):
        e = ExtendedContactMessage(contact_message=self)
        e.save()
        return e

    def mark_as_processed(self):
        try:
            e = self.extendedcontactmessage
        except ExtendedContactMessage.DoesNotExist:
            e = self.add_extension()
        e.mark_as_processed()

    def processed(self):
        return self.extendedcontactmessage.processed

    def processed_at(self):
        return self.extendedcontactmessage.processed_at


class ExtendedContactMessage(models.Model):
    contact_message = models.OneToOneField(ProxyContactMessage, on_delete=models.CASCADE)
    processed = models.BooleanField(default=False, editable=False)
    processed_at = models.DateTimeField(null=True, default=None, editable=False)

    def mark_as_processed(self):
        self.processed = True
        self.processed_at = timezone.now()
        self.save()
Note that only ExtendedContactMessage gets its own table, saved in db2; ProxyContactMessage is a proxy, so it has no table of its own.
In settings.py, point DATABASE_ROUTERS at the following class:

class LegacyRouter(object):
    """
    A router to control all database operations on models in the
    legacy database.
    """
    def db_for_read(self, model, **hints):
        if model.__name__ == 'ProxyContactMessage':
            return 'legacy_db'
        return None

    def db_for_write(self, model, **hints):
        """
        Attempts to write ContactMessage objects to the legacy DB.
        """
        if model.__name__ == 'ProxyContactMessage':
            return 'legacy_db'
        return None
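And register it (the module path here is just an example):

DATABASE_ROUTERS = ['myapp.routers.LegacyRouter']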
Everything the router does not claim falls back to your default database, which is db2 here.
Finally you may have an admin class like:
def mark_as_processed(modeladmin, request, queryset):
    for obj in queryset:
        obj.mark_as_processed()
mark_as_processed.short_description = "Mark as processed"


class ProxyContactMessageAdmin(admin.ModelAdmin):
    list_display = (
        'subject',
        'message',
        'created_at',
        'created_by',
        'processed',
        'processed_at',
    )
    actions = (mark_as_processed,)

admin.site.register(ProxyContactMessage, ProxyContactMessageAdmin)
Related:
- Use a router for the proxy class
- "Hack" the app_name in Meta
- Catch the queryset
No. The ORM cannot do anything the database engine isn't capable of.