Migrate Django model to PostgreSQL schema

I would like to create a new table in a specific PostgreSQL schema (i.e. "schema1") from a Django migration.
Despite following approach 1 from this blog or this post, the migration sends the table to the default schema "public" instead of "schema1".
In settings.py, I have:
DATABASES = {
    'default': {
        'ENGINE': 'django.db.backends.postgresql_psycopg2',
        'OPTIONS': {
            'options': '-c search_path=django,public'
        },
        'NAME': 'myDB',
        'USER': 'username',
        'PASSWORD': '***',
        'HOST': 'my.host.address',
        'PORT': '1234',
    },
    'schema1': {
        'ENGINE': 'django.db.backends.postgresql_psycopg2',
        'OPTIONS': {
            'options': '-c search_path=schema1,public'
        },
        'NAME': 'myDB',
        'USER': 'username',
        'PASSWORD': '***',
        'HOST': 'my.host.address',
        'PORT': '1234',
    }
}

# Path to DB router to handle PG schemas: https://stackoverflow.com/a/51007441/3976696
DATABASE_ROUTERS = ('djangogirls.dbrouters.MyDBRouter',)
In djangogirls/dbrouters.py, I have:
from legacydbapp.models import MyUser

# Include here any class (i.e. table) that belongs to the schema "schema1"
ROUTED_MODELS_SCHEMA1 = [MyUser]


class MyDBRouter(object):
    """
    A router to place DB queries into correct schema depending on considered tables.
    """
    def db_for_read(self, model, **hints):
        if model in ROUTED_MODELS_SCHEMA1:
            return 'schema1'
        return None

    def db_for_write(self, model, **hints):
        if model in ROUTED_MODELS_SCHEMA1:
            return 'schema1'
        return None
And the model class I'm trying to migrate, in models.py:
class MyUser(models.Model):
    first_name = models.CharField(max_length=30, default='', null=True, blank=True)
    last_name = models.CharField(max_length=30, default='', null=True, blank=True)
    profession = models.CharField(max_length=32, default='', null=True, blank=True)

    def __str__(self):
        return self.first_name + " " + self.last_name

    class Meta:
        managed = True
        db_table = 'myuser'
I ran the following commands:
$ python manage.py makemigrations legacydbapp
$ python manage.py sqlmigrate legacydbapp 0001_initial
$ python manage.py migrate legacydbapp
And the sqlmigrate returns the following SQL:
BEGIN;
--
-- Create model MyUser
--
CREATE TABLE "myuser" (
    "id" serial NOT NULL PRIMARY KEY,
    "first_name" varchar(30) NULL,
    "last_name" varchar(30) NULL,
    "profession" varchar(32) NULL
);
COMMIT;
If the DB router were working, I would instead expect the SQL to read CREATE TABLE "schema1"."myuser", but this isn't the case. Did I mess up somewhere, or is this simply not achievable in Django 2.1.5?

You have to explicitly provide the name of the database definition when running migrate:
$ python manage.py migrate legacydbapp --database schema1
To ensure that the model MyUser is only created in a specific database, your router has to implement .allow_migrate()
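A minimal sketch of what that could look like, reusing the MyDBRouter and ROUTED_MODELS_SCHEMA1 from the question (illustrative, not a drop-in):

class MyDBRouter(object):
    # ... db_for_read() and db_for_write() as above ...

    def allow_migrate(self, db, app_label, model_name=None, **hints):
        # Routed models are only created on the 'schema1' connection;
        # everything else is only migrated on 'default'.
        routed_names = [m._meta.model_name for m in ROUTED_MODELS_SCHEMA1]
        if model_name in routed_names:
            return db == 'schema1'
        return db == 'default'

With that in place, python manage.py migrate legacydbapp --database schema1 creates myuser on the schema1 connection only.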

Hi, to migrate one model to a specific schema alias you can use:
$ python manage.py migrate legacydbapp --database schema1

Related

django.db.utils.ProgrammingError: type "raster" does not exist

My models: I have created 3 models here, and when I migrate I get the error.
from django.contrib.gis.db import models
from django.contrib.gis.db.models.fields import RasterField


class WorldBorder(models.Model):
    # Regular Django fields corresponding to the attributes in the
    # world borders shapefile.
    name = models.CharField(max_length=50)
    area = models.IntegerField()
    pop2005 = models.IntegerField('Population 2005')
    fips = models.CharField('FIPS Code', max_length=2)
    iso2 = models.CharField('2 Digit ISO', max_length=2)
    iso3 = models.CharField('3 Digit ISO', max_length=3)
    un = models.IntegerField('United Nations Code')
    region = models.IntegerField('Region Code')
    subregion = models.IntegerField('Sub-Region Code')
    lon = models.FloatField()
    lat = models.FloatField()

    # GeoDjango-specific: a geometry field (MultiPolygonField)
    mpoly = models.MultiPolygonField()

    # Returns the string representation of the model.
    def __str__(self):
        return self.name


class Zipcode(models.Model):
    code = models.CharField(max_length=5)
    poly = models.PolygonField()


class Elevation(models.Model):
    name = models.CharField(max_length=100, blank=True, null=True)
    rast = RasterField(srid=2346)
My settings: the database I used is Postgres so that I can use PostGIS for GeoDjango.
DATABASES = {
    "default": {
        "ENGINE": "django.contrib.gis.db.backends.postgis",
        "NAME": "django_course",
        "USER": "postgres",
        "PASSWORD": "**************",
        "HOST": "localhost",
        "PORT": "5432",
    }
}
You are missing the PostGIS Raster extension. Create the extension and try again:
CREATE EXTENSION postgis_raster;
Quote from the documentation:
Note that a major change in 3.0 is that the raster functionality has
been broken out as a separate extension.
Use this in your database settings:
DATABASES = {
    'default': {
        'ENGINE': 'django.db.backends.postgresql_psycopg2',
        'NAME': 'database_name',
        'HOST': 'localhost',
        'USER': 'user_name',
        'PASSWORD': 'password',
        'PORT': '5432',
    }
}
Add these two lines to your migrations file elevation_zipcode.py:
from django.contrib.postgres.operations import CreateExtension

operations = [
    CreateExtension('postgis_raster'),
    # ... the migration's existing operations ...
]
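If you'd rather keep the extension in its own migration, an empty migration can be generated to hold just that operation (the app name below is only a placeholder):

$ python manage.py makemigrations yourapp --empty --name create_postgis_raster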

Can we define database name in models.py in Django?

I have multiple databases defined in settings.py. In models.py I have to use the auth_user table from a specific defined database rather than from the default database. How can I define that in models.py?
I have databases defined in settings.py as below:
DATABASES = {
    'default': {
        'ENGINE': 'django.db.backends.mysql',
        'NAME': 'sources',
        'USER': 'root',
        'PASSWORD': 'cdffd#123',
        'HOST': 'xx.xx.xx.xxx',
        'PORT': '3306',
    },
    'abc': {
        'ENGINE': 'django.db.backends.mysql',
        'NAME': 'customers',
        'USER': 'root',
        'PASSWORD': 'dsgfsd#123',
        'HOST': 'xx.xx.xx.xxx',
        'PORT': '3306',
    },
    'xyz': {
        'ENGINE': 'django.db.backends.mysql',
        'NAME': 'users',
        'USER': 'root',
        'PASSWORD': 'ewet#123',
        'HOST': 'xx.xx.xx.xxx',
        'PORT': '3306',
    },
}
In my models.py I have defined the user model as below:
class User(AbstractBaseUser, PermissionsMixin, BaseUserManager):
    name_regex = RegexValidator(regex="^[a-zA-Z]+$", message="Enter only Alphabets")  # regular expression for name
    email_regex = RegexValidator(regex="^\w.+@[a-zA-Z_]+?\.[a-zA-Z]{2,3}$", message="Enter valid Email Id")  # regular expression for email
    password = models.CharField(_("Password"), max_length=128, default='')
    last_login = models.DateTimeField(_('date Last Login'), null=True, blank=True)
    is_superuser = models.BooleanField(_("Super User"), default=0)
    username = models.CharField(_('Username'), max_length=75, blank=True, null=True)
    first_name = models.CharField(_('First name'), default='', blank=True, null=True, max_length=20)
    last_name = models.CharField(_('Last name'), default='', blank=True, null=True, max_length=20)
    email = models.EmailField(_('Enter your email address'), unique=True, max_length=254,
                              error_messages={'unique': "Email ID already registered.",
                                              'invalid': 'Enter valid Email ID'})
    is_staff = models.BooleanField(_('staff status'), default=False)
    is_active = models.BooleanField(_('active'), default=True)  # whether the user is considered "active"
    date_joined = models.DateTimeField(_('Date joined'))
    iRoleID = models.IntegerField(_("Role"), default=0)
    iModifiedBy = models.IntegerField(_("Modified By"), blank=True, null=True)
    dtModifiedOn = models.DateTimeField(_("Modified On"), blank=True, null=True)
    apitoken_validity = models.IntegerField(default=0)
    authentication_type = models.IntegerField(default=1)
    iMSISDN = models.IntegerField(_('Mobile Number'), default=0)

    objects = UserManager()

    USERNAME_FIELD = 'email'  # unique identifier; the field must be unique
    REQUIRED_FIELDS = ['username']

    def __unicode__(self):
        return self.username

    def __str__(self):
        return self.email

    class Meta:
        app_label = 'users'
        db_table = "auth_user"
By default, it takes the auth_user table from the database defined as default. But I need the auth_user table to be taken from the database defined as xyz. Is that possible? If so, how can I do that?
You can do that using database routers.
An example would be:
class AuthRouter:
    """
    A router to control all database operations on models in the
    auth application.
    """
    def db_for_read(self, model, **hints):
        """
        Attempts to read auth models go to auth_db.
        """
        if model._meta.db_table == 'auth_user':
            return 'auth_db'  # the alias of the target database ('xyz' in this question)
        return None

    def db_for_write(self, model, **hints):
        """
        Attempts to write auth models go to auth_db.
        """
        if model._meta.db_table == 'auth_user':
            return 'auth_db'  # the alias of the target database ('xyz' in this question)
        return None
Then add it to settings.py using:
DATABASE_ROUTERS = ['path.to.AuthRouter']
ref: Django Documentation
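If migrations for this model should also only run against that database, the same router can implement allow_migrate; a rough sketch for the question's setup (alias 'xyz', app label 'users') would be:

class AuthRouter:
    # ... db_for_read() and db_for_write() as above ...

    def allow_migrate(self, db, app_label, model_name=None, **hints):
        # Only create models from the 'users' app on the 'xyz' connection.
        if app_label == 'users':
            return db == 'xyz'
        return None

Individual queries can also be pointed at an alias explicitly, e.g. User.objects.using('xyz').all().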

Django and postgresql schemas

I've been trying to solve this one all week, help very much appreciated.
I have various schemas in a postgres db and I would like to be able to map to them from within the same or across different django apps.
Some of the schemas are:
samples
excavation
geophysics
...
I have tried the recommended way, but I'm not getting any data to display from the schemas; I can only connect to the public schema with managed tables. Here are the database connections from the settings.py file.
DATABASES = {
    'default': {
        'ENGINE': 'django.db.backends.postgresql_psycopg2',
        'OPTIONS': {
            'options': '-c search_path=django,public'
        },
        'NAME': 'gygaia',
        'USER': 'appuser',
        'PASSWORD': 'secret',
    },
    'samples': {
        'ENGINE': 'django.db.backends.postgresql_psycopg2',
        'OPTIONS': {
            'options': '-c search_path=samples,public'
        },
        'NAME': 'gygaia',
        'USER': 'appuser',
        'PASSWORD': 'secret',
    },
}
source: https://www.amvtek.com/blog/posts/2014/Jun/13/accessing-multiple-postgres-schemas-from-django/
In models.py I add:
from django.db import models


# Create your models here.
class Storage(models.Model):
    # id = models.IntegerField(default=0)
    storage_id = models.AutoField(primary_key=True)
    store_name = models.CharField(max_length=200, default='')
    address_1 = models.CharField(max_length=200, default='')
    address_2 = models.CharField(max_length=200, default='')
    region = models.CharField(max_length=200, default='')
    city = models.CharField(max_length=200, default='')
    zip = models.CharField(max_length=200, default='')
    country = models.CharField(max_length=200, default="Turkey")
    user = models.CharField(max_length=200, default="Gygaia")
    datestamp = models.DateTimeField(auto_now=True)

    class Meta:
        managed = False
        db_table = 'samples\".\"store'
I don't want to restrict schemas to users, and the database was created a few years ago, so I'm not allowed to bring it all under one schema. I know there are various solutions posted on Stack Overflow and other corners of the internet; I have tried these, but I'm unable to get this to work. Any ideas how to solve this one?
Because Django does not support Postgres database schemas out of the box, in order to get this to work, use a database router.
I created a test database to try this out with, here's how to reproduce it:
Create a test database with psql:
CREATE USER tester WITH PASSWORD 'lol so easy';
CREATE DATABASE multi_schema_db WITH OWNER tester;
CREATE SCHEMA samples AUTHORIZATION tester;

CREATE TABLE samples.my_samples (
    id          INTEGER   NOT NULL PRIMARY KEY,
    description CHAR(255) NOT NULL
);
Add the schemas to the settings as different database connections, remember to add HOST to avoid the “Peer authentication failed” error.
DATABASES = {
    'default': {
        'ENGINE': 'django.db.backends.postgresql_psycopg2',
        'OPTIONS': {
            'options': '-c search_path=django,public'
        },
        'NAME': 'multi_schema_db',
        'USER': 'tester',
        'PASSWORD': 'lol so easy',
        'HOST': 'localhost'
    },
    'samples': {
        'ENGINE': 'django.db.backends.postgresql_psycopg2',
        'OPTIONS': {
            'options': '-c search_path=samples,public'
        },
        'NAME': 'multi_schema_db',
        'USER': 'tester',
        'PASSWORD': 'lol so easy',
        'HOST': 'localhost'
    },
}
Next create the MySample model:
from django.db import models


class MySample(models.Model):
    description = models.CharField(max_length=255, null=False)

    class Meta:
        managed = False
        db_table = 'my_samples'
Create a database router to direct all sample-related queries to the sample database:
from database_test.models import MySample

ROUTED_MODELS = [MySample]


class MyDBRouter(object):

    def db_for_read(self, model, **hints):
        if model in ROUTED_MODELS:
            return 'samples'
        return None

    def db_for_write(self, model, **hints):
        if model in ROUTED_MODELS:
            return 'samples'
        return None
Basically, the router will route all the models specified in ROUTED_MODELS to the database connection samples and return None for all the other models. This will route them to the default database connection.
Finally add the router to your settings.py
DATABASE_ROUTERS = ('database_test.db_router.MyDBRouter',)
And now when doing a query for the MySample model, it will fetch data from the samples schema.
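For example, a quick check from the Django shell (illustrative):

>>> from database_test.models import MySample
>>> MySample.objects.all()                    # routed to the 'samples' connection, i.e. samples.my_samples
>>> MySample.objects.using('default').all()   # an explicit .using() still overrides the router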
I also consulted that source, but I could not solve it the way you did; by running some tests, however, I achieved the following.
If we have, for example, the schemas foo and bar, we write in the Meta:
class MySample1(models.Model):
    description = models.CharField(max_length=255, null=False)

    class Meta:
        managed = True
        db_table = 'foo\".\"my_samples1'


class MySample2(models.Model):
    description = models.CharField(max_length=255, null=False)

    class Meta:
        managed = True
        db_table = 'bar\".\"my_samples2'
Then we can redirect each model to the schema we want, provided managed is set to True. The limitation is that we have to name the table ourselves.
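As a sanity check (assuming the models live in an app called myapp), sqlmigrate should show the schema-qualified table name, roughly:

$ python manage.py makemigrations myapp
$ python manage.py sqlmigrate myapp 0001_initial
CREATE TABLE "foo"."my_samples1" ("id" serial NOT NULL PRIMARY KEY, "description" varchar(255) NOT NULL);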
First create the tables in Postgres using schemas, then access them in Django with python manage.py inspectdb > models.py, and then migrate back.
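inspectdb can also be pointed at one of the extra connections from the settings above so the generated models reflect that schema's tables (the output file name is arbitrary):

$ python manage.py inspectdb --database samples > samples_models.py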

Django ValueError: Can't do subqueries with queries on different DBs

DATABASES = {
    'default': {
        'ENGINE': 'django.db.backends.postgresql_psycopg2',
        'NAME': 'default_db',
        'USER': 'user',
        'PASSWORD': '123123123',
        'HOST': 'localhost',
        'PORT': '',
    },
    'omskgkh': {
        'NAME': 'general',
        'ENGINE': 'django.db.backends.postgresql_psycopg2',
        'USER': 'user',
        'PASSWORD': '123123123',
        'HOST': '123.123.123.123',
        'PORT': '',
    },
}
In my view:
def districtreport(request):
    info = models.InfoAddress.objects.using('general')
    kao = info.filter(okrug='КАО').values('home')
    kao_accounts = models.Operation.objects.using('general').filter(account_id__home_id=kao)
On the dev server (where the default database is sqlite3) that view works fine, but on the production server Django raises:
Exception Value: Can't do subqueries with queries on different DBs.
Please help.
ADD:
models.py
class Home(models.Model):
    id = models.IntegerField(primary_key=True)
    ...

    class Meta:
        db_table = "home"
        managed = False


class InfoAddress(models.Model):
    id = models.IntegerField(primary_key=True)
    home = models.ForeignKey(Home)
    okrug = models.CharField(max_length=255)
    ...

    class Meta:
        db_table = "infoaddress"
        managed = False


class Account(models.Model):
    id = models.IntegerField(primary_key=True)
    home = models.ForeignKey(Home)
    ...

    class Meta:
        db_table = "account"
        managed = False


class Operation(models.Model):
    id = models.IntegerField(primary_key=True)
    account = models.ForeignKey(Account)
    ...

    class Meta:
        db_table = "account_op"
        managed = False
I found one way to solve the problem: evaluate the inner QuerySet to a list and filter with that.
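Applied to the view above, that could look roughly like this (same fields as in the question; the Operation lookup is spelled through the account relation):

def districtreport(request):
    info = models.InfoAddress.objects.using('general')
    # Evaluate the inner query to a plain list first, so Django never builds a cross-DB subquery
    kao_home_ids = list(info.filter(okrug='КАО').values_list('home_id', flat=True))
    kao_accounts = models.Operation.objects.using('general').filter(account__home_id__in=kao_home_ids)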
Updating to cacheops 2.4.1 or later will fix it.
A corresponding issue is now fixed.
The problem was caused by the 'cacheops' app; it has been fixed by its author.

Django using legacy database - OperationalError 'Unknown column'

I am using a legacy database (MySQL) in my Django project.
DATABASES = {
    'default': {
        'ENGINE': 'django.db.backends.mysql',
        'NAME': 'mydb',
        'USER': 'root',
        'PASSWORD': 'mydbpassword',
        'HOST': '127.0.0.1',
        'PORT': '3306',
    }
}
The syncdb runs fine and doesn't raise any errors. But whenever I run these commands,
>>> from webservice.models import Users
>>> u = Users.objects.all()
>>> print u
I get this error,
OperationalError: (1054, "Unknown column 'users.id' in 'field list'")
Here's my Users model from inspectdb,
class Users(models.Model):
    emp_id = models.IntegerField()
    username = models.CharField(max_length=128)
    email = models.CharField(max_length=128)
    status = models.CharField(max_length=5)

    class Meta:
        managed = False
        db_table = 'users'

    def __unicode__(self):
        return self.username
What am I missing here?
Update
I also tried this solution but it doesn't work for me.
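One likely cause, offered only as a guess: the model defines no primary key, so Django assumes an implicit id column that the legacy users table doesn't have. If emp_id is in fact the table's primary key (an assumption about the legacy schema, not confirmed above), marking it as such avoids the missing users.id lookup:

class Users(models.Model):
    emp_id = models.IntegerField(primary_key=True)  # tell Django not to expect a 'users.id' column
    username = models.CharField(max_length=128)
    email = models.CharField(max_length=128)
    status = models.CharField(max_length=5)

    class Meta:
        managed = False
        db_table = 'users'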