djongo RunPython in database migration - django

There's a Django app that uses djongo as the database driver. I want to add a custom migration using RunPython, but I can't figure out how to reach the PyMongo client. Here's the code:
from django.db import migrations

def _custom_migration(apps, schema_editor):
    db = ...  # what to put here?
    db.collection1.update_many({}, [{'$set': {"field2": "$field1.id"}}])

class Migration(migrations.Migration):
    operations = [
        migrations.RunPython(_custom_migration),
    ]

You can reach PyMongo:
client (MongoClient instance) from schema_editor.connection.client_connection.
db (Database instance) from schema_editor.connection.connection.
def _custom_migration(apps, schema_editor):
    db = schema_editor.connection.connection
    db.collection1.update_many({}, [{'$set': {"field2": "$field1.id"}}])
Source references:
schema_editor.connection holds the DatabaseWrapper instance.
DatabaseWrapper.client_connection holds the MongoClient instance.
(Base)DatabaseWrapper.connection holds the Database instance.
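Putting that together, a migration along these lines should work. This is only a sketch: collection1 and the update pipeline come from the question, and the RunPython.noop reverse is an optional extra, not something the answer requires.

from django.db import migrations

def _custom_migration(apps, schema_editor):
    # schema_editor.connection.client_connection would give the MongoClient,
    # but for collection-level updates the Database handle is enough:
    db = schema_editor.connection.connection
    db.collection1.update_many({}, [{'$set': {"field2": "$field1.id"}}])

class Migration(migrations.Migration):
    dependencies = []
    operations = [
        migrations.RunPython(_custom_migration, migrations.RunPython.noop),
    ]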

From the documentation at https://pymongo.readthedocs.io/en/stable/tutorial.html, try this:

from pymongo import MongoClient

def _custom_migration(apps, schema_editor):
    client = MongoClient('localhost', 27017)
    db = client.test_database
    db.collection1.update_many({}, [{'$set': {"field2": "$field1.id"}}])

Related

how to keep data created in the ready method? Django production vs test database

As you know, Django gives you a clean database in testing, but I have a ready() method that creates some data for me, and I need to query that data in my tests.
class YourAppConfig(AppConfig):
    default_auto_field = 'django.db.models.AutoField'
    name = 'Functions.MyAppsConfig'

    def ready(self):
        from django.contrib.auth.models import Permission
        from django import apps
        from django.contrib.contenttypes.models import ContentType
        try:
            Permission.objects.get_or_create(....)
            MyOtherModel.objects.get_or_create(....)
        except:
            pass

class TestRules(APITestCase):
    def test_my_model(self):
        ....
        x = MyOtherModel.objects.filter(....).first()
        # x = None # <=========== problem is here ========= I need to see the data that I created in the ready method
        ....
You can use fixtures for that; in each test case you can attach fixtures as described in the documentation. For example:

class Test(TransactionTestCase):
    fixtures = ['user-data.json']

    def setUp(self):
        …

Django will load the fixtures before each test case runs.
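As a rough sketch of how that looks in practice (the fixture file name, model, and lookup below are placeholders, not the asker's real ones): dump the rows you need once with manage.py dumpdata into a fixture, then reference it from the test class:

# tests.py (illustrative only)
from django.test import TransactionTestCase
from myapp.models import MyOtherModel  # placeholder model

class TestRules(TransactionTestCase):
    fixtures = ['my-app-data.json']  # loaded into the empty test DB before each test

    def test_my_model(self):
        x = MyOtherModel.objects.filter(name='seeded-row').first()
        self.assertIsNotNone(x)  # the row comes from the fixture, not from ready()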

Flask-Migrate - 'Please edit configuration/connection/logging settings' on 'flask db init'

Have spent literally the past three days trying to figure this out and just can't seem to get it. Have reviewed other posts here and elsewhere regarding the same issue (see here, here, here, among others) countless times and am apparently just too dense to figure it out myself. So here we are, any help would be greatly appreciated.
Project structure:
/project-path/
/app.py
/config.py
/.flaskenv
/app/__init__.py
/app/models.py
/app/routes.py
config.py
import os
from dotenv import load_dotenv

basedir = os.path.abspath(os.path.dirname(__file__))
load_dotenv(os.path.join(basedir, '.env'))

class Config(object):
    SECRET_KEY = os.environ.get('SECRET_KEY') or 'bleh'
    SQLALCHEMY_DATABASE_URI = os.environ.get('DATABASE_URL') or \
        'sqlite:///' + os.path.join(basedir, 'app.db')
    SQLALCHEMY_TRACK_MODIFICATIONS = False
app.py
from app import create_app
app = create_app()
.flaskenv
FLASK_APP=app.py
/app/__init__.py
import os
from flask import Flask
from flask_sqlalchemy import SQLAlchemy
from flask_migrate import Migrate
from flask_marshmallow import Marshmallow
from config import Config

db = SQLAlchemy()
migrate = Migrate()
ma = Marshmallow()

def create_app(config_class=Config):
    app = Flask(__name__)
    app.config.from_object(config_class)

    from app.models import Borrower
    db.init_app(app)
    migrate.init_app(app, db)

    from app.routes import borrowers_api
    app.register_blueprint(borrowers_api)

    return app
/app/models.py
from app import db, ma
from marshmallow_sqlalchemy import SQLAlchemyAutoSchema

class Borrower(db.Model):
    __tablename__ = 'borrowers'
    id = db.Column(db.Integer, primary_key=True, autoincrement=True)
    borrower_name = db.Column(db.String(100), nullable=False, unique=True, index=True)

    def __init__(self, borrower_name):
        self.borrower_name = borrower_name

class BorrowerSchema(ma.SQLAlchemyAutoSchema):
    class Meta:
        model = Borrower
        include_relationships = True
        load_instance = True  # Optional: deserialize to model instances

borrower_schema = BorrowerSchema()
borrowers_schema = BorrowerSchema(many=True)
/app/routes.py
from flask import Blueprint, request, jsonify
from app.models import Borrower, borrower_schema, borrowers_schema
from app import db

borrowers_api = Blueprint('borrowers_api', __name__)

# Add a borrower
@borrowers_api.route('/borrower', methods=['POST'])
def add_borrower():
    borrower_name = request.get_json(force=True)['borrower_name']
    new_borrower = Borrower(borrower_name)
    db.session.add(new_borrower)
    db.session.commit()
    return borrower_schema.jsonify(new_borrower)

# Get single borrower by id
@borrowers_api.route('/borrower/<id>', methods=['GET'])
def get_borrower(id):
    borrower = Borrower.query.get(id)
    return borrower_schema.jsonify(borrower)
Yet I continue to run into the following:
(env-name) C:\Users\...\flask-scratchwork\flask-restapi-tryagain>flask db init
Creating directory C:\Users\...\flask-scratchwork\flask-restapi-tryagain\migrations ... done
Creating directory C:\Users\...\flask-scratchwork\flask-restapi-tryagain\migrations\versions ... done
Generating C:\Users\...\flask-scratchwork\flask-restapi-tryagain\migrations\alembic.ini ... done
Generating C:\Users\...\flask-scratchwork\flask-restapi-tryagain\migrations\env.py ... done
Generating C:\Users\...\flask-scratchwork\flask-restapi-tryagain\migrations\README ... done
Generating C:\Users\...\flask-scratchwork\flask-restapi-tryagain\migrations\script.py.mako ... done
Please edit configuration/connection/logging settings in 'C:\\Users\\...\\flask-scratchwork\\flask-restapi-tryagain\\migrations\\alembic.ini' before proceeding.
Where am I going wrong?
Never mind, I think the answer was actually just to proceed with flask db migrate despite:
Please edit configuration/connection/logging settings in 'C:\\Users\\...\\flask-scratchwork\\flask-restapi-tryagain\\migrations\\alembic.ini' before proceeding.
So for anyone else spending hours trying to discern precisely what type of edits Flask-Migrate wants you to make to the "configuration/connection/logging settings" in alembic.ini... the answer is seemingly to just proceed with flask db migrate.
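In other words, the alembic.ini generated by flask db init can be left alone for a typical Flask-Migrate setup; the usual next steps (a rough outline, assuming the app factory above is picked up via FLASK_APP=app.py) are simply:

flask db migrate -m "initial migration"
flask db upgrade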

Graphene-Django: In schema combine Query-objects (only takes first argument)

I am trying to combine multiple Query schemas located in different apps in Django 2.1, using graphene-django 2.2 (I have tried 2.1 with the same problem) and Python 3.7.
The Query class only registers the first argument; shop.schema.Query in the example below.
import graphene
import graphql_jwt
from django.conf import settings

import about.schema
import shop.schema
import landingpage.schema

class Query(about.schema.Query, shop.schema.Query, landingpage.schema.Query, graphene.ObjectType):
    pass

class Mutation(shop.schema.Mutation, graphene.ObjectType):
    token_auth = graphql_jwt.ObtainJSONWebToken.Field()
    verify_token = graphql_jwt.Verify.Field()
    refresh_token = graphql_jwt.Refresh.Field()

schema = graphene.Schema(query=Query, mutation=Mutation)
Why is it like this? Has something changed with classes in Python 3.7? The graphene tutorial says this will inherit from multiple Queries...
class Query(cookbook.ingredients.schema.Query, graphene.ObjectType):
    # This class will inherit from multiple Queries
    # as we begin to add more apps to our project
    pass

schema = graphene.Schema(query=Query)
I am exporting my schema to schema.json for use with React Relay. I do find my "collection" Query schema from landingpage (the third argument). Relay returns:
ERROR: GraphQLParser: Unknown field collection on type Viewer.
Source: document AppQuery file: containers/App/index.js.
Is it a problem with Relay reading my schema.json?
I managed to solve it shortly after writing this. My problem was that I had a Viewer object in every app, because I find it useful to have a viewer GraphQL root, like this:

viewer {
    collection {
        somestuff
    }
}
I moved the Viewer object up to the root schema.py like this:

class Viewer(about.schema.Query, landingpage.schema.Query, shop.schema.Query, graphene.ObjectType):
    class Meta:
        interfaces = [relay.Node, ]

class Query(graphene.ObjectType):
    viewer = graphene.Field(Viewer)

    def resolve_viewer(self, info, **kwargs):
        return Viewer()

class Mutation(shop.schema.Mutation, graphene.ObjectType):
    token_auth = graphql_jwt.ObtainJSONWebToken.Field()
    verify_token = graphql_jwt.Verify.Field()
    refresh_token = graphql_jwt.Refresh.Field()

schema = graphene.Schema(query=Query, mutation=Mutation)
Next to settings.py (in your project package), add a new file schema.py, and combine your Queries and Mutations in it as follows:

import graphene

import about.schema
import shop.schema
import landingpage.schema

class Query(about.schema.Query, shop.schema.Query, landingpage.schema.Query, graphene.ObjectType):
    pass

class Mutation(about.schema.Mutation, shop.schema.Mutation, landingpage.schema.Mutation, graphene.ObjectType):
    pass

schema = graphene.Schema(query=Query, mutation=Mutation)
Configure your combined schema in settings.py as follows:
GRAPHENE = {
    "SCHEMA": "core.schema.schema",
}
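For reference, this pattern assumes each app exposes its own Query mixin in an app-level schema.py. A minimal sketch of what such a per-app schema could look like (the AboutPage model and field names here are made up for illustration):

# about/schema.py (hypothetical example)
import graphene
from graphene_django import DjangoObjectType
from about.models import AboutPage  # hypothetical model

class AboutPageType(DjangoObjectType):
    class Meta:
        model = AboutPage

class Query(graphene.ObjectType):
    about_pages = graphene.List(AboutPageType)

    def resolve_about_pages(self, info, **kwargs):
        return AboutPage.objects.all()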

Enable integrity checking with sqlite in django

In my django project, I use mysql db for production, and sqlite for tests.
Problem is, some of my code rely on model integrity checking. It works well with mysql, but integrity errors are not thrown when the same code is executed in tests.
I know that foreign keys checking must be activated in sqlite :
PRAGMA foreign_keys = 1;
However, I don't know where the best place to do this activation is (same question here).
Moreover, the following code doesn't work:
def test_method(self):
    from django.db import connection
    cursor = connection.cursor()
    cursor.execute('PRAGMA foreign_keys = ON')
    c = cursor.execute('PRAGMA foreign_keys')
    print c.fetchone()
    # prints: (0,)
Any ideas?
So, I finally found the correct answer. All I had to do was add this code to the __init__.py file of one of my installed apps:
from django.db.backends.signals import connection_created

def activate_foreign_keys(sender, connection, **kwargs):
    """Enable integrity constraint with sqlite."""
    if connection.vendor == 'sqlite':
        cursor = connection.cursor()
        cursor.execute('PRAGMA foreign_keys = ON;')

connection_created.connect(activate_foreign_keys)
You could use django signals, listening to post_syncdb.
from django.db.models.signals import post_syncdb

def set_pragma_on(sender, **kwargs):
    "your code here"

post_syncdb.connect(set_pragma_on)
This ensures that whenever syncdb is run (and it is run when creating the test database), your SQLite database has the pragma set to 'on'. You should check which database you are using inside the set_pragma_on method above.
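A rough sketch of what that handler body could look like (the keyword argument carrying the database alias changed name across old Django versions, hence the defensive lookup; this is illustrative, not tested against every release):

from django.db import connections
from django.db.models.signals import post_syncdb

def set_pragma_on(sender, **kwargs):
    # Older Django versions pass the alias as 'db', later pre-1.9 ones as 'using'.
    alias = kwargs.get('using') or kwargs.get('db') or 'default'
    connection = connections[alias]
    if connection.vendor == 'sqlite':
        connection.cursor().execute('PRAGMA foreign_keys = ON;')

post_syncdb.connect(set_pragma_on)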

Choose test database?

I'm trying to run
./manage.py test
But it tells me
Got an error creating the test database: permission denied to create database
Obviously it doesn't have permission to create the database, but I'm on a shared server, so there's not much I can do about that. I can create a new database through the control panel but I don't think there's any way I can let Django do it automatically.
So, can't I create the test database manually and instead tell Django to flush it every time, rather than recreating the whole thing?
I had a similar issue, but I wanted Django to just bypass the creation of a test database for one of my instances (it is not a mirror, though). Following Mark's suggestion, I created a custom test runner, as follows:
from django.test.simple import DjangoTestSuiteRunner

class ByPassableDBDjangoTestSuiteRunner(DjangoTestSuiteRunner):
    def setup_databases(self, **kwargs):
        from django.db import connections
        old_names = []
        mirrors = []
        for alias in connections:
            connection = connections[alias]
            # If the database is a test mirror, redirect its connection
            # instead of creating a test database.
            if connection.settings_dict['TEST_MIRROR']:
                mirrors.append((alias, connection))
                mirror_alias = connection.settings_dict['TEST_MIRROR']
                connections._connections[alias] = connections[mirror_alias]
            elif connection.settings_dict.get('BYPASS_CREATION', 'no') == 'no':
                old_names.append((connection, connection.settings_dict['NAME']))
                connection.creation.create_test_db(self.verbosity, autoclobber=not self.interactive)
        return old_names, mirrors
Then I added an extra dict entry, 'BYPASS_CREATION': 'yes', to one of my database entries inside settings.py.
Finally, I configured the new test runner with:
TEST_RUNNER = 'auth.data.runner.ByPassableDBDjangoTestSuiteRunner'
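For illustration, the settings.py side of this could look roughly as follows (database names and the extra alias are placeholders; only BYPASS_CREATION and TEST_RUNNER are the parts the runner above actually reads):

DATABASES = {
    'default': {
        'ENGINE': 'django.db.backends.mysql',
        'NAME': 'main_db',
        # user / password / host as usual
    },
    'legacy': {
        'ENGINE': 'django.db.backends.mysql',
        'NAME': 'legacy_db',
        'BYPASS_CREATION': 'yes',  # custom flag checked by the runner above
    },
}

TEST_RUNNER = 'auth.data.runner.ByPassableDBDjangoTestSuiteRunner'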
I would suggest using sqlite3 for testing purposes while continuing to use mysql/postgres/etc. for production.
This can be achieved by placing this in your settings file:

if 'test' in sys.argv:
    DATABASES['default'] = {'ENGINE': 'django.db.backends.sqlite3'}

See Running django tests with sqlite.
A temporary sqlite database file will be created in your Django project home, which you will have write access to. The other advantage is that sqlite3 is much faster for testing. You may, however, run into problems if you are using any mysql/postgres-specific raw SQL (which you should try to avoid anyway).
I think a better solution might be to define your own test runner.
I added this to the comments above but it got kind of lost - recent changes to webfaction make this MUCH easier. You can now create new private database instances.
Follow the instructions there, and when creating a new user make sure to grant them database-creation rights: ALTER USER new_username CREATEDB;
You probably also should change the default cron settings so they don't check whether this database is up and running as frequently.
You could use django-nose as your TEST_RUNNER. Once installed, if you pass the following environment variable, it will not delete and re-create the database (create it manually yourself first).
REUSE_DB=1 ./manage.py test
You can also add the following to settings.py so you don't have to write REUSE_DB=1 every time you want to run tests:
os.environ['REUSE_DB'] = "1"
Note: this will also leave all your tables in the databases which means test setup will be a little quicker, but you will have to manually update the tables (or delete and re-create the database yourself) when you change your models.
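For reference, the settings.py side of the django-nose approach (assuming the package is installed) is roughly:

INSTALLED_APPS += ('django_nose',)
TEST_RUNNER = 'django_nose.NoseTestSuiteRunner'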
My variant for reusing the database:
from django.test.simple import DjangoTestSuiteRunner
from django.core.management import call_command

class TestRunner(DjangoTestSuiteRunner):
    def setup_databases(self, **kwargs):
        from django.db import connections
        settings = connections['default'].settings_dict
        settings['NAME'] = settings['TEST_NAME']
        settings['USER'] = settings['TEST_USER']
        settings['PASSWORD'] = settings['TEST_PASSWD']
        call_command('syncdb', verbosity=1, interactive=False, load_initial_data=False)

    def teardown_databases(self, old_config, **kwargs):
        from django.db import connection
        cursor = connection.cursor()
        cursor.execute('show tables;')
        parts = ('DROP TABLE IF EXISTS %s;' % table for (table,) in cursor.fetchall())
        sql = 'SET FOREIGN_KEY_CHECKS = 0;\n' + '\n'.join(parts) + 'SET FOREIGN_KEY_CHECKS = 1;\n'
        connection.cursor().execute(sql)
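To use it, point TEST_RUNNER at the class and give the default database the extra TEST_* keys this runner reads; the dotted path and values below are illustrative:

TEST_RUNNER = 'myproject.testrunner.TestRunner'  # wherever the class above lives

DATABASES['default'].update({
    'TEST_NAME': 'myproject_test',
    'TEST_USER': 'myproject_test_user',
    'TEST_PASSWD': 'secret',
})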
The following is a Django test suite runner that creates the database using the Webfaction XML-RPC API. Note that setting up the database through the API may take up to a minute, and the script may appear to be stuck momentarily; just wait a little while.
NOTE: there is a security risk of having control panel password in the webfaction server, because someone breaching into your web server SSH could take over your Webfaction account. If that is a concern, set USE_SESSKEY to True and use the fabric script below this script to pass a session id to the server. The session key expires in 1 hour from the last API call.
File test_runner.py: on the server, you need to configure ./manage.py test to use WebfactionTestRunner.
"""
This test runner uses Webfaction XML-RPC API to create and destroy database
"""
# you can put your control panel username and password here.
# NOTE: there is a security risk of having control panel password in
# the webfaction server, because someone breaching into your web server
# SSH could take over your Webfaction account. If that is a concern,
# set USE_SESSKEY to True and use the fabric script below this script to
# generate a session.
USE_SESSKEY = True
# CP_USERNAME = 'webfactionusername' # required if and only if USE_SESSKEY is False
# CP_PASSWORD = 'webfactionpassword' # required if and only if USE_SESSKEY is False
import sys
import os
from django.test.simple import DjangoTestSuiteRunner
from django import db
from webfaction import Webfaction
def get_sesskey():
f = os.path.expanduser("~/sesskey")
sesskey = open(f).read().strip()
os.remove(f)
return sesskey
if USE_SESSKEY:
wf = Webfaction(get_sesskey())
else:
wf = Webfaction()
wf.login(CP_USERNAME, CP_PASSWORD)
def get_db_user_and_type(connection):
    db_types = {
        'django.db.backends.postgresql_psycopg2': 'postgresql',
        'django.db.backends.mysql': 'mysql',
    }
    return (
        connection.settings_dict['USER'],
        db_types[connection.settings_dict['ENGINE']],
    )
def _create_test_db(self, verbosity, autoclobber):
    """
    Internal implementation - creates the test db tables.
    """
    test_database_name = self._get_test_db_name()
    db_user, db_type = get_db_user_and_type(self.connection)
    try:
        wf.create_db(db_user, test_database_name, db_type)
    except Exception as e:
        sys.stderr.write(
            "Got an error creating the test database: %s\n" % e)
        if not autoclobber:
            confirm = raw_input(
                "Type 'yes' if you would like to try deleting the test "
                "database '%s', or 'no' to cancel: " % test_database_name)
        if autoclobber or confirm == 'yes':
            try:
                if verbosity >= 1:
                    print("Destroying old test database '%s'..."
                          % self.connection.alias)
                wf.delete_db(test_database_name, db_type)
                wf.create_db(db_user, test_database_name, db_type)
            except Exception as e:
                sys.stderr.write(
                    "Got an error recreating the test database: %s\n" % e)
                sys.exit(2)
        else:
            print("Tests cancelled.")
            sys.exit(1)
    db.close_connection()
    return test_database_name
def _destroy_test_db(self, test_database_name, verbosity):
    """
    Internal implementation - remove the test db tables.
    """
    db_user, db_type = get_db_user_and_type(self.connection)
    wf.delete_db(test_database_name, db_type)
    self.connection.close()

class WebfactionTestRunner(DjangoTestSuiteRunner):
    def __init__(self, *args, **kwargs):
        # Monkey patch BaseDatabaseCreation with our own version
        from django.db.backends.creation import BaseDatabaseCreation
        BaseDatabaseCreation._create_test_db = _create_test_db
        BaseDatabaseCreation._destroy_test_db = _destroy_test_db
        return super(WebfactionTestRunner, self).__init__(*args, **kwargs)
File webfaction.py: this is a thin wrapper for the Webfaction API; it needs to be importable by both test_runner.py (on the remote server) and fabfile.py (on the local machine).
import xmlrpclib

class Webfaction(object):
    def __init__(self, sesskey=None):
        self.connection = xmlrpclib.ServerProxy("https://api.webfaction.com/")
        self.sesskey = sesskey

    def login(self, username, password):
        self.sesskey, _ = self.connection.login(username, password)

    def create_db(self, db_user, db_name, db_type):
        """ Create a database owned by db_user """
        self.connection.create_db(self.sesskey, db_name, db_type, 'unused')
        # deletes the default user created by Webfaction API
        self.connection.make_user_owner_of_db(self.sesskey, db_user, db_name, db_type)
        self.connection.delete_db_user(self.sesskey, db_name, db_type)

    def delete_db(self, db_name, db_type):
        try:
            self.connection.delete_db_user(self.sesskey, db_name, db_type)
        except xmlrpclib.Fault as e:
            print 'ignored error:', e
        try:
            self.connection.delete_db(self.sesskey, db_name, db_type)
        except xmlrpclib.Fault as e:
            print 'ignored error:', e
File fabfile.py: a sample Fabric script to generate a session key, needed only if USE_SESSKEY is True.
from fabric.api import *
from fabric.operations import run, put

from webfaction import Webfaction
import io

env.hosts = ["webfactionusername@webfactionusername.webfactional.com"]
env.password = "webfactionpassword"

def run_test():
    wf = Webfaction()
    wf.login(env.hosts[0].split('@')[0], env.password)
    sesskey_file = '~/sesskey'
    sesskey = wf.sesskey
    try:
        put(io.StringIO(unicode(sesskey)), sesskey_file, mode='0600')
        # put your test code here
        # e.g. run('DJANGO_SETTINGS_MODULE=settings /path/to/virtualenv/python /path/to/manage.py test --testrunner=test_runner.WebfactionTestRunner')
        raise Exception('write your test here')
    finally:
        run("rm -f %s" % sesskey_file)
The accepted answer didn't work for me. It's so outdated that it didn't run on my legacy codebase with Django 1.5.
I wrote a blog post describing in full how I solved this issue by creating an alternative test runner and changing the Django settings to provide the required config and use the new runner.
You need to specify a sqlite ENGINE when using unit tests. Open settings.py and add the following just after the DATABASES section:

import sys

if 'test' in sys.argv or 'test_coverage' in sys.argv:  # Covers regular testing and django-coverage
    DATABASES['default']['ENGINE'] = 'django.db.backends.sqlite3'
    DATABASES['default']['NAME'] = ':memory:'
Modify the following methods in django/db/backends/creation.py:
def _destroy_test_db(self, test_database_name, verbosity):
    "Internal implementation - remove the test db tables."
    # Remove the test database to clean up after
    # ourselves. Connect to the previous database (not the test database)
    # to do so, because it's not allowed to delete a database while being
    # connected to it.
    self._set_test_dict()
    cursor = self.connection.cursor()
    self.set_autocommit()
    time.sleep(1)  # To avoid "database is being accessed by other users" errors.
    cursor.execute("""SELECT table_name FROM information_schema.tables WHERE table_schema='public'""")
    rows = cursor.fetchall()
    for row in rows:
        try:
            print "Dropping table '%s'" % row[0]
            cursor.execute('drop table %s cascade ' % row[0])
        except:
            print "Couldn't drop '%s'" % row[0]
    #cursor.execute("DROP DATABASE %s" % self.connection.ops.quote_name(test_database_name))
    self.connection.close()

def _create_test_db(self, verbosity, autoclobber):
    "Internal implementation - creates the test db tables."
    suffix = self.sql_table_creation_suffix()
    if self.connection.settings_dict['TEST_NAME']:
        test_database_name = self.connection.settings_dict['TEST_NAME']
    else:
        test_database_name = TEST_DATABASE_PREFIX + self.connection.settings_dict['NAME']
    qn = self.connection.ops.quote_name
    # Create the test database and connect to it. We need to autocommit
    # if the database supports it because PostgreSQL doesn't allow
    # CREATE/DROP DATABASE statements within transactions.
    self._set_test_dict()
    cursor = self.connection.cursor()
    self.set_autocommit()
    return test_database_name

def _set_test_dict(self):
    if "TEST_NAME" in self.connection.settings_dict:
        self.connection.settings_dict["NAME"] = self.connection.settings_dict["TEST_NAME"]
    if "TEST_USER" in self.connection.settings_dict:
        self.connection.settings_dict['USER'] = self.connection.settings_dict["TEST_USER"]
    if "TEST_PASSWORD" in self.connection.settings_dict:
        self.connection.settings_dict['PASSWORD'] = self.connection.settings_dict["TEST_PASSWORD"]
Seems to work... just add the extra settings to your settings.py if you need 'em.
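For those extra settings to kick in, the DATABASES entry needs the matching TEST_* keys, for example (values are illustrative):

DATABASES['default']['TEST_NAME'] = 'myproject_test'
DATABASES['default']['TEST_USER'] = 'test_user'
DATABASES['default']['TEST_PASSWORD'] = 'secret'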
Simple workaround: change TEST_DATABASE_PREFIX in django/db/backends/base/creation.py as you like.