Creating a test database through the shell. Missing connection.creation.create_test_db() - django

The way this is supposed to be done is:
from django.db import connection
db = connection.creation.create_test_db() # Create the test db
However, the connection I import has no methods or attributes. Its type is django.db.DefaultConnectionProxy.
In the django/db/__init__.py lies the definition:
class DefaultConnectionProxy(object):
    """
    Proxy for accessing the default DatabaseWrapper object's attributes. If you
    need to access the DatabaseWrapper object itself, use
    connections[DEFAULT_DB_ALIAS] instead.
    """
    def __getattr__(self, item):
        return getattr(connections[DEFAULT_DB_ALIAS], item)

    def __setattr__(self, name, value):
        return setattr(connections[DEFAULT_DB_ALIAS], name, value)
I've imported django.db.connections and found it has the following attributes/methods:
connections.all
connections.databases
connections.ensure_defaults
There is no sign of DEFAULT_DB_ALIAS.
I'm looking for ideas on how to debug this. I wouldn't want to post a ticket to Django if this has something to do with my configuration.
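For reference, this is roughly what I would expect to work going by the proxy's docstring above, assuming a default database is configured in settings (bypassing the proxy entirely):

from django.db import connections, DEFAULT_DB_ALIAS

# Grab the default DatabaseWrapper directly, as the docstring suggests,
# then create the test database from it.
wrapper = connections[DEFAULT_DB_ALIAS]
test_db_name = wrapper.creation.create_test_db()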

Related

Why does my Django test pass when it should fail?

I am new to testing of any sort. This is a follow-up on this answer to my question. The answer establishes that this type of model method should not save the object to the database:
@classmethod
def create(cls, user, name):
    list = cls(user=user, name=name)
    return list
If this is the case, I am curious why this test passes and says everything is OK:
from django.test import TestCase
from .models import List
from django.contrib.auth.models import User


class ListTestCase(TestCase):
    def setUp(self):
        user_1 = User(username="test_user", password="abcd")
        user_1.save()
        List.objects.create(user=user_1, name="mylist")
        List.objects.create(user=user_1, name="anotherlist")

    def test_lists_is_created(self):
        user_1 = User.objects.get(username="test_user")
        list_1 = List.objects.get(user=user_1, name="mylist")
        self.assertEqual("mylist", list_1.name)
The reason why the test passes is that you call a different method from the one that you've implemented.
The line in ListTestCase.setUp()
List.objects.create(user=user_1, name="mylist")
actually calls Django's QuerySet.create() method. Notice that it's called via List.objects.create(), not List.create(). Therefore, the object is saved in the database and the test passes.
In your case, you've implemented a method create() inside the List model, so you should call List.create().
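To make the contrast concrete, here is a sketch of an extra test you could add to ListTestCase; assuming your create() classmethod really only instantiates the object, the lookup should fail because nothing was ever saved:

def test_create_classmethod_does_not_save(self):
    user_1 = User.objects.get(username="test_user")
    unsaved = List.create(user=user_1, name="draftlist")  # your classmethod
    self.assertIsNone(unsaved.pk)  # never hit the database
    with self.assertRaises(List.DoesNotExist):
        List.objects.get(user=user_1, name="draftlist")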

How to mock redis for Django tests

I am trying to mock out redis in my Django application. I have tried several different methods but none seem to work. What am I doing wrong?
My primary redis instance is called with:
redis_client = redis.from_url(os.environ.get("REDIS_URL"))
That instance is imported in other parts of the app in order to add and retrieve data.
In my tests I tried doing:
import fakeredis
from mock import patch


class TestViews(TestCase):
    def setUp(self):
        redis_patcher = patch('redis.Redis', fakeredis.FakeRedis)
        self.redis = redis_patcher.start()
        self.redis.set('UPDATE', 'Spring')
        print(redis_client.get('UPDATE'))

    def tearDown(self):
        self.redis_patcher.stop
When running the tests I want the 'UPDATE' variable to be set. But instead every instance of redis_client fails saying the server is not available. How can I mock out redis and set values, so that they are available when testing my app?
You should mock an item where it is used, not where it came from.
So if redis_client is used in a view like this:
myapp/views.py
from somemodule import redis_client

def some_view_that_uses_redis(request):
    result = redis_client(...)
Then in your TestViews you should patch redis_client like this:
class TestViews(TestCase):
    def setUp(self):
        redis_patcher = patch('myapp.views.redis_client', fakeredis.FakeRedis)
        self.redis = redis_patcher.start()
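As a follow-up sketch (assuming the view calls redis_client.get()/set() as in your question): start() returns the replacement object, and registering stop() with addCleanup avoids the tearDown mismatch in your original snippet. Note that a FakeRedis() instance is patched in here, so the view and the test talk to the same fake server:

class TestViews(TestCase):
    def setUp(self):
        redis_patcher = patch('myapp.views.redis_client', fakeredis.FakeRedis())
        self.redis = redis_patcher.start()      # the fake client the view will also see
        self.addCleanup(redis_patcher.stop)     # stop() must actually be called
        self.redis.set('UPDATE', 'Spring')

    def test_update_is_set(self):
        self.assertEqual(self.redis.get('UPDATE'), b'Spring')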

How can I update a class variable by calling a class method from a derived class

I am developing a package for my testing purposes called dbtest. I wrote it because I am using MySQLdb for connecting to databases, and it is a very tedious task to write SQL queries while testing. So I created a new package in which all queries can be accessed with separate functions. I avoided the Django ORM because my database tables have multiple foreign keys and primary keys.
Below is a part of the package.
package.py
from django.test import TestCase

dbcon = 'connector'

class testcase(TestCase):
    flag_user = []

    @classmethod
    def setUpClass(cls):
        global dbcon
        dbcon = MySQLdb.connect(host=dbHost, port=dbPort, user=dbUser, passwd=dbPasswd, db=dbname)
        super(testcase, cls).setUpClass()
        cursor = dbcon.cursor()
        sql = open("empty.sql").read()
        cursor.execute(sql)
        cursor.close()
        views.MySQLdb = Mockdb()

    @classmethod
    def tearDownClass(cls):
        dbcon.close()

    def user_table(self, username=username, email=email):
        cache = [username]
        self.flag_user.append(cache)
        cmpdata = (username, email)
        insert_table(tablename_user, cmpdata)

    def delete(self, table):
        last_entry = self.flag_user[-1]
        query_user = 'delete from USER where USERNAME=%s'
        cursor = dbcon.cursor()
        query = eval('query_%s' % table)
        cursor.execute(query, last_entry)
        dbcon.commit()
        del self.flag_user[-1]
tests.py
from package import testcase

class showfiles(testcase):
    def setUp(self):
        print "setup2"
        self.user_table(username='vishnu', email='vishnu@clartrum.com')

    def tearDown(self):
        print "teardown2"
        self.delete("user")

    def test_1(self):
        print "test dbtest link feature"

    def test_2(self):
        print "test health/errorfiles with valid device"
        self.user_table(username='vishnu', email='vishnu@clartrum.com')
insert_table in the package executes an insert operation in SQL, and the delete method deletes the last entry from USER. empty.sql creates the tables for the database.
When I run the tests, flag_user should finally contain only [['vishnu']]. But I get [['vishnu'], ['vishnu']], because the delete function in tearDown doesn't update the value.
I think this is due to class instances. Am I right or not?
Here:
class testcase(TestCase):
    flag_user = []
you create flag_user as a class attribute (shared by all instances).
Then here:
def user_table(self, username=username, email=email):
    cache = [username]
    self.flag_user.append(cache)
You append to the (class-level) flag_user attribute (it's accessed through the instance, but it's still the class attribute).
But here:
def delete(self, table):
    delete_table(tablename)
    self.flag_user = []
you create a flag_user attribute on the instance itself, which is totally disconnected from the eponymous class attribute.
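To see the effect in isolation, here is a tiny standalone sketch (a made-up Demo class, unrelated to your code):

class Demo(object):
    items = []                # class attribute, shared by all instances

d = Demo()
d.items.append("x")           # mutates the shared class attribute
d.items = []                  # creates an instance attribute that shadows it
print(Demo.items)             # ['x'] -- the class attribute is unchanged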
The simplest solution is to use an instance attribute right from the start instead of using a class attribute:
# package.py
from django.test import TestCase

dbcon = 'connector'

class testcase(TestCase):
    def setUp(self):
        self.flag_user = []
and don't forget to call testcase.setUp in child classes:
# tests.py
from package import testcase

class showfiles(testcase):
    def setUp(self):
        super(showfiles, self).setUp()
        self.user_table(username='vishnu', email='vishnu@clartrum.com')
The alternative, if you really want a class attribute (I can't imagine why you would, but...), is to modify testcase.delete() so it really clears the flag_user class attribute instead of creating an instance attribute. This is done by explicitly asking Python to rebind the attribute on the class itself (type(obj) returns obj.__class__, which is the class the instance belongs to):
def delete(self, table):
    delete_table(tablename)
    type(self).flag_user = []

How to access the parent model of a Django-CMS plugin

I created 2 django-cms plugins, a parent "Container" that can contain multiple child "Content" plugins.
When I save the child plugin I would like to access the model of the parent plugin.
from cms.plugin_pool import plugin_pool
from cms.plugin_base import CMSPluginBase

from .models import Container, Content


class ContainerPlugin(CMSPluginBase):
    model = Container
    name = "Foo Container"
    render_template = "my_package/container.html"
    allow_children = True
    child_classes = ["ContentPlugin"]


class ContentPlugin(CMSPluginBase):
    model = Content
    name = "Bar Content"
    render_template = "my_package/content.html"
    require_parent = True
    parent_classes = ["ContainerPlugin"]
    allow_children = True

    def save_model(self, request, obj, form, change):
        response = super(ContentPlugin, self).save_model(
            request, obj, form, change
        )
        # here I want to access the parent's (container) model, but how?
        return response


plugin_pool.register_plugin(ContainerPlugin)
plugin_pool.register_plugin(ContentPlugin)
obj is the current plugin instance, so I can get all the properties of this model, but I can't figure out how to access the parent plugin's model. There is obj.parent, but it's not the plugin instance as far as I can tell. I also tried playing around with self.cms_plugin_instance and obj.parent.get_plugin_instance(), but with no success.
Any advice?
Given a plugin instance, the instance.get_plugin_instance() method returns a tuple containing:
instance - The plugin instance
plugin - the associated plugin class instance
get_plugin_instance
so something like this to get the parent object:
instance, plugin_class = obj.parent.get_plugin_instance()
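Put together with the save_model() from your question, that would look roughly like this (a sketch; the guard is there because obj.parent may be None while the plugin is first being placed):

def save_model(self, request, obj, form, change):
    response = super(ContentPlugin, self).save_model(request, obj, form, change)
    if obj.parent is not None:
        container, container_plugin = obj.parent.get_plugin_instance()
        if container is not None:
            # container is the parent's Container model instance,
            # container_plugin is the ContainerPlugin class instance
            pass  # use container's fields here
    return response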
Option 1
Looking at the source code of CMSPluginBase, you might be able to use the implementation of get_child_classes. Unfortunately, that method really only returns the class names, so you cannot use it directly. But I think it actually does iterate over the child instances to get the class names:
def get_child_classes(self, slot, page):
    from cms.utils.placeholder import get_placeholder_conf

    template = page and page.get_template() or None

    # config overrides..
    ph_conf = get_placeholder_conf('child_classes', slot, template, default={})
    child_classes = ph_conf.get(self.__class__.__name__, self.child_classes)
    if child_classes:
        return child_classes
    from cms.plugin_pool import plugin_pool
    installed_plugins = plugin_pool.get_all_plugins(slot, page)
    return [cls.__name__ for cls in installed_plugins]
What you'd be interested in would be these two lines:
from cms.plugin_pool import plugin_pool
installed_plugins = plugin_pool.get_all_plugins(slot, page)
Option 2
Another way (the one I am using in my code) is to use signals, though this also requires finding the correct objects. The code is not very readable imho (see my lingering inline comments), but it works. It was written a while ago but I am still using it with django-cms 3.2.3.
The placeholder names really are the names that you have configured for your placeholders. It's certainly preferable to move that into the settings or somewhere. I'm not sure why I haven't done that, though.
I'd be interested in your solution!
# signals.py
import itertools
import logging

from cms.models import CMSPlugin
from cms.plugin_pool import plugin_pool
from django.db import ProgrammingError
from django.db.models.signals import post_save

logger = logging.getLogger(__name__)

_registered_plugins = [CMSPlugin.__name__]


def on_placeholder_saved(sender, instance, created, raw, using, update_fields, **kwargs):
    """
    :param sender: Placeholder
    :param instance: instance of Placeholder
    """
    logger.debug("Placeholder SAVED: %s by sender %s", instance, sender)
    # TODO this is totally ugly - is there no generic way to find out the related names?
    placeholder_names = [
        'topicintro_abstracts',
        'topicintro_contents',
        'topicintro_links',
        'glossaryentry_explanations',
    ]
    fetch_phs = lambda ph_name: _fetch_qs_as_list(instance, ph_name)
    container = list(itertools.chain.from_iterable(map(fetch_phs, placeholder_names)))
    logger.debug("Modified Placeholder Containers %s (%s)", container, placeholder_names)
    if container:
        if len(container) > 1:
            raise ProgrammingError("Several Containers use the same placeholder.")
        else:
            # TODO change modified_by (if possible?)
            container[0].save()


def _fetch_qs_as_list(instance, field):
    """
    :param instance: a model
    :param field: optional field (might not exist on model)
    :return: the field values as list (not as RelatedManager)
    """
    qs = getattr(instance, field)
    fields = qs.all() if qs else []
    return fields


def on_cmsplugin_saved(sender, instance, created, raw, using, update_fields, **kwargs):
    """
    :param sender: CMSPlugin or subclass
    :param instance: instance of CMSPlugin
    """
    plugin_class = instance.get_plugin_class()
    logger.debug("CMSPlugin SAVED: %s; plugin class: %s", instance, plugin_class)
    if not plugin_class.name in _registered_plugins:
        post_save.connect(on_cmsplugin_saved, sender=plugin_class)
        _registered_plugins.append(plugin_class.name)
        logger.info("Registered post_save listener with %s", plugin_class.name)
    on_placeholder_saved(sender, instance.placeholder, created, raw, using, update_fields)


def connect_existing_plugins():
    plugin_types = CMSPlugin.objects.order_by('plugin_type').values_list('plugin_type').distinct()
    for plugin_type in plugin_types:
        plugin_type = plugin_type[0]
        if not plugin_type in _registered_plugins:
            plugin_class = plugin_pool.get_plugin(plugin_type)
            post_save.connect(on_cmsplugin_saved, sender=plugin_class)
            post_save.connect(on_cmsplugin_saved, sender=plugin_class.model)
            _registered_plugins.append(plugin_type)
            _registered_plugins.append(plugin_class.model.__name__)
    logger.debug("INIT registered plugins: %s", _registered_plugins)


post_save.connect(on_cmsplugin_saved, sender=CMSPlugin)
You have to set up these signals somewhere. I'm doing this in my urls.py, though the app config might be the more suitable location for it? (I'm trying to avoid app configs.)
# This code has to run at server startup (and not during migrate if avoidable)
try:
    signals.connect_existing_plugins()
except db.utils.ProgrammingError:
    logger.warn('Failed to set up signals (if your DB is not set up (no tables), you can safely ignore this error).')
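If you do prefer the app config route, a minimal sketch might look like this (app and module names are hypothetical; ready() is Django's documented hook for this kind of startup code):

# myapp/apps.py -- hypothetical app
from django.apps import AppConfig
from django.db.utils import ProgrammingError


class MyAppConfig(AppConfig):
    name = 'myapp'

    def ready(self):
        from . import signals  # imported here so app loading has finished
        try:
            signals.connect_existing_plugins()
        except ProgrammingError:
            # tables not created yet (e.g. during the first migrate); safe to ignore
            pass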
Child plugins always inherit the context, so in the parent template you are able to do:
{% with something=instance.some_parent_field %}
    {% for plugin in instance.child_plugin_instances %}
        {% render_plugin plugin %}
    {% endfor %}
{% endwith %}
And use something in your child template.

Django with Pluggable MongoDB Storage troubles

I'm trying to use Django with mongoengine providing only the storage backend (GridFS). I still have a MySQL database.
I'm running into a strange (to me) error when I'm deleting from the django admin and am wondering if I am doing something incorrectly.
my code looks like this:
# settings.py
from mongoengine import connect
connect("mongo_storage")

# models.py
from mongoengine.django.storage import GridFSStorage

class MyFile(models.Model):
    name = models.CharField(max_length=50)
    content = models.FileField(upload_to="appsfiles", storage=GridFSStorage())
    creation_time = models.DateTimeField(auto_now_add=True)
    last_update_time = models.DateTimeField(auto_now=True)
I am able to upload files just fine, but when I delete them, something seems to break and the mongo database seems to get in an unworkable state until I manually delete all FileDocument.objects. When this happens I can't upload files or delete them from the django interface.
From the stack trace I have:
/home/projects/vector/src/mongoengine/django/storage.py in _get_doc_with_name
    doc = [d for d in docs if getattr(d, self.field).name == name] ...
Local vars:
    _[1]:  []
    d:
    docs:  Error in formatting: cannot set options after executing query
    name:  u'testfile.pdf'
    self:
/home/projects/vector/src/mongoengine/fields.py in __getattr__
    raise AttributeError
Am I using this feature incorrectly?
UPDATE:
Thanks to @zeekay's answer I was able to get a working GridFS storage plugin. I ended up not using mongoengine at all. I put my adapted solution on GitHub; there is a clear sample project showing how to use it. I also uploaded the project to PyPI.
Another Update:
I'd highly recommend the django-storages project. It has lots of storage backend options and is used by many more people than my original proposed solution.
I think you are better off not using MongoEngine for this; I haven't had much luck with it either. Here is a drop-in replacement for mongoengine.django.storage.GridFSStorage, which works with the admin.
from django.core.files.storage import Storage
from django.conf import settings
from pymongo import Connection
from gridfs import GridFS


class GridFSStorage(Storage):
    def __init__(self, host='localhost', port=27017, collection='fs'):
        for s in ('host', 'port', 'collection'):
            name = 'GRIDFS_' + s.upper()
            if hasattr(settings, name):
                setattr(self, s, getattr(settings, name))
        for s, v in zip(('host', 'port', 'collection'), (host, port, collection)):
            if v:
                setattr(self, s, v)
        self.db = Connection(host=self.host, port=self.port)[self.collection]
        self.fs = GridFS(self.db)

    def _save(self, name, content):
        self.fs.put(content, filename=name)
        return name

    def _open(self, name, *args, **kwargs):
        return self.fs.get_last_version(filename=name)

    def delete(self, name):
        oid = self.fs.get_last_version(filename=name)._id
        self.fs.delete(oid)

    def exists(self, name):
        return self.fs.exists({'filename': name})

    def size(self, name):
        return self.fs.get_last_version(filename=name).length
GRIDFS_HOST, GRIDFS_PORT and GRIDFS_COLLECTION can be defined in your settings or passed as host, port, collection keyword arguments to GridFSStorage in your model's FileField.
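For example, a minimal usage sketch (model and field names are made up for illustration):

# models.py -- hypothetical model using the storage class above
from django.db import models

class Report(models.Model):
    name = models.CharField(max_length=50)
    document = models.FileField(
        upload_to="reports",
        storage=GridFSStorage(collection="reports"),
    )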
I referred to Django's custom storage documentation, and loosely followed this answer to a similar question.