Flask disable CSRF in unittest - flask

In my project's __init__.py I have this:
app = Flask(__name__)
app.config.from_object('config')
CsrfProtect(app)
db = SQLAlchemy(app)
My development config file looks like:
import os
basedir = os.path.abspath(os.path.dirname(__file__))
DEBUG = True
WTF_CSRF_ENABLED = True
SECRET_KEY = 'supersecretkey'
SQLALCHEMY_DATABASE_URI = 'sqlite:///' + os.path.join(basedir, 'project.db')
SQLALCHEMY_TRACK_MODIFICATIONS = False
And in my unittest setUp I have this:
import unittest

from project import app, db

class ExampleTest(unittest.TestCase):
    def setUp(self):
        app.config['TESTING'] = True
        app.config['WTF_CSRF_ENABLED'] = False
        app.config['SQLALCHEMY_DATABASE_URI'] = 'sqlite://'
        self.app = app.test_client()
        db.create_all()
In theory, setting WTF_CSRF_ENABLED to False here should disable CSRF for the unit tests, but I'm still getting CSRF errors when I POST while unit testing. I think it is because CsrfProtect(app) has already been called while WTF_CSRF_ENABLED is True (it runs when I import app). If I set WTF_CSRF_ENABLED = False in the config file itself, it works as expected.
Is there any way I can disable CSRF after it has already been enabled? Or am I barking up the wrong tree here?

You can disable it using the config variable WTF_CSRF_ENABLED, for example:
class TestConfig(Config):
    TESTING = True
    WTF_CSRF_ENABLED = False
    ...
or app.config['WTF_CSRF_ENABLED'] = False
See also the Flask-WTF documentation.
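Note that the config has to be in place before CsrfProtect(app) runs for that initial setting to matter. If restructuring is an option, one way to arrange this is an application factory; the create_app and TestConfig names below are assumptions for illustration, not part of the original project:
# sketch: initialise extensions only after the (test) config has been loaded
from flask import Flask
from flask_wtf.csrf import CsrfProtect
from flask_sqlalchemy import SQLAlchemy

db = SQLAlchemy()
csrf = CsrfProtect()

def create_app(config_object='config'):
    app = Flask(__name__)
    app.config.from_object(config_object)  # a TestConfig class can be passed here
    csrf.init_app(app)                     # CSRF now sees the final config
    db.init_app(app)
    return app
The test module can then call create_app(TestConfig), so WTF_CSRF_ENABLED = False is already set when the extension initialises.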

Looking at the code for csrf_protect, it checks app.config['WTF_CSRF_METHODS'] every time a request comes in to see if this request type should be CSRF protected. By default the protected methods are:
app.config.setdefault('WTF_CSRF_METHODS', ['POST', 'PUT', 'PATCH'])
Because it actually checks app.config every time, simply changing this to an empty list in my unit test setUp resolves the issue:
import unittest

from project import app, db

class ExampleTest(unittest.TestCase):
    def setUp(self):
        app.config['TESTING'] = True
        app.config['WTF_CSRF_METHODS'] = []  # This is the magic
        app.config['SQLALCHEMY_DATABASE_URI'] = 'sqlite://'
        self.app = app.test_client()
        db.create_all()
Alternatively, CsrfProtect does register its protection with app.before_request(), so it may be possible to unregister it by modifying the registered before-request functions. But I think going that route would be more likely to break on future updates.
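For completeness, here is roughly what that unregistering could look like; it pokes at Flask's internal before_request_funcs mapping (app-level handlers live under the None key), so treat it as a fragile sketch rather than a recommendation:
# rough sketch only: drop CSRF-related before_request handlers (relies on Flask internals)
def remove_csrf_handlers(app):
    handlers = app.before_request_funcs.get(None, [])
    app.before_request_funcs[None] = [
        f for f in handlers if 'csrf' not in f.__name__.lower()
    ]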

It wasn't too hard to keep the csrf_token, if that is an option. I was able to successfully log into an application that used a csrf_token by using some regular expressions and the login function found in the Flask docs about testing.
def login(self, username, password):
    rv = self.client.get('/login')
    m = re.search(b'(<input id="csrf_token" name="csrf_token" type="hidden" value=")([-A-Za-z.0-9]+)', rv.data)
    return self.client.post('/login', data=dict(
        userName=username,
        password=password,
        csrf_token=m.group(2).decode("utf-8")
    ), follow_redirects=True)
So what I have done here is make the csrf_token part of the second capture group. This could easily be used to find a token anywhere in the application.
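For context, here is a small sketch of how that helper could be wired into a test case; the class name, credentials, and final assertion are illustrative, not from the original post:
import re
import unittest

from project import app, db

class MyAppTestCase(unittest.TestCase):
    def setUp(self):
        app.config['TESTING'] = True
        app.config['SQLALCHEMY_DATABASE_URI'] = 'sqlite://'
        self.client = app.test_client()
        db.create_all()

    # login(self, username, password) from above is defined here as a method

    def test_login_with_csrf_token(self):
        rv = self.login('someuser', 'somepassword')
        self.assertEqual(rv.status_code, 200)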

Related

Django unittest with legacy database connection

I have a Django project that pulls data from a legacy database (read-only connection) into its own database, and when I run integration tests, Django tries to read from test_account on the legacy connection:
(1049, "Unknown database 'test_account'")
Is there a way to tell Django to leave the legacy connection alone instead of pointing it at a test database?
I actually wrote something that lets you create integration tests in djenga (available on PyPI) if you want to take a look at how to build a separate integration test framework.
Here is the test runner I use with the Django unit test framework:
from django.test.runner import DiscoverRunner
from django.apps import apps
import sys

class UnManagedModelTestRunner(DiscoverRunner):
    """
    Test runner that uses a legacy database connection for the duration of the test run.
    Many thanks to the Caktus Group: https://www.caktusgroup.com/blog/2013/10/02/skipping-test-db-creation/
    """
    def __init__(self, *args, **kwargs):
        super(UnManagedModelTestRunner, self).__init__(*args, **kwargs)
        self.unmanaged_models = None
        self.test_connection = None
        self.live_connection = None
        self.old_names = None

    def setup_databases(self, **kwargs):
        # override keepdb so that we don't accidentally overwrite our existing legacy database
        self.keepdb = True
        # set the test DB name to the current DB name, which makes this more of an
        # integration test, but HEY, at least it's a start
        DATABASES['legacy']['TEST'] = {'NAME': DATABASES['legacy']['NAME']}
        result = super(UnManagedModelTestRunner, self).setup_databases(**kwargs)
        return result

# Set Django's test runner to the custom class defined above
TEST_RUNNER = 'config.settings.test_settings.UnManagedModelTestRunner'
TEST_NON_SERIALIZED_APPS = ['legacy_app']
from django.test import TestCase, override_settings

@override_settings(LOGIN_URL='/other/login/')
class LoginTestCase(TestCase):
    def test_login(self):
        response = self.client.get('/sekrit/')
        self.assertRedirects(response, '/other/login/?next=/sekrit/')
https://docs.djangoproject.com/en/1.10/topics/testing/tools/
You should theoretically be able to use override_settings here and switch to a different configuration for your tests.
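Separately from the custom runner, a database router is another way to keep Django from creating or migrating anything on the legacy connection during tests; this is only a rough sketch, and LegacyRouter, the 'legacy_app' label, and the settings path are assumed names:
# sketch: route legacy models to the 'legacy' alias and block table creation for it
class LegacyRouter:
    def db_for_read(self, model, **hints):
        if model._meta.app_label == 'legacy_app':
            return 'legacy'
        return None

    def db_for_write(self, model, **hints):
        if model._meta.app_label == 'legacy_app':
            return 'legacy'
        return None

    def allow_migrate(self, db, app_label, model_name=None, **hints):
        # never create or migrate legacy tables, including in the test database
        if app_label == 'legacy_app' or db == 'legacy':
            return False
        return None

# settings.py (path is an assumption)
DATABASE_ROUTERS = ['myproject.routers.LegacyRouter']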

django functional testing with selenium database creation

I am following the TddjangoTutorial. My question is: when I visit localhost:8000/admin/polls/poll to create a new poll, it complains that the poll table is not there. This is fixed by running the syncdb command, which creates the table. But when I ran the Selenium test before running that command, it worked fine. The test opens localhost:8081/admin/polls/poll and shows the page to add a new poll. Does the functional test automatically create this table?
Code for functional test:
from django.test import LiveServerTestCase
from selenium import webdriver
from selenium.webdriver.common.keys import Keys
from userFactory import UserFactory

class PollsTest(LiveServerTestCase):
    def setUp(self):
        self.browser = webdriver.Firefox()
        self.browser.implicitly_wait(3)
        self.user = UserFactory.create()

    def tearDown(self):
        self.browser.quit()

    def test_can_create_new_poll_via_admin_site(self):
        self.browser.get(self.live_server_url + '/admin/')
        body = self.browser.find_element_by_tag_name('body')
        self.assertIn('Django administration', body.text)
        username_field = self.browser.find_element_by_name('username')
        username_field.send_keys(self.user.username)
        password_field = self.browser.find_element_by_name('password')
        password_field.send_keys('adm1n')
        password_field.send_keys(Keys.ENTER)
        body = self.browser.find_element_by_tag_name('body')
        self.assertIn('Site administration', body.text)
        polls_links = self.browser.find_elements_by_link_text('Polls')
        self.assertEqual(len(polls_links), 2)
        polls_links[1].click()
        # the user is taken to the polls listing page, which shows no polls yet
        body = self.browser.find_element_by_tag_name('body')
        self.assertIn('0 polls', body.text)
        # the user clicks on add to add a new poll
        new_poll_link = self.browser.find_element_by_link_text('Add poll')
        new_poll_link.click()
        body = self.browser.find_element_by_tag_name('body')
        self.assertIn('Question:', body.text)
        self.assertIn('Date published:', body.text)
As you can read in the original doc: https://docs.djangoproject.com/en/dev/topics/testing/overview/#the-test-database
Tests that require a database (namely, model tests) will not use your “real” (production) database. Separate, blank databases are created for the tests.
Regardless of whether the tests pass or fail, the test databases are destroyed when all the tests have been executed.
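In other words, LiveServerTestCase builds a fresh, empty test database (creating the tables for you) before the test runs and throws it away afterwards, which is why the admin page worked without syncdb. Any data a test needs has to be created by the test itself; a minimal sketch, assuming the tutorial's Poll model with question and pub_date fields:
from django.test import LiveServerTestCase
from django.utils import timezone

from polls.models import Poll  # assumed model/fields from the tutorial

class PollsDataTest(LiveServerTestCase):
    def setUp(self):
        # seed the otherwise blank test database
        self.poll = Poll.objects.create(
            question="What's new?",
            pub_date=timezone.now(),
        )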

nose @with_setup settings fail to be used in the test

I'm using nose to test a Flask app. I'm trying to use the @with_setup decorator to keep my tests DRY, without repeating the setup in every test function, but the @with_setup phase doesn't seem to run. As the docs state, I'm using it with test functions, not test classes. Some code:
import os
from flask import *
from app import app
from nose.tools import eq_, assert_true
from nose import with_setup

testapp = app.test_client()

def setup():
    app.config['TESTING'] = True
    RUNNING_LOCAL = True
    RUN_FOLDER = os.path.dirname(os.path.realpath(__file__))
    fixture = {'html_hash': 'aaaa'}  # mocking the hash

def teardown():
    app.config['TESTING'] = False
    RUNNING_LOCAL = False

@with_setup(setup, teardown)
def test_scrape_wellformed_html():
    # RUN_FOLDER = os.path.dirname(os.path.realpath(__file__))  # if it is here instead of inside the @with_setup setup the code works..
    # fixture = {'html_hash': 'aaaa'}  # mocking the hash  # if it is here the code works
    fixture['gush_id'] = 'current.fixed'
    data = scrape_gush(fixture, RUN_FOLDER)
    # various assertions
For example, if I create the fixture dict inside the @with_setup setup block instead of inside the specific test function (and in every one of them), I get a NameError (or something similar).
I guess I'm missing something, just not sure what.
Thanks for the help!
The issue is that the names RUN_FOLDER and fixture are scoped to the setup function, so they will not be available to test_scrape_wellformed_html. If you look at the code for with_setup you will see that it does not do anything to alter the test function's environment.
In order to do what you want, you need to make your fixtures global variables:
testapp = app.test_client()
RUN_FOLDER = os.path.dirname(os.path.realpath(__file__))
fixture = None

def setup():
    global fixture
    app.config['TESTING'] = True
    fixture = {'html_hash': 'aaaa'}  # mocking the hash

def teardown():
    global fixture
    app.config['TESTING'] = False
    fixture = None

@with_setup(setup, teardown)
def test_scrape_wellformed_html():
    # run test here

Why does Flask-Security Cause a new KVSession Record for Each Request?

I'm trying out Flask-KVSession as an alternative session implementation for a Flask web site. I've created a test site (see Code 1 below). When I run it, I can store values in the session by navigating between the various resources in my browser. This works correctly. Also, when I look at the sessions table in the resulting SQLite database, I see a single record that was used to store this session the entire time.
Then I try to add Flask-Security to this (see Code 2 below). After running this site (making sure to first delete the existing test.db sqlite file), I am brought to the login prompt and I log in. Then I proceed to do the same thing of jumping back and forth between the resources. I get the same results.
The problem is that when I look in the sqlitebrowser sessions table, there are 8 records. It turns out a new session record was created for EACH request that was made.
Why does a new session record get created for each request when using Flask-Security? Why isn't the existing session updated like it was before?
Code 1 (KVSession without Flask-Security)
import os
from flask import Flask, session

app = Flask(__name__)
app.secret_key = os.urandom(64)

#############
# SQLAlchemy
#############
from flask.ext.sqlalchemy import SQLAlchemy

db = SQLAlchemy(app)
DB_DIR = os.path.dirname(os.path.abspath(__file__))
DB_URI = 'sqlite:////{0}/test.db'.format(DB_DIR)
app.config['SQLALCHEMY_DATABASE_URI'] = DB_URI

@app.before_first_request
def create_user():
    db.create_all()

############
# KVSession
############
from simplekv.db.sql import SQLAlchemyStore
from flask.ext.kvsession import KVSessionExtension

store = SQLAlchemyStore(db.engine, db.metadata, 'sessions')
kvsession = KVSessionExtension(store, app)

@app.route('/a')
def a():
    session['last'] = 'a'
    return 'Thank you for visiting A!'

@app.route('/b')
def b():
    session['last'] = 'b'
    return 'Thank you for visiting B!'

@app.route('/c')
def c():
    return 'You last visited "{0}"'.format(session['last'])

app.run(debug=True)
Code 2 (KVSession WITH Flask-Security)
import os
from flask import Flask, session

app = Flask(__name__)
app.secret_key = os.urandom(64)

#############
# SQLAlchemy
#############
from flask.ext.sqlalchemy import SQLAlchemy

db = SQLAlchemy(app)
DB_DIR = os.path.dirname(os.path.abspath(__file__))
DB_URI = 'sqlite:////{0}/test.db'.format(DB_DIR)
app.config['SQLALCHEMY_DATABASE_URI'] = DB_URI

###########
# Security
###########
# This import needs to happen after SQLAlchemy db is created above
from flask.ext.security import (
    Security, SQLAlchemyUserDatastore, current_user,
    UserMixin, RoleMixin, login_required
)

# Define models
roles_users = db.Table(
    'roles_users',
    db.Column('user_id', db.Integer(), db.ForeignKey('user.id')),
    db.Column('role_id', db.Integer(), db.ForeignKey('role.id')))

class Role(db.Model, RoleMixin):
    id = db.Column(db.Integer(), primary_key=True)
    name = db.Column(db.String(80), unique=True)
    description = db.Column(db.String(255))

class User(db.Model, UserMixin):
    id = db.Column(db.Integer, primary_key=True)
    email = db.Column(db.String(255), unique=True)
    password = db.Column(db.String(255))
    active = db.Column(db.Boolean())
    confirmed_at = db.Column(db.DateTime())
    roles = db.relationship('Role', secondary=roles_users,
                            backref=db.backref('users', lazy='dynamic'))

user_datastore = SQLAlchemyUserDatastore(db, User, Role)
security = Security(app, user_datastore)

@app.before_first_request
def create_user():
    db.create_all()
    user_datastore.create_user(email='test@example.com', password='password')
    db.session.commit()

############
# KVSession
############
from simplekv.db.sql import SQLAlchemyStore
from flask.ext.kvsession import KVSessionExtension

store = SQLAlchemyStore(db.engine, db.metadata, 'sessions')
kvsession = KVSessionExtension(store, app)

@app.route('/a')
@login_required
def a():
    session['last'] = 'a'
    return 'Thank you for visiting A!'

@app.route('/b')
@login_required
def b():
    session['last'] = 'b'
    return 'Thank you for visiting B!'

@app.route('/c')
@login_required
def c():
    return 'You last visited "{0}"'.format(session['last'])

app.run(debug=True)
Version Info
Python 2.7.3
Flask==0.9
Flask-KVSession==0.3.2
Flask-Login==0.1.3
Flask-Mail==0.8.2
Flask-Principal==0.3.5
Flask-SQLAlchemy==0.16
Flask-Security==1.6.3
SQLAlchemy==0.8.1
Turns out this is related to a known problem with flask-login (which flask-security uses) when flask-login is used with a session storage library like KVSession.
Basically, KVSession needs to update the database with the new session information whenever data in the session is created or modified. And in the sample above, this happens correctly: the first time I hit a page, the session is created. After that, the existing session is updated.
However, in the background the browser sends a cookie-less request to my web server looking for my favicon, so Flask ends up handling a request to /favicon.ico. This request (or any other request that would 404) is still handled by Flask, which means flask-login will look at the request and try to do its magic.
It so happens that flask-login doesn't TRY to put anything into the session, but it still LOOKS like the session has been modified as far as KVSession is concerned. Because it LOOKS like the session is modified, KVSession updates the database. The following is code from flask-login:
def _update_remember_cookie(self, response):
    operation = session.pop("remember", None)
    ...
The _update_remember_cookie method is called during the request lifecycle. Although session.pop will not change the session if it doesn't have the "remember" key (which in this case it doesn't), KVSession still sees a pop and assumes that the session changed.
The flask-login issue for this provides a simple bug fix, but it has not been merged into flask-login. It appears that the maintainer is planning a complete rewrite and will implement the fix there.
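I haven't checked the exact patch attached to that issue, but the shape of such a fix would be to avoid touching the session when there is nothing to remove, roughly:
# sketch of the kind of guard the linked issue suggests (not the verbatim patch)
def _update_remember_cookie(self, response):
    if "remember" in session:
        operation = session.pop("remember", None)
        # ... act on the remember/forget operation as before ...
    return response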

Django: is there a way to count SQL queries from a unit test?

I am trying to find out the number of queries executed by a utility function. I have written a unit test for this function and the function is working well. What I would like to do is track the number of SQL queries executed by the function so that I can see if there is any improvement after some refactoring.
def do_something_in_the_database():
    # Does something in the database
    # return result

class DoSomethingTests(django.test.TestCase):
    def test_function_returns_correct_values(self):
        self.assertEqual(n, <number of SQL queries executed>)
EDIT: I found out that there is a pending Django feature request for this. However the ticket is still open. In the meantime is there another way to go about this?
Since Django 1.3 there has been assertNumQueries, available exactly for this purpose. One way to use it (as of Django 3.2) is as a context manager:
# measure queries of some_func and some_func2
with self.assertNumQueries(2):
    result = some_func()
    result2 = some_func2()
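assertNumQueries can also be called with a callable and its arguments instead of being used as a context manager, which is convenient for a single call:
# equivalent callable form: the callable is invoked and its queries are counted
self.assertNumQueries(n, do_something_in_the_database)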
Vinay's response is correct, with one minor addition.
Django's unit test framework actually sets DEBUG to False when it runs, so no matter what you have in settings.py, you will not have anything populated in connection.queries in your unit test unless you re-enable debug mode. The Django docs explain the rationale for this as:
Regardless of the value of the DEBUG setting in your configuration file, all Django tests run with DEBUG=False. This is to ensure that the observed output of your code matches what will be seen in a production setting.
If you're certain that enabling debug will not affect your tests (such as if you're specifically testing DB hits, as it sounds like you are), the solution is to temporarily re-enable debug in your unit test, then set it back afterward:
def test_myself(self):
    from django.conf import settings
    from django.db import connection

    settings.DEBUG = True
    connection.queries = []

    # Test code as normal
    self.assert_(connection.queries)

    settings.DEBUG = False
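On newer Django versions the same idea reads a bit more safely with override_settings, which restores DEBUG even if the test fails; a sketch along those lines:
from django.test import TestCase, override_settings
from django.db import connection, reset_queries

class DoSomethingTests(TestCase):
    @override_settings(DEBUG=True)
    def test_queries_are_logged(self):
        reset_queries()
        do_something_in_the_database()
        self.assertTrue(len(connection.queries) > 0)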
If you are using pytest, pytest-django provides a django_assert_num_queries fixture for this purpose:
def test_queries(django_assert_num_queries):
    with django_assert_num_queries(3):
        Item.objects.create('foo')
        Item.objects.create('bar')
        Item.objects.create('baz')
If you don't want to use TestCase (with assertNumQueries) or change the settings to DEBUG=True, you can use the context manager CaptureQueriesContext (the same thing assertNumQueries uses).
from django.db import connections
from django.test.utils import CaptureQueriesContext

DB_NAME = "default"  # alias of the DB configured in settings you want to use - "default" is standard
connection = connections[DB_NAME]

with CaptureQueriesContext(connection) as context:
    ...  # do your thing

num_queries = context.final_queries - context.initial_queries
assert num_queries == expected_num_queries
In modern Django (>=1.8) this is well documented (it's also documented for 1.7): there is a reset_queries function to use instead of assigning connection.queries = [], which now raises an error. Something like this works on Django >= 1.8:
class QueriesTests(django.test.TestCase):
    def test_queries(self):
        from django.conf import settings
        from django.db import connection, reset_queries
        try:
            settings.DEBUG = True
            # [... your ORM code ...]
            self.assertEqual(len(connection.queries), num_of_expected_queries)
        finally:
            settings.DEBUG = False
            reset_queries()
You may also consider resetting queries in setUp/tearDown to ensure they are reset for each test, instead of doing it in the finally clause; the version above is more explicit (although more verbose). You can also call reset_queries in the try clause as many times as you need to count queries from zero.
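A sketch of that setUp/tearDown variant, for comparison (num_of_expected_queries stands in for whatever count you expect):
import django.test
from django.conf import settings
from django.db import connection, reset_queries

class QueriesTests(django.test.TestCase):
    def setUp(self):
        settings.DEBUG = True
        reset_queries()

    def tearDown(self):
        settings.DEBUG = False
        reset_queries()

    def test_queries(self):
        # [... your ORM code ...]
        self.assertEqual(len(connection.queries), num_of_expected_queries)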
Here is a working prototype of a withAssertNumQueriesLessThan context manager:
import json
from contextlib import contextmanager
from django.test.utils import CaptureQueriesContext
from django.db import connections

@contextmanager
def withAssertNumQueriesLessThan(self, value, using='default', verbose=False):
    with CaptureQueriesContext(connections[using]) as context:
        yield  # your test will be run here
    if verbose:
        msg = "\r\n%s" % json.dumps(context.captured_queries, indent=4)
    else:
        msg = None
    self.assertLess(len(context.captured_queries), value, msg=msg)
It can simply be used in your unit tests, for example to check the number of queries per Django REST API call:
with self.withAssertNumQueriesLessThan(10):
    response = self.client.get('contacts/')
    self.assertEqual(response.status_code, 200)
You can also pass an exact DB alias via using, and set verbose=True if you want to pretty-print the list of actual queries to stdout.
If you have DEBUG set to True in your settings.py (presumably so in your test environment) then you can count queries executed in your test as follows:
from django.db import connection

class DoSomethingTests(django.test.TestCase):
    def test_something_or_other(self):
        num_queries_old = len(connection.queries)
        do_something_in_the_database()
        num_queries_new = len(connection.queries)
        self.assertEqual(n, num_queries_new - num_queries_old)
If you want to use a decorator for that there is a nice gist:
import functools
import sys
import re

from django.conf import settings
from django.db import connection

def shrink_select(sql):
    return re.sub(r"^SELECT(.+)FROM", "SELECT .. FROM", sql)

def shrink_update(sql):
    return re.sub(r"SET(.+)WHERE", "SET .. WHERE", sql)

def shrink_insert(sql):
    return re.sub(r"\((.+)\)", "(..)", sql)

def shrink_sql(sql):
    return shrink_update(shrink_insert(shrink_select(sql)))

def _err_msg(num, expected_num, verbose, func=None):
    func_name = "%s:" % func.__name__ if func else ""
    msg = "%s Expected number of queries is %d, actual number is %d.\n" % (func_name, expected_num, num)
    if verbose > 0:
        queries = [query['sql'] for query in connection.queries[-num:]]
        if verbose == 1:
            queries = [shrink_sql(sql) for sql in queries]
        msg += "== Queries ==\n" + "\n".join(queries)
    return msg

def assertNumQueries(expected_num, verbose=1):
    class DecoratorOrContextManager(object):
        def __call__(self, func):  # decorator
            @functools.wraps(func)
            def inner(*args, **kwargs):
                handled = False
                try:
                    self.__enter__()
                    return func(*args, **kwargs)
                except:
                    self.__exit__(*sys.exc_info())
                    handled = True
                    raise
                finally:
                    if not handled:
                        self.__exit__(None, None, None)
            return inner

        def __enter__(self):
            self.old_debug = settings.DEBUG
            self.old_query_count = len(connection.queries)
            settings.DEBUG = True

        def __exit__(self, type, value, traceback):
            if not type:
                num = len(connection.queries) - self.old_query_count
                assert expected_num == num, _err_msg(num, expected_num, verbose)
            settings.DEBUG = self.old_debug

    return DecoratorOrContextManager()
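The gist doesn't show usage, but given the __call__/__enter__/__exit__ methods above it should work both as a decorator and as a context manager, roughly like this (using the question's do_something_in_the_database as a stand-in):
# assumed usage based on the class above
@assertNumQueries(3)
def test_creates_three_rows(self):
    do_something_in_the_database()

def test_same_check_inline(self):
    with assertNumQueries(3, verbose=2):
        do_something_in_the_database()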