django simplejson [] is not JSON serializable - django

in my view, why does this work:
results = []
results.append({'status':1})
results.append({'bookmarks':[]})
simplejson.dumps(results)
# produces: [{"status": 1}, {"bookmarks": []}]
and this doesn't:
from myapp.models import Bookmark
results = []
results.append({'status':1})
results.append({'bookmarks':Bookmark.objects.all()})
# fails with exception saying: [] is not JSON serializable
Complete stack trace follows:
Traceback:
File "/Users/Ishaq/Projects/github/bookmarks/venv/lib/python2.7/site-packages/django/core/handlers/base.py" in get_response
115. response = callback(request, *callback_args, **callback_kwargs)
File "/Users/Ishaq/Projects/github/bookmarks/bookmarks/views.py" in index
9. return HttpResponse(simplejson.dumps(Bookmark.objects.all()), mimetype='application/json');
File "/usr/local/Cellar/python/2.7.3/Frameworks/Python.framework/Versions/2.7/lib/python2.7/json/__init__.py" in dumps
231. return _default_encoder.encode(obj)
File "/usr/local/Cellar/python/2.7.3/Frameworks/Python.framework/Versions/2.7/lib/python2.7/json/encoder.py" in encode
201. chunks = self.iterencode(o, _one_shot=True)
File "/usr/local/Cellar/python/2.7.3/Frameworks/Python.framework/Versions/2.7/lib/python2.7/json/encoder.py" in iterencode
264. return _iterencode(o, 0)
File "/usr/local/Cellar/python/2.7.3/Frameworks/Python.framework/Versions/2.7/lib/python2.7/json/encoder.py" in default
178. raise TypeError(repr(o) + " is not JSON serializable")
Exception Type: TypeError at /conferences/
Exception Value: [] is not JSON serializable

Instead of using simplejson to serialize Django objects, use the serialization framework provided by Django.
With reference to the linked docs, you can do:
from django.core import serializers
data = serializers.serialize("json", Bookmark.objects.all())
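In a view, the serialized string can be passed straight to HttpResponse. A minimal sketch (using the mimetype keyword from the question's older Django version; newer versions call it content_type):

from django.core import serializers
from django.http import HttpResponse

def index(request):
    # serialize() returns a JSON string, so no further dumps() call is needed
    data = serializers.serialize("json", Bookmark.objects.all())
    return HttpResponse(data, mimetype='application/json')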

I was waiting for Burhan Khalid to turn his comment into an answer, but since he hasn't, I will:
using simplejson.dumps(list(Bookmark.objects.all())) made it work.
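Note that list() only converts the queryset itself; if the queryset actually contains Bookmark instances, dumps() will fail again on the model objects. A hedged sketch that stays JSON-friendly (assuming the model's fields are simple types):

results = []
results.append({'status': 1})
# .values() yields plain dicts instead of model instances,
# so the structure serializes even when bookmarks exist
results.append({'bookmarks': list(Bookmark.objects.values())})
simplejson.dumps(results)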

Related

Django response error 'unicode' object has no attribute '_meta' json

I'm using Django 1.11 and I'm having a tough time storing a JSON response. Here's my views.py code:
views.py
# -*- coding: utf-8 -*-
from __future__ import unicode_literals
from .models import addinfomodels
from django.shortcuts import render, redirect
from django.http import HttpResponse, JsonResponse
from django.core import serializers
import json

# Create your views here.
def addinfo(request):
    batch_year = [2016, 2017, 2018]
    dept = ['AERO', 'BME', 'CIVIL', 'CSE', 'ECE', 'EEE', 'E&I', 'MECH']
    type = ['onecredit', 'core', 'professional', 'openelective']
    return render(request, "cbcsportal/addinfo.html", {'type': type, 'batch': batch_year, 'dept': dept})

def rollvalue(request):
    return request.POST.get('rollno')

# d = {}
def jsonvalue(request):
    d = {"courses": [{"choices": [request.POST.get('choices00'), request.POST.get('choices10')], "code": request.POST.get('code0'), "name": request.POST.get('name10')}]}
    ds = serializers.serialize('json', d)
    print ds
    return JsonResponse(ds, content_type="application/json", safe=False)

def posttodb(request):
    if request.method == "POST":
        data = addinfomodels()
        data.batch = request.POST.get('batch')
        data.dept = request.POST.get('dept')
        data.typeid = request.POST.get('typeid')
        data.type = request.POST.get('type')
        data.rollno = [rollvalue(request)]
        data.renderJSON = jsonvalue(request)
        data.starttime = request.POST.get('starttime0')
        data.endtime = request.POST.get('endtime0')
        data.save()
    return redirect('addinfo')
Please help me, I'm getting this error:
'unicode' object has no attribute '_meta'
this is the traceback
Traceback:
File "C:\Python27\lib\site-packages\django\core\handlers\exception.py" in inner
41. response = get_response(request)
File "C:\Python27\lib\site-packages\django\core\handlers\base.py" in _get_response
187. response = self.process_exception_by_middleware(e, request)
File "C:\Python27\lib\site-packages\django\core\handlers\base.py" in _get_response
185. response = wrapped_callback(request, *callback_args, **callback_kwargs)
File "C:\Users\admin\Desktop\SREC_OBA\cbcsportal\views.py" in posttodb
34. data.renderJSON = jsonvalue(request)
File "C:\Users\admin\Desktop\SREC_OBA\cbcsportal\views.py" in jsonvalue
21. ds = serializers.serialize('json', d)
File "C:\Python27\lib\site-packages\django\core\serializers__init__.py" in serialize
129. s.serialize(queryset, **options)
File "C:\Python27\lib\site-packages\django\core\serializers\base.py" in serialize
84. concrete_model = obj._meta.concrete_model
Exception Type: AttributeError at /cbcs/posttodb
Exception Value: 'unicode' object has no attribute '_meta'
Here:
d = {....}
ds = serializers.serialize('json', d)
#print ds
return JsonResponse(ds, content_type="application/json", safe=False)
You are passing a dict to serialize(). Django serializers are for serializing Django ORM querysets (this is documented, and you could have found it out by reading the traceback).
The proper way to serialize a Python dict to JSON is quite simply json.dumps(yourdict). BUT: in your case this is useless anyway, since JsonResponse expects a Python dict as its first argument and will take care of the serialization. Also, you don't need to set the content type; it's already the default for JsonResponse. In other words, all you need is:
d = {....}
return JsonResponse(d, safe=False)
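Applied to the view from the question, a minimal corrected jsonvalue() could look like this (same POST field names as above; just a sketch):

def jsonvalue(request):
    d = {"courses": [{
        "choices": [request.POST.get('choices00'), request.POST.get('choices10')],
        "code": request.POST.get('code0'),
        "name": request.POST.get('name10'),
    }]}
    # JsonResponse serializes the dict itself; no serializers.serialize() needed
    return JsonResponse(d, safe=False)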
As a side note, here:
def posttodb(request):
    if request.method == "POST":
        data = addinfomodels()
        data.batch = request.POST.get('batch')
        data.dept = request.POST.get('dept')
        data.typeid = request.POST.get('typeid')
        data.type = request.POST.get('type')
        data.rollno = [rollvalue(request)]
        data.renderJSON = jsonvalue(request)
        data.starttime = request.POST.get('starttime0')
        data.endtime = request.POST.get('endtime0')
        data.save()
You're inserting unvalidated, unsanitized user input into your db. DON'T DO THAT! (Unless you're happy to have your server hacked by the first script kiddie, of course.) Use Django forms (in this case a ModelForm) to take care of validation and sanitization.
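A rough sketch of what that could look like, assuming addinfomodels has the fields used above (the form class name and field list are illustrative):

from django import forms
from .models import addinfomodels

class AddInfoForm(forms.ModelForm):
    class Meta:
        model = addinfomodels
        fields = ['batch', 'dept', 'typeid', 'type', 'starttime', 'endtime']

def posttodb(request):
    if request.method == "POST":
        form = AddInfoForm(request.POST)
        if form.is_valid():
            form.save()  # only validated, cleaned data reaches the database
    return redirect('addinfo')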

scrapy TypeError: object() takes no parameters

I am new to Scrapy and trying to crawl a couple of links as a test. Whenever I run scrapy crawl tier1, I get "TypeError: object() takes no parameters", as shown below:
Traceback (most recent call last):
File "/Users/btaek/TaeksProgramming/adv/crawler/lib/python2.7/site-packages/twisted/internet/defer.py", line 653, in _runCallbacks
current.result = callback(current.result, *args, **kw)
File "/Users/btaek/TaeksProgramming/adv/crawler/adv_crawler/adv_crawler/spiders/tier1_crawler.py", line 93, in parse
mk_loader.add_xpath('title', 'h1[@class="top_title"]') # Title of the article
File "/Users/btaek/TaeksProgramming/adv/crawler/lib/python2.7/site-packages/scrapy/loader/__init__.py", line 167, in add_xpath
self.add_value(field_name, values, *processors, **kw)
File "/Users/btaek/TaeksProgramming/adv/crawler/lib/python2.7/site-packages/scrapy/loader/__init__.py", line 77, in add_value
self._add_value(field_name, value)
File "/Users/btaek/TaeksProgramming/adv/crawler/lib/python2.7/site-packages/scrapy/loader/__init__.py", line 91, in _add_value
processed_value = self._process_input_value(field_name, value)
File "/Users/btaek/TaeksProgramming/adv/crawler/lib/python2.7/site-packages/scrapy/loader/__init__.py", line 150, in _process_input_value
return proc(value)
File "/Users/btaek/TaeksProgramming/adv/crawler/lib/python2.7/site-packages/scrapy/loader/processors.py", line 28, in __call__
next_values += arg_to_iter(func(v))
TypeError: object() takes no parameters
2017-08-23 17:25:02 [tier1-parse-logger] INFO: Entered the parse function to parse and index: http://news.mk.co.kr/newsRead.php?sc=30000001&year=2017&no=535166
2017-08-23 17:25:02 [tier1-parse-logger] ERROR: Error (object() takes no parameters) when trying to parse <<date>> from a mk article: http://news.mk.co.kr/newsRead.php?sc=30000001&year=2017&no=535166
2017-08-23 17:25:02 [tier1-parse-logger] ERROR: Error (object() takes no parameters) when trying to parse <<author>> from a mk article: http://news.mk.co.kr/newsRead.php?sc=30000001&year=2017&no=535166
2017-08-23 17:25:02 [scrapy.core.scraper] ERROR: Spider error processing <GET http://news.mk.co.kr/newsRead.php?sc=30000001&year=2017&no=535166> (referer: None)
And, my spider file (tier1_crawler.py):
# -*- coding: utf-8 -*-
import sys
reload(sys)
sys.setdefaultencoding('utf-8')

import os
sys.path.append(os.path.abspath('..'))

import logging
import scrapy
from scrapy.loader import ItemLoader
from adv_crawler.items import AdvCrawlerItem
from datetime import datetime, date, time

t1_parse_logger = logging.getLogger("tier1-parse-logger")
t1_parse_logger.LOG_FILE = "Tier1-log.txt"

content_type_dic = {
    'news': 'news',
}

class Tier1Crawler(scrapy.Spider):
    name = "tier1"

    def start_requests(self):
        urls = ['http://news.mk.co.kr/newsRead.php?sc=30000001&year=2017&no=535982',
                'http://news.mk.co.kr/newsRead.php?sc=30000001&year=2017&no=535166',
                ]
        for url in urls:
            yield scrapy.Request(url=url, callback=self.parse)

    def parse(self, response):
        t1_parse_logger.info("Entered the parse function to parse and index: %s" % response.url)  # Log at the beginning of the parse function
        item_loader = ItemLoader(item=AdvCrawlerItem(), response=response)
        if 'mk.co.kr' in response.url:
            mk_loader = item_loader.nested_xpath('//div[@id="top_header"]/div[@class="news_title"]/div[@class="news_title_text"]')
            try:
                mk_loader.add_xpath('date', 'div[@class="news_title_author"]/ul/li[@class="lasttime"]')
            except AttributeError:  # if the date is not in the "lasttime" li tag
                mk_loader.add_xpath('date', 'div[@class="news_title_author"]/ul/li[@class="lasttime1"]')
            except Exception as e:  # in case the error is not AttributeError
                t1_parse_logger.error("Error " + "(" + str(e) + ")" + " when trying to parse <<date>> from a mk article: %s" % response.url)
            try:
                mk_loader.add_xpath('author', 'div[@class="news_title_author"]/ul/li[@class="author"]')
            except AttributeError:  # in case there is no author (some mk articles have no author)
                item_loader.add_value('author', "None")  # if error, replace with the line below
                # item['author'] = "None"  # if the above gives any error, replace the above with this line
            except Exception as e:  # in case the error is not AttributeError
                t1_parse_logger.error("Error " + "(" + str(e) + ")" + " when trying to parse <<author>> from a mk article: %s" % response.url)
            item_loader.add_xpath('content', '//div[@id="Content"]/div[@class="left_content"]/div[@id="article_body"]/div[@class="art_txt"]')  # Content of the article (entire contents)
            mk_loader.add_xpath('title', 'h1[@class="top_title"]')  # Title of the article
        item_loader.add_value('content_type', content_type_dic['news'])
        item_loader.add_value('timestamp', str(datetime.now()))  # timestamp of when the document is being indexed
        item_loader.add_value('url', response.url)  # url of the article
        t1_parse_logger.info("Parsed and indexed: %s" % response.url)
        return item_loader.load_item()
And, my items.py file:
# -*- coding: utf-8 -*-
import scrapy
from scrapy.loader.processors import Join, MapCompose, TakeFirst
from w3lib.html import remove_tags

def filter_date(value):
    if isinstance(value, unicode):
        (year, month, day) = str(value.split(" ")[-2]).split(".")
        return year + "-" + month + "-" + day

def filter_utf(value):
    if isinstance(value, unicode):
        return value.encode('utf-8')

class AdvCrawlerItem(scrapy.Item):
    author = scrapy.Field(input_processor=MapCompose(remove_tags, TakeFirst, filter_utf),)  # Name of the publisher/author
    content = scrapy.Field(input_processor=MapCompose(remove_tags, Join, filter_utf),)  # Content of the article (entire contents)
    content_type = scrapy.Field()
    date = scrapy.Field(input_processor=MapCompose(remove_tags, TakeFirst, filter_date),)
    timestamp = scrapy.Field()  # timestamp of when the document is being indexed
    title = scrapy.Field(input_processor=MapCompose(remove_tags, TakeFirst, filter_utf),)  # title of the article
    url = scrapy.Field()  # url of the article
And, my pipelines.py file:
import json
from scrapy import signals
from scrapy.exporters import JsonLinesItemExporter

class AdvCrawlerJsonExportPipeline(object):
    def open_spider(self, spider):
        self.file = open('crawled-articles1.txt', 'w')

    def close_spider(self, spider):
        self.file.close()

    def process_item(self, item, spider):
        line = json.dumps(dict(item)) + "\n"
        self.file.write(line)
        return item
I am aware that the "TypeError: object() takes no parameters" error is usually thrown when the __init__ method of a class is either not defined at all or not defined to take parameters.
However, in the case above, how can I fix the error? Am I doing something wrong with the item loader or nested item loader?
When using Scrapy processors, you should instantiate the classes so that the resulting objects do the processing:
# wrong
field = Field(output_processor=MapCompose(TakeFirst))
# right
field = Field(output_processor=MapCompose(TakeFirst()))  # note the ()
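Applied to the items.py from the question, that means instantiating the built-in processors. Note that TakeFirst is normally used as an output_processor rather than inside MapCompose, so one reasonable rewrite is (a sketch, not the only option):

author = scrapy.Field(
    input_processor=MapCompose(remove_tags, filter_utf),
    output_processor=TakeFirst(),
)  # Name of the publisher/author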

flask_restful cannot return file

I have the following flask_restful file to return a CSV file to the user, but I'm getting the following error:
File "/usr/local/Cellar/python3/3.6.1/Frameworks/Python.framework/Versions/3.6/lib/python3.6/json/encoder.py", line 180, in default
o.__class__.__name__)
TypeError: Object of type 'Response' is not JSON serializable
import flask
from flask import request
from flask_restful import Resource

class MyAPIRes(Resource):
    @classmethod
    def get(cls):
        csv = '1,2,3\n4,5,6\n'
        response = flask.make_response(csv)
        response.headers['content-type'] = 'application/octet-stream'
        return response, 200
Removing the , 200 from the second part of the return statement should make it work.
That second argument makes flask_restful call a helper to create a response. But in this case you've already created a response object with make_response, which is fine: you need to build your own response object to return anything other than JSON. One of the things the helper does, though, is serialize your data for you (turn it into JSON), and the Response type is not serializable.
Taking a look at the trace, you can see it happening in the error message here:
File ".../flask_restful/__init__.py", line 510, in make_response
resp = self.representations[mediatype](data, *args, **kwargs)
File ".../flask_restful/representations/json.py", line 20, in output_json
dumped = dumps(data, **settings) + "\n"
Docs on response types can be found in the section on flask_restful response formats.
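Concretely, the fixed view is the same code as the question with the tuple return removed (make_response already defaults to status 200):

import flask
from flask_restful import Resource

class MyAPIRes(Resource):
    @classmethod
    def get(cls):
        csv = '1,2,3\n4,5,6\n'
        response = flask.make_response(csv)
        response.headers['content-type'] = 'application/octet-stream'
        return response  # already a Response object; no flask_restful serialization happens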

Haystack: KeyError in the elasticsearch_backend module

I'm using Django + Haystack + Elasticsearch.
When I send a request to this view
from haystack.views import FacetedSearchView
from .models import Object

class ObjectView(FacetedSearchView):
    def extra_context(self):
        extra = super(ObjectView, self).extra_context()
        if not self.results:
            extra['objects'] = Object.objects.all()
        else:
            searchqueryset = self.form.search()
            results = [result.pk for result in searchqueryset]
            extra['facets'] = self.results.facet_counts()
            extra['objects'] = Object.objects.filter(pk__in=results)
            extra['results'] = self.results
        return extra
this error is raised:
File "/home/deploy/.virtualenvs/deploy/local/lib/python2.7/site-packages/django/core/handlers/base.py", line 115, in get_response
response = callback(request, *callback_args, **callback_kwargs)
File "/home/deploy/.virtualenvs/deploy/local/lib/python2.7/site-packages/haystack/views.py", line 49, in __call__
return self.create_response()
File "/home/deploy/.virtualenvs/deploy/local/lib/python2.7/site-packages/haystack/views.py", line 129, in create_response
(paginator, page) = self.build_page()
File "/home/deploy/.virtualenvs/deploy/local/lib/python2.7/site-packages/haystack/views.py", line 106, in build_page
self.results[start_offset:start_offset + self.results_per_page]
File "/home/deploy/.virtualenvs/deploy/local/lib/python2.7/site-packages/haystack/query.py", line 266, in __getitem__
self._fill_cache(start, bound)
File "/home/deploy/.virtualenvs/deploy/local/lib/python2.7/site-packages/haystack/query.py", line 164, in _fill_cache
results = self.query.get_results(**kwargs)
File "/home/deploy/.virtualenvs/deploy/local/lib/python2.7/site-packages/haystack/backends/__init__.py", line 485, in get_results
self.run(**kwargs)
File "/home/deploy/.virtualenvs/deploy/local/lib/python2.7/site-packages/haystack/backends/elasticsearch_backend.py", line 942, in run
results = self.backend.search(final_query, **search_kwargs)
File "/home/deploy/.virtualenvs/deploy/local/lib/python2.7/site-packages/haystack/backends/__init__.py", line 26, in wrapper
return func(obj, query_string, *args, **kwargs)
File "/home/deploy/.virtualenvs/deploy/local/lib/python2.7/site-packages/haystack/backends/elasticsearch_backend.py", line 521, in search
distance_point=kwargs.get('distance_point'), geo_sort=geo_sort)
File "/home/deploy/.virtualenvs/deploy/local/lib/python2.7/site-packages/haystack/backends/elasticsearch_backend.py", line 571, in _process_results
raw_suggest = raw_results['suggest']['suggest']
KeyError: 'suggest'
A curious fact: the problem occurs only when the project runs under production settings, even though I haven't changed a single thing involving Haystack or Elasticsearch in the settings_production module (except for the URL key).
project/settings_production.py
'URL': 'http://0.0.0.0:9200/'
In production, I'm using nothing more than a simple FastCGI.
And here's what really bothers me: sometimes I get no errors on this view, and everything works just fine...
Does anyone have an idea of what's going on?
Thanks a lot!
UPDATE:
So, I set up my whole project on another computer. After some tests I verified:
- this problem is not related to my production settings as I described above;
- the error is not raised when the elasticsearch service is stopped;
- if the service is running:
  - when Object.objects.all() returns some QuerySet results, I get no errors;
  - when Object.objects.all() returns an empty QuerySet, the problem persists.
I guess this is some kind of bug in Haystack's elasticsearch_backend module. Still, I'm not sure.
Yup, it's a bug in haystack. I've put in a pull request, but in the meantime, options to get running are:
Set INCLUDE_SPELLING in your haystack settings to False, or
Use our fork: https://github.com/greenkahuna/django-haystack
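For the first option, the connection settings would look something like this (the alias, engine path, and index name shown here are the usual defaults; adjust to your project):

HAYSTACK_CONNECTIONS = {
    'default': {
        'ENGINE': 'haystack.backends.elasticsearch_backend.ElasticsearchSearchEngine',
        'URL': 'http://127.0.0.1:9200/',
        'INDEX_NAME': 'haystack',
        # skips the spelling-suggestion lookup that raises KeyError: 'suggest'
        'INCLUDE_SPELLING': False,
    },
}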

testing django web app that uses cookies/session

In views.py:
get_dict = Site.objects.getDictionary(request.COOKIES['siteid'])  # gets a dictionary with site information based on the id from the cookie
In tests.py:
from django.test import TestCase

class WebAppTest(TestCase):
    def test_status(self):
        response = self.client.get('/main/', {})
        response.status_code  # ---> passes with code 200
        response = self.client.get('/webpage/', {'blog': 1})
        response.status_code  # ----> this is failing
In order to present the blog page, it goes to a view where it gets a dictionary using an existing cookie, processes it, and renders templates. This works fine when running the app, but the tests are failing. Having never tested Django web apps, I'm not sure how to test it right. Here is the traceback.
Traceback (most recent call last):
File "<console>", line 2, in <module>
File "/usr/lib/pymodules/python2.6/django/test/client.py", line 313, in post
response = self.request(**r)
File "/usr/lib/pymodules/python2.6/django/core/handlers/base.py", line 92, in get_response
response = callback(request, *callback_args, **callback_kwargs)
File "/var/lib/django/data/../webpage/views.py", line 237, in getCostInfo
get_dict = Site.objects.getDictionary(request.COOKIES['siteid'])
KeyError: 'siteid'
I went through some online samples but couldn't find anything that deals in depth with cookies/sessions. Any ideas or pointers to useful links are highly appreciated.
Take a look at the Persistent State section of the Django Testing docs.
In your case, I would expect your test to be something more like:
from django.test import TestCase
from django.test.client import Client

class WebAppTest(TestCase):
    def setUp(self):
        self.client = Client()
        session = self.client.session
        session['siteid'] = 69  ## Or any valid siteid.
        session.save()

    def test_status(self):
        response = self.client.get('/main/', {})
        self.assertEqual(response.status_code, 200)
        response = self.client.get('/webpage/', {'blog': 1})
        self.assertEqual(response.status_code, 200)
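Note that the view in the question reads request.COOKIES['siteid'] rather than the session. If it is really the cookie you need to populate, the test client also exposes a cookies attribute you can set directly (a sketch):

from django.test import TestCase
from django.test.client import Client

class WebAppCookieTest(TestCase):
    def setUp(self):
        self.client = Client()
        self.client.cookies['siteid'] = '69'  # any valid siteid; cookie values are strings

    def test_status(self):
        response = self.client.get('/webpage/', {'blog': 1})
        self.assertEqual(response.status_code, 200)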