Test datetime object using Flask and mockupdb (MongoDB)

I'm testing MongoDB as the DB behind a Flask REST server (with flask-pymongo), using the mockupdb module. I want to receive a datetime in the JSON request and store it as a Date object, so that I can run range queries on this field in the future. For that reason I send the data as extended JSON (EJSON/BSON), to keep the data types exactly as I sent them.
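For context, bson.json_util.dumps is what produces the {"$date": ...} form that shows up later in the traceback; a minimal sketch (the field name is just an example):

from datetime import datetime
from bson import json_util

# with pymongo's legacy JSON options this serializes the datetime as
# milliseconds since the epoch, e.g. '{"datetime": {"$date": 1543492800000}}'
payload = json_util.dumps({"datetime": datetime(2018, 11, 29, 12, 0)})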
This is the testcase:
import random
import uuid
from datetime import datetime

import pytest
from bson import ObjectId
from bson.json_util import dumps
from mockupdb import MockupDB, OpMsg, go

# Config and create_app come from the application under test

@pytest.fixture()
def client_and_mongoserver():
    random.seed()

    mongo_server = MockupDB(auto_ismaster=True, verbose=True)
    mongo_server.run()

    config = Config()
    config.MONGO_URI = mongo_server.uri + '/test'

    flask_app = create_app(config)
    flask_app.testing = True
    client = flask_app.test_client()

    yield client, mongo_server

    mongo_server.stop()
def test_insert(client_and_mongoserver):
    client, server = client_and_mongoserver

    headers = [('Content-Type', 'application/json')]

    id = str(uuid.uuid4()).encode('utf-8')[:12]
    now = datetime.now()
    obj_id = ObjectId(id)
    toInsert = {
        "_id": obj_id,
        "datetime": now
    }
    toVerify = {
        "_id": obj_id,
        "datetime": now
    }

    future = go(client.post, '/api/insert', data=dumps(toInsert), headers=headers)

    request = server.receives(
        OpMsg({
            'insert': 'test',
            'ordered': True,
            '$db': "test",
            '$readPreference': {"mode": "primary"},
            'documents': [
                toVerify
            ]
        }, namespace='test')
    )
    request.ok(cursor={'inserted_id': id})

    # act
    http_response = future()

    # assert
    data = http_response.get_data(as_text=True)
This is the endpoint. Before the insert call I convert the datetime string to a datetime object:
from flask import request, abort
from flask_restful import Resource
from bson import json_util

# `mongo` is the flask_pymongo.PyMongo instance created elsewhere in the app

class Insert(Resource):
    def post(self):
        if not request.json:
            abort(400)

        json_data = json_util.loads(request.data)
        result = mongo.db.test.insert_one(json_data)

        return {'message': 'OK'}, 200
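For reference, bson.json_util.loads is what turns that {"$date": ...} value back into a real datetime before insert_one sees it; a quick round-trip check:

from bson import json_util

doc = json_util.loads('{"datetime": {"$date": 1543493218306}}')
print(type(doc["datetime"]))  # <class 'datetime.datetime'>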
But the test generates this assertion error:
self = MockupDB(localhost, 37213)
args = (OpMsg({"insert": "test", "ordered": true, "$db": "test", "$readPreference": {"mode": "primary"}, "documents": [{"_id": {"$oid": "63343264363661622d393764"}, "datetime": {"$date": 1543493218306}}]}, namespace="test"),)
kwargs = {}, timeout = 10, end = 1543504028.309115
matcher = Matcher(OpMsg({"insert": "test", "ordered": true, "$db": "test", "$readPreference": {"mode": "primary"}, "documents": [{"_id": {"$oid": "63343264363661622d393764"}, "datetime": {"$date": 1543493218306}}]}, namespace="test"))
request = OpMsg({"insert": "test", "ordered": true, "$db": "test", "$readPreference": {"mode": "primary"}, "documents": [{"_id": {"$oid": "63343264363661622d393764"}, "datetime": {"$date": 1543493218306}}]}, namespace="test")

    def receives(self, *args, **kwargs):
        """Pop the next `Request` and assert it matches.

        Returns None if the server is stopped.

        Pass a `Request` or request pattern to specify what client request to
        expect. See the tutorial for examples. Pass ``timeout`` as a keyword
        argument to override this server's ``request_timeout``.
        """
        timeout = kwargs.pop('timeout', self._request_timeout)
        end = time.time() + timeout
        matcher = Matcher(*args, **kwargs)
        while not self._stopped:
            try:
                # Short timeout so we notice if the server is stopped.
                request = self._request_q.get(timeout=0.05)
            except Empty:
                if time.time() > end:
                    raise AssertionError('expected to receive %r, got nothing' % matcher.prototype)
            else:
                if matcher.matches(request):
                    return request
                else:
                    raise AssertionError('expected to receive %r, got %r'
>                       % (matcher.prototype, request))
E       AssertionError: expected to receive OpMsg({"insert": "test", "ordered": true, "$db": "test", "$readPreference": {"mode": "primary"}, "documents": [{"_id": {"$oid": "63343264363661622d393764"}, "datetime": {"$date": 1543493218306}}]}, namespace="test"), got OpMsg({"insert": "test", "ordered": true, "$db": "test", "$readPreference": {"mode": "primary"}, "documents": [{"_id": {"$oid": "63343264363661622d393764"}, "datetime": {"$date": 1543493218306}}]}, namespace="test")

.venv/lib/python3.6/site-packages/mockupdb/__init__.py:1291: AssertionError
The values match, but the assertion is raised anyway.
How can I test the Date object using Flask?
EDIT:
As pointed out by @bauman.space, the lack of:

'$db': 'test',  # this key appears somewhere at the driver
'$readPreference': {"mode": "primary"},  # so does this one

doesn't affect the validation made by mockupdb. I'd tested that in other test cases.
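One detail worth checking, since it produces exactly this symptom: BSON stores datetimes with millisecond precision, while datetime.now() carries microseconds. The datetime kept in toVerify still has its microseconds (and is naive local time), but the copy that travels through json_util.dumps and back is truncated to milliseconds and decoded as UTC, so the two values can compare unequal even though both print as the same {"$date": ...}. A sketch of normalizing before building the documents (assuming UTC is acceptable in the test):

now = datetime.utcnow()
# drop sub-millisecond precision so the value survives the BSON round trip unchanged
now = now.replace(microsecond=(now.microsecond // 1000) * 1000)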
EDIT 2: Changed the question to prevent confusion.

Your assertion error is quite descriptive:
AssertionError:
expected to receive
    OpMsg(
        {"insert": "test",
         "ordered": true,
         "documents": [{"_id": "a3dbe8a7e1cc43469b706a8877b0a14a",
                        "datetime": {"$date": 1542901445120}}]
        }, namespace="test"
    ),
got
    OpMsg(
        {"insert": "test",
         "ordered": true,
         "$db": "test",
         "$readPreference": {"mode": "primary"},
         "documents": [{"_id": "a3dbe8a7e1cc43469b706a8877b0a14a",
                        "datetime": {"$date": 1542901445120}}]
        }, namespace="test")
Looks like you simply need to include some of the standard MongoDB keys in your verification code.
Swap yours out with this and give it a try?
request = server.receives(
    OpMsg({
        'insert': 'test',
        'ordered': True,
        '$db': 'test',  # this key appears somewhere at the driver
        '$readPreference': {"mode": "primary"},  # so does this one
        'documents': [
            toVerify
        ]
    }, namespace='test')
)

Related

Get Dimensions for USAGE_TYPE AWS Boto3 CostExplorer Client

I'm trying to get costs using the CostExplorer client in boto3, but I can't find the values to use as a Dimension filter. The documentation says we can extract those values from GetDimensionValues, but how do I use GetDimensionValues?
response = client.get_cost_and_usage(
    TimePeriod={
        'Start': str(start_time).split()[0],
        'End': str(end_time).split()[0]
    },
    Granularity='DAILY',
    Filter={
        'Dimensions': {
            'Key': 'USAGE_TYPE',
            'Values': [
                'DataTransfer-In-Bytes'
            ]
        }
    },
    Metrics=[
        'NetUnblendedCost',
    ],
    GroupBy=[
        {
            'Type': 'DIMENSION',
            'Key': 'SERVICE'
        },
    ]
)
The boto3 reference for GetDimensionValues has a lot of details on how to use that call. Here's some sample code you might use to print out possible dimension values:
response = client.get_dimension_values(
    TimePeriod={
        'Start': '2022-01-01',
        'End': '2022-06-01'
    },
    Dimension='USAGE_TYPE',
    Context='COST_AND_USAGE',
)

for dimension_value in response["DimensionValues"]:
    print(dimension_value["Value"])
Output:
APN1-Catalog-Request
APN1-DataTransfer-Out-Bytes
APN1-Requests-Tier1
APN2-Catalog-Request
APN2-DataTransfer-Out-Bytes
APN2-Requests-Tier1
APS1-Catalog-Request
APS1-DataTransfer-Out-Bytes
.....
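The response may be paginated when an account has many dimension values; a sketch of draining all pages with NextPageToken (parameter and field names as in the boto3 docs):

values = []
kwargs = {
    "TimePeriod": {"Start": "2022-01-01", "End": "2022-06-01"},
    "Dimension": "USAGE_TYPE",
    "Context": "COST_AND_USAGE",
}
while True:
    response = client.get_dimension_values(**kwargs)
    values.extend(dv["Value"] for dv in response["DimensionValues"])
    if "NextPageToken" not in response:
        break
    kwargs["NextPageToken"] = response["NextPageToken"]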

IndexError: list index out of range with moto

I am mocking an internal function that returns a DynamoDB query. The query had begins_with, which was throwing IndexError: list index out of range.
I changed the query and removed begins_with, yet I still get the same error. If I remove the AND condition from KeyConditionExpression then the query works.
Below is the query:
val = 'test#val#testing'
input_query = {
    'TableName': <table_name>,
    'KeyConditionExpression': '#23b62 = :23b62 And #23b63 = :23b63)',
    'FilterExpression': 'contains(#23b64, :23b64)',
    'ProjectionExpression': '#23b60,#23b61',
    'ExpressionAttributeNames': {'#23b60': 'level', '#23b61': 'test_id', '#23b62': 'PK', '#23b63': 'SK', '#23b64': 'used_in'},
    'ExpressionAttributeValues': {':23b62': {'S': 'testing'}, ':23b63': {'S': val}, ':23b64': {'S': 'test'}}
}
New query:
dynamodb_client.query(
    TableName="table",
    KeyConditionExpression="#PK = :PK And #SK = :SK",
    ExpressionAttributeNames={
        "#PK": "PK",
        "#SK": "SK"
    },
    FilterExpression="contains(Used, :used)",
    ExpressionAttributeValues={
        ":PK": {"S": "tests"},
        ":SK": {"S": "test#en#testing"},
        ":used": {"S": "testing"}
    }
)
Test case:
from botocore.exceptions import ClientError
from dynamodb_json import json_util as dynamodb_json
import logging
from contextlib import contextmanager
import pytest
from unittest.mock import patch

@contextmanager
def ddb_setup(dynamodb_resource):
    table = dynamodb_resource.create_table(
        TableName='table',
        KeySchema=[
            {
                'AttributeName': 'PK',
                'KeyType': 'HASH'
            }, {
                'AttributeName': 'SK',
                'KeyType': 'SORT'
            },
        ],
        AttributeDefinitions=[
            {
                'AttributeName': 'PK',
                'AttributeType': 'S'
            }, {
                'AttributeName': 'SK',
                'AttributeType': 'S'
            },
        ],
        ProvisionedThroughput={
            'ReadCapacityUnits': 1,
            'WriteCapacityUnits': 1,
        }
    )
    yield
class TestDynamoDB:
    def test_create_table(self, dynamodb_resource, dynamodb_client):
        with ddb_setup(dynamodb_resource):
            try:
                response = dynamodb_client.describe_table(
                    TableName='table')
                resp = dynamodb_client.query(
                    TableName="table",
                    KeyConditionExpression="#PK = :PK And #SK = :SK",
                    ExpressionAttributeNames={
                        "#PK": "PK",
                        "#SK": "SK"
                    },
                    FilterExpression="contains(Used, :used)",
                    ExpressionAttributeValues={
                        ":PK": {"S": "tests"},
                        ":SK": {"S": "test#en#testing"},
                        ":used": {"S": "testing"}
                    }
                )
            except ClientError as err:
                logger.error(f"error: {err.response['Error']['Code']}")
                assert err.response['Error']['Code'] == 'ResourceNotFoundException'
Could anyone suggest how I can run this query with moto with an AND condition?
Here is an example of a working test configuration using pytest and moto. I've added code that shows how to use the AND condition with both the resource and the client API.
import boto3
import boto3.dynamodb.conditions as conditions
import moto
import pytest

TABLE_NAME = "data"

@pytest.fixture
def test_table():
    with moto.mock_dynamodb():
        client = boto3.client("dynamodb")
        client.create_table(
            AttributeDefinitions=[
                {"AttributeName": "PK", "AttributeType": "S"},
                {"AttributeName": "SK", "AttributeType": "S"}
            ],
            TableName=TABLE_NAME,
            KeySchema=[
                {"AttributeName": "PK", "KeyType": "HASH"},
                {"AttributeName": "SK", "KeyType": "RANGE"}
            ],
            BillingMode="PAY_PER_REQUEST"
        )
        table = boto3.resource("dynamodb").Table(TABLE_NAME)
        table.put_item(Item={
            "PK": "pk_value",
            "SK": "sk_value"
        })
        yield TABLE_NAME
def test_query_with_and_using_resource(test_table):
    table = boto3.resource("dynamodb").Table(TABLE_NAME)
    response = table.query(
        KeyConditionExpression=conditions.Key("PK").eq("pk_value") & conditions.Key("SK").eq("sk_value")
    )
    assert len(response["Items"]) == 1

def test_query_with_and_using_client(test_table):
    client = boto3.client("dynamodb")
    response = client.query(
        TableName=TABLE_NAME,
        KeyConditionExpression="#PK = :PK AND #SK = :SK",
        ExpressionAttributeNames={
            "#PK": "PK",
            "#SK": "SK"
        },
        ExpressionAttributeValues={
            ":PK": {"S": "pk_value"},
            ":SK": {"S": "sk_value"}
        }
    )
    assert len(response["Items"]) == 1
First, we set up a table with a dummy item, and then there are two tests, the first for the resource and the second for the client API. Maybe this helps you figure out the mistake.
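One environment caveat when running moto tests: boto3.client("dynamodb") needs a region, and moto's documentation recommends pointing tests at dummy credentials so they can never touch a real account. A typical autouse fixture (the fixture name is just a convention):

import os
import pytest

@pytest.fixture(autouse=True)
def aws_credentials():
    # fake credentials and a fixed region so boto3 never resolves a real AWS account
    os.environ["AWS_ACCESS_KEY_ID"] = "testing"
    os.environ["AWS_SECRET_ACCESS_KEY"] = "testing"
    os.environ["AWS_DEFAULT_REGION"] = "us-east-1"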
AWS uses the keyword RANGE to indicate that something is a sort-key. (No idea why..)
If you replace:
'KeyType': 'SORT'
with
'KeyType': 'RANGE'
the test passes.
I'm assuming that AWS throws a more obvious error when creating a table with an unknown KeyType. If you want, you can create a feature request on Moto's GitHub for it to replicate that behaviour and throw the same exception.

How to format a JSON response in Django?

I am retrieving data from multiple tables in Django.
My current response is:
{
    "status": 0,
    "message": "Client details retrived successfully...!!!",
    "results": [
        {
            "id": 11,
            "client_id": "CL15657917080578748000",
            "client_name": "Pruthvi Katkar",
            "client_pan_no": "RGBB004A11",
            "client_adhar_no": "12312312313",
            "legal_entity_name": "ABC",
            "credit_period": "6 months",
            "client_tin_no": 4564565,
            "client_email_id": "abc@gmail.com",
            "head_office_name": "ABC",
            "office_name": "asd234",
            "office_email_id": "zxc@gmail.com",
            "office_contact": "022-27547119",
            "gst_number": "CGST786876876",
            "office_country": "India",
            "office_state": "gujrat",
            "office_district": "vadodara",
            "office_taluka": "kachh",
            "office_city": "vadodara",
            "office_street": "New rode 21",
            "office_pincode": 2344445,
            "contact_person_name": "prasad",
            "contact_person_designation": "DM",
            "contact_person_number": "456754655",
            "contact_person_email": "asd@gmail.com",
            "contact_person_mobile": "5675545654",
            "created_at": "2019-08-14T14:08:28.057Z",
            "created_by": "Prathamseh",
            "updated_at": "2019-08-14T14:08:28.057Z",
            "updated_by": "prasad",
            "is_deleted": false
        },
        {
            "id": 11,
            "user_id": "CL15657917080578748000",
            "bank_details_id": "BL15657917080778611000",
            "bank_name": "Pruthvi",
            "branch": "vashi",
            "ifsc_code": "BOI786988",
            "account_number": 56756765765765,
            "account_name": "Pruthvi",
            "is_deleted": false
        },
        {
            "id": 10,
            "document_details_id": "DL15657917080808598000",
            "user_id": "CL15657917080578748000",
            "document_type": "Pruthvi ID",
            "document": "www.sendgrid.com/pan",
            "is_deleted": false
        }
    ]
}
Expected Response:
I am getting the queryset from the DB in models.py and sending it to views.py, where I iterate over the dict, but I am not getting the expected response.
views.py
@csrf_exempt
def get_client_details(request):
    try:
        # Initialising lists for storing results
        result = []
        temp_array = []
        # Getting data from request body
        client_master_dict = json.loads(request.body)
        # Response from get client data
        records = ClientDetails.get_client_data(client_master_dict)
        # Iterating over the records object for getting data
        for i in range(len(records)):
            # Converting the queryset objects to json array format
            record_result_list = list(records[i].values())
            # If multiple records are present
            if len(record_result_list) > 1:
                for j in range(len(record_result_list)):
                    user_info = record_result_list[j]
                    temp_array.append(user_info)
                result.append(temp_array)
                temp_array = []
            # For single record
            else:
                result.append(record_result_list[0])
        # Success
        returnObject = {
            "status": messages.SUCCESS,
            "message": messages.CLIENT_RETRIVE_SUCCESS,
            "results": result
        }
        return JsonResponse(returnObject, safe=False)
I think the issue might be in my inner for loop. Can anyone help me out with this? Is there any way to iterate over the nested JSON object?
models.py
@classmethod
def get_client_data(cls, client_master_dict):
    try:
        response_list = []
        client_id = client_master_dict['client_id']
        client_details = cls.objects.filter(client_id=client_id, is_deleted=False)
        bank_details = BankDetails.objects.filter(user_id=client_id, is_deleted=False)
        document_details = DocumentDetails.objects.filter(user_id=client_id, is_deleted=False)
        response_list.append(client_details)
        response_list.append(bank_details)
        response_list.append(document_details)
        return response_list
    except Exception as error:
        print("Error in get_client_data", error)
        return False
Here I'm fetching data from 3 tables and adding it into a list.
After printing the data on the console I am getting:
[{'id': 11, 'client_id': 'CL15657917080578748000', 'client_name': 'Pruthvi Katkar', 'client_pan_no': 'RGBB004A11', 'client_adhar_no': '12312312313', 'legal_entity_name': 'ABC', 'credit_period': '6 months', 'client_tin_no': 4564565, 'client_email_id': 'abc@gmail.com', 'head_office_name': 'ABC', 'office_name': 'asd234', 'office_email_id': 'zxc@gmail.com', 'office_contact': '022-27547119', 'gst_number': 'CGST786876876', 'office_country': 'India', 'office_state': 'gujrat', 'office_district': 'vadodara', 'office_taluka': 'kachh', 'office_city': 'vadodara', 'office_street': 'New rode 21', 'office_pincode': 2344445, 'contact_person_name': 'prasad', 'contact_person_designation': 'DM', 'contact_person_number': '456754655', 'contact_person_email': 'asd@gmail.com', 'contact_person_mobile': '5675545654', 'created_at': datetime.datetime(2019, 8, 14, 14, 8, 28, 57874, tzinfo=<UTC>), 'created_by': 'Prathamseh', 'updated_at': datetime.datetime(2019, 8, 14, 14, 8, 28, 57874, tzinfo=<UTC>), 'updated_by': 'prasad', 'is_deleted': False}]
[{'id': 11, 'user_id': 'CL15657917080578748000', 'bank_details_id': 'BL15657917080778611000', 'bank_name': 'Pruthvi', 'branch': 'vashi', 'ifsc_code': 'BOI786988', 'account_number': 56756765765765, 'account_name': 'Pruthvi', 'is_deleted': False}]
[{'id': 10, 'document_details_id': 'DL15657917080808598000', 'user_id': 'CL15657917080578748000', 'document_type': 'Pruthvi ID', 'document': 'www.sendgrid.com/pan', 'is_deleted': False}]
Did you check the output of record_result_list? You can tell right there whether it's recovering the data in the format you requested. Try the print-to-screen method to debug.
As far as I can see, the expected output and the hierarchy of results for bank details do not match. I don't know how you are handling the hierarchy. Are you taking it directly from the JSON? Or are you just taking the data and creating the hierarchy in the expected output?
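If the goal is a keyed hierarchy rather than one flat results list, one option (a sketch; the key names are made up, since the expected response isn't shown) is to serialize each queryset from get_client_data under its own key:

client_details, bank_details, document_details = records
result = {
    "client": list(client_details.values()),
    "bank_details": list(bank_details.values()),
    "documents": list(document_details.values()),
}
returnObject = {
    "status": messages.SUCCESS,
    "message": messages.CLIENT_RETRIVE_SUCCESS,
    "results": result,
}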

ElasticSearch: Getting old visitor data into an index

I'm learning ElasticSearch in the hopes of dumping my business data into ES and viewing it with Kibana. After a week of various issues I finally have ES and Kibana working (1.7.0 and 4 respectively) on 2 Ubuntu 14.04 desktop machines (clustered).
The issue I'm having now is how best to get the data into ES. The data flow is that I capture the PHP global variables $_REQUEST and $_SERVER for each visit to a text file with a unique ID. From there, if they fill in a form, I capture that data in a text file, also named with that unique ID, in a different directory. Then my customers tell me whether that form fill was any good, with a delay of up to 50 days.
So I'm starting with the visitor data - $_REQUEST and $_SERVER. A lot of it is redundant, so I'm really just attempting to capture the timestamp of their arrival, their IP, the IP of the server they visited, the domain they visited, the unique ID, and their user agent. So I created this mapping:
time_date_mapping = { 'type': 'date_time' }
str_not_analyzed = { 'type': 'string' }  # Originally this included 'index': 'not_analyzed' as well

visit_mapping = {
    'properties': {
        'uniqID': str_not_analyzed,
        'pages': str_not_analyzed,
        'domain': str_not_analyzed,
        'Srvr IP': str_not_analyzed,
        'Visitor IP': str_not_analyzed,
        'Agent': { 'type': 'string' },
        'Referrer': { 'type': 'string' },
        'Entrance Time': time_date_mapping,  # Stored as a Unix timestamp
        'Request Time': time_date_mapping,   # Stored as a Unix timestamp
        'Raw': { 'type': 'string', 'index': 'not_analyzed' },
    },
}
I then enter it into ES with:
es.index(
    index=Visit_to_ElasticSearch.INDEX,
    doc_type=Visit_to_ElasticSearch.DOC_TYPE,
    id=self.uniqID,
    timestamp=int(math.floor(self._visit['Entrance Time'])),
    body=visit
)
When I look at the data in the index on ES, only Entrance Time, _id, _type, domain, and uniqID are indexed for searching (according to Kibana). All of the data is present in the document, but most of the fields show "Unindexed fields can not be searched."
Additionally, I was attempting to just get a pie chart of the agents, but I couldn't figure out how to get it visualized because, no matter what boxes I click, the Agent field is never an option for aggregation. I just mention it because the fields which are indexed do show up.
I've attempted to mimic the mapping examples in the elasticsearch.py example which pulls in GitHub data. Can someone correct me on how I'm using that map?
Thanks
------------ Mapping -------------
{
    "visits": {
        "mappings": {
            "visit": {
                "properties": {
                    "Agent": {
                        "type": "string"
                    },
                    "Entrance Time": {
                        "type": "date",
                        "format": "dateOptionalTime"
                    },
                    "Raw": {
                        "properties": {
                            "Entrance Time": {
                                "type": "double"
                            },
                            "domain": {
                                "type": "string"
                            },
                            "uniqID": {
                                "type": "string"
                            }
                        }
                    },
                    "Referrer": {
                        "type": "string"
                    },
                    "Request Time": {
                        "type": "string"
                    },
                    "Srvr IP": {
                        "type": "string"
                    },
                    "Visitor IP": {
                        "type": "string"
                    },
                    "domain": {
                        "type": "string"
                    },
                    "uniqID": {
                        "type": "string"
                    }
                }
            }
        }
    }
}
------------- Update and New Mapping -----------
So I deleted the index and recreated it. The original index had some data in it from before I knew anything about mapping the data to specific field types. This seemed to fix the issue with only a few fields being indexed.
However, parts of my mapping appear to be ignored. Specifically the Agent string mapping:
visit_mapping = {
    'properties': {
        'uniqID': str_not_analyzed,
        'pages': str_not_analyzed,
        'domain': str_not_analyzed,
        'Srvr IP': str_not_analyzed,
        'Visitor IP': str_not_analyzed,
        'Agent': { 'type': 'string', 'index': 'not_analyzed' },
        'Referrer': { 'type': 'string' },
        'Entrance Time': time_date_mapping,
        'Request Time': time_date_mapping,
        'Raw': { 'type': 'string', 'index': 'not_analyzed' },
    },
}
Here's the output of http://localhost:9200/visits_test2/_mapping
{
    "visits_test2": {
        "mappings": {
            "visit": {
                "properties": {
                    "Agent": {"type": "string"},
                    "Entrance Time": {"type": "date", "format": "dateOptionalTime"},
                    "Raw": {
                        "properties": {
                            "Entrance Time": {"type": "double"},
                            "domain": {"type": "string"},
                            "uniqID": {"type": "string"}
                        }
                    },
                    "Referrer": {"type": "string"},
                    "Request Time": {"type": "date", "format": "dateOptionalTime"},
                    "Srvr IP": {"type": "string"},
                    "Visitor IP": {"type": "string"},
                    "domain": {"type": "string"},
                    "uniqID": {"type": "string"}
                }
            }
        }
    }
}
Note that I've used an entirely new index. The reason is that I wanted to make sure nothing was carrying over from one to the next.
Note that I'm using the Python library elasticsearch.py and following their examples for mapping syntax.
--------- Python Code for Entering Data into ES, per comment request -----------
Below is a file named mapping.py. I have not yet fully commented the code, since this was just code to test whether this method of data entry into ES was viable. If it is not self-explanatory, let me know and I'll add additional comments.
Note, I programmed in PHP for years before picking up Python. In order to get up and running faster with Python I created a couple of files with basic string and file manipulation functions and made them into a package. They are written in Python and meant to mimic the behavior of built-in PHP functions. So when you see a call to php_basic_* it is one of those functions.
# Standard Library Imports
import json, copy, datetime, time, enum, os, sys, numpy, math
from datetime import datetime
from enum import Enum, unique

from elasticsearch import Elasticsearch

# My Library
import basicconfig, mybasics
from mybasics.cBaseClass import BaseClass, BaseClassErrors
from mybasics.cHelpers import HandleErrors, LogLvl

# This imports several constants, a couple of functions, and a helper class
from basicconfig.startup_config import *

# Connect to ElasticSearch
es = Elasticsearch([{'host': 'localhost', 'port': '9200'}])

# Create mappings of a visit
time_date_mapping = { 'type': 'date_time' }
str_not_analyzed = { 'type': 'string' }  # This originally included 'index': 'not_analyzed' as well

visit_mapping = {
    'properties': {
        'uniqID': str_not_analyzed,
        'pages': str_not_analyzed,
        'domain': str_not_analyzed,
        'Srvr IP': str_not_analyzed,
        'Visitor IP': str_not_analyzed,
        'Agent': { 'type': 'string', 'index': 'not_analyzed' },
        'Referrer': { 'type': 'string' },
        'Entrance Time': time_date_mapping,
        'Request Time': time_date_mapping,
        'Raw': { 'type': 'string', 'index': 'not_analyzed' },
        'Pages': { 'type': 'string', 'index': 'not_analyzed' },
    },
}

class Visit_to_ElasticSearch(object):
    """
    """

    INDEX = 'visits'
    DOC_TYPE = 'visit'

    def __init__(self, fname, index=True):
        """
        """
        self._visit = json.loads(php_basic_files.file_get_contents(fname))
        self._pages = self._visit.pop('pages')

        self.uniqID = self._visit['uniqID']
        self.domain = self._visit['domain']
        self.entrance_time = self._convert_time(self._visit['Entrance Time'])

        # Get a list of the page IDs
        self.pages = self._pages.keys()

        # Extract IPs and such from a single page
        page = self._pages[self.pages[0]]
        srvr = page['SERVER']
        req = page['REQUEST']

        self.visitor_ip = srvr['REMOTE_ADDR']
        self.srvr_ip = srvr['SERVER_ADDR']
        self.request_time = self._convert_time(srvr['REQUEST_TIME'])
        self.agent = srvr['HTTP_USER_AGENT']

        # Now go grab data that might not be there...
        self._extract_optional()

        if index is True:
            self.index_with_elasticsearch()

    def _convert_time(self, ts):
        """
        """
        try:
            dt = datetime.fromtimestamp(ts)
        except TypeError:
            dt = datetime.fromtimestamp(float(ts))
        return dt.strftime('%Y-%m-%dT%H:%M:%S')

    def _extract_optional(self):
        """
        """
        self.referrer = ''

    def index_with_elasticsearch(self):
        """
        """
        visit = {
            'uniqID': self.uniqID,
            'pages': [],
            'domain': self.domain,
            'Srvr IP': self.srvr_ip,
            'Visitor IP': self.visitor_ip,
            'Agent': self.agent,
            'Referrer': self.referrer,
            'Entrance Time': self.entrance_time,
            'Request Time': self.request_time,
            'Raw': self._visit,
            'Pages': php_basic_str.implode(', ', self.pages),
        }
        es.index(
            index=Visit_to_ElasticSearch.INDEX,
            doc_type=Visit_to_ElasticSearch.DOC_TYPE,
            id=self.uniqID,
            timestamp=int(math.floor(self._visit['Entrance Time'])),
            body=visit
        )

es.indices.create(
    index=Visit_to_ElasticSearch.INDEX,
    body={
        'settings': {
            'number_of_shards': 5,
            'number_of_replicas': 1,
        }
    },
    # ignore already existing index
    ignore=400
)
In case it matters this is the simple loop I use to dump the data into ES:
for f in all_files:
    try:
        visit = mapping.Visit_to_ElasticSearch(f)
    except IOError:
        pass
where all_files is a list of all the visit files (full path) I have in my test data set.
Here is a sample visit file from a Google Bot visit:
{u'Entrance Time': 1407551587.7385,
 u'domain': u'############',
 u'pages': {u'6818555600ccd9880bf7acef228c5d47': {
     u'REQUEST': [],
     u'SERVER': {u'DOCUMENT_ROOT': u'/var/www/####/',
                 u'Entrance Time': 1407551587.7385,
                 u'GATEWAY_INTERFACE': u'CGI/1.1',
                 u'HTTP_ACCEPT': u'*/*',
                 u'HTTP_ACCEPT_ENCODING': u'gzip,deflate',
                 u'HTTP_CONNECTION': u'Keep-alive',
                 u'HTTP_FROM': u'googlebot(at)googlebot.com',
                 u'HTTP_HOST': u'############',
                 u'HTTP_IF_MODIFIED_SINCE': u'Fri, 13 Jun 2014 20:26:33 GMT',
                 u'HTTP_USER_AGENT': u'Mozilla/5.0 (compatible; Googlebot/2.1; +http://www.google.com/bot.html)',
                 u'PATH': u'/usr/local/bin:/usr/bin:/bin',
                 u'PHP_SELF': u'/index.php',
                 u'QUERY_STRING': u'',
                 u'REDIRECT_SCRIPT_URI': u'http://############/',
                 u'REDIRECT_SCRIPT_URL': u'############',
                 u'REDIRECT_STATUS': u'200',
                 u'REDIRECT_URL': u'############',
                 u'REMOTE_ADDR': u'############',
                 u'REMOTE_PORT': u'46271',
                 u'REQUEST_METHOD': u'GET',
                 u'REQUEST_TIME': u'1407551587',
                 u'REQUEST_URI': u'############',
                 u'SCRIPT_FILENAME': u'/var/www/PIAN/index.php',
                 u'SCRIPT_NAME': u'/index.php',
                 u'SCRIPT_URI': u'http://############/',
                 u'SCRIPT_URL': u'/############/',
                 u'SERVER_ADDR': u'############',
                 u'SERVER_ADMIN': u'admin@############',
                 u'SERVER_NAME': u'############',
                 u'SERVER_PORT': u'80',
                 u'SERVER_PROTOCOL': u'HTTP/1.1',
                 u'SERVER_SIGNATURE': u'<address>Apache/2.2.22 (Ubuntu) Server at ############ Port 80</address>\n',
                 u'SERVER_SOFTWARE': u'Apache/2.2.22 (Ubuntu)',
                 u'uniqID': u'bbc398716f4703cfabd761cc8d4101a1'},
     u'SESSION': {u'Entrance Time': 1407551587.7385,
                  u'uniqID': u'bbc398716f4703cfabd761cc8d4101a1'}}},
 u'uniqID': u'bbc398716f4703cfabd761cc8d4101a1'}
Now I understand better why the Raw field is an object instead of a simple string, since it is assigned self._visit, which in turn was initialized with json.loads(php_basic_files.file_get_contents(fname)).
Anyway, based on all the information you've given above, my take is that the mapping was never installed via put_mapping. From there on, there's no way anything else can work the way you'd like. I suggest you modify your code to install the mapping before you index your first visit document.
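A minimal sketch of doing that with elasticsearch-py, reusing the visit_mapping dict from above (note that the ES core type for dates is spelled 'date', not 'date_time', so time_date_mapping would need that value for the date fields to be accepted):

# create the index first (ignore 400 if it already exists), then install the mapping
es.indices.create(index='visits', ignore=400)
es.indices.put_mapping(index='visits', doc_type='visit', body={'visit': visit_mapping})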

Mock Stripe Methods in Python for testing

So I am trying to mock all the Stripe API calls in the method below so that I can write a unit test for it. I am using the mock library for mocking the Stripe methods. Here is the method I am trying to mock:
import logging
import stripe
from rest_framework.response import Response
from rest_framework.views import APIView
from stripe.error import CardError  # location may vary with the stripe-python version

logger = logging.getLogger(__name__)

class AddCardView(APIView):
    """
    * Add card for the customer
    """
    permission_classes = (
        CustomerPermission,
    )

    def post(self, request, format=None):
        name = request.DATA.get('name', None)
        cvc = request.DATA.get('cvc', None)
        number = request.DATA.get('number', None)
        expiry = request.DATA.get('expiry', None)
        expiry_month, expiry_year = expiry.split("/")

        customer_obj = request.user.contact.business.customer
        customer = stripe.Customer.retrieve(customer_obj.stripe_id)

        try:
            card = customer.sources.create(
                source={
                    "object": "card",
                    "number": number,
                    "exp_month": expiry_month,
                    "exp_year": expiry_year,
                    "cvc": cvc,
                    "name": name
                }
            )
            # making it the default card
            customer.default_source = card.id
            customer.save()
        except CardError as ce:
            logger.error("Got CardError for customer_id={0}, CardError={1}".format(customer_obj.pk, ce.json_body))
            return Response({"success": False, "error": "Failed to add card"})
        else:
            customer_obj.card_last_4 = card.get('last4')
            customer_obj.card_kind = card.get('type', '')
            customer_obj.card_fingerprint = card.get('fingerprint')
            customer_obj.save()
            return Response({"success": True})
This is the method for unit testing:
@mock.patch('stripe.Customer.retrieve')
@mock.patch('stripe.Customer.create')
def test_add_card(self, create_mock, retrieve_mock):
    response = {
        'default_card': None,
        'cards': {
            "count": 0,
            "data": []
        }
    }
    # save_mock.return_value = response
    create_mock.return_value = response
    retrieve_mock.return_value = response

    self.api_client.client.login(username=self.username, password=self.password)
    res = self.api_client.post('/biz/api/auth/card/add')
    print res
Now stripe.Customer.retrieve is being mocked properly, but I am not able to mock customer.sources.create. I am really stuck on this.
This is the right way of doing it:
@mock.patch('stripe.Customer.retrieve')
def test_add_card_failure(self, retrieve_mock):
    data = {
        'name': "shubham",
        'cvc': 123,
        'number': "4242424242424242",
        'expiry': "12/23",
    }
    e = CardError("Card Error", "", "")
    retrieve_mock.return_value.sources.create.return_value = e

    self.api_client.client.login(username=self.username, password=self.password)
    res = self.api_client.post('/biz/api/auth/card/add', data=data)
    self.assertEqual(self.deserialize(res)['success'], False)
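One detail worth knowing when adapting this: assigning an exception instance to return_value makes the mocked call return the exception object, whereas side_effect makes the call raise it, which is what actually exercises the except CardError branch:

retrieve_mock.return_value.sources.create.side_effect = CardError("Card Error", "", "")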
Even though the given answer is correct, there is a far more comfortable solution using vcrpy: it creates a cassette (a recording) when one does not exist yet; once it does, the mocking is done transparently and the recording is replayed. Beautiful.
With a vanilla Pyramid application, using py.test, my test now looks like this:
import vcr

# here we have some FactoryBoy fixtures
from tests.fixtures import PaymentServiceProviderFactory, SSOUserFactory

def test_post_transaction(sqla_session, test_app):
    # first we need a PSP and a User existent in the DB
    psp = PaymentServiceProviderFactory()  # type: PaymentServiceProvider
    user = SSOUserFactory()
    sqla_session.add(psp, user)
    sqla_session.flush()
    with vcr.use_cassette('tests/casettes/tests.checkout.services.transaction_test.test_post_transaction.yaml'):
        # with that PSP we create a new PSPTransaction ...
        res = test_app.post(url='/psps/%s/transaction' % psp.id,
                            params={
                                'token': '4711',
                                'amount': '12.44',
                                'currency': 'EUR',
                            })
        assert 201 == res.status_code
        assert 'id' in res.json_body
IMO, the following method is better than the rest of the answers
import unittest
import stripe
import json
from unittest.mock import patch
from stripe.http_client import RequestsClient  # to mock the request session

stripe.api_key = "foo"
stripe.default_http_client = RequestsClient()  # assigning the default HTTP client

null = None
false = False
true = True

charge_resp = {
    "id": "ch_1FgmT3DotIke6IEFVkwh2N6Y",
    "object": "charge",
    "amount": 1000,
    "amount_captured": 1000,
    "amount_refunded": 0,
    "billing_details": {
        "address": {
            "city": "Los Angeles",
            "country": "USA",
        },
        "email": null,
        "name": "Jerin",
        "phone": null
    },
    "captured": true,
}

def get_customer_city_from_charge(stripe_charge_id):
    # this is our function and we are writing a unit test for this function
    charge_response = stripe.Charge.retrieve("foo-bar")
    return charge_response.billing_details.address.city

class TestStringMethods(unittest.TestCase):

    @patch("stripe.default_http_client._session")
    def test_get_customer_city_from_charge(self, mock_session):
        mock_response = mock_session.request.return_value
        mock_response.content.decode.return_value = json.dumps(charge_resp)
        mock_response.status_code = 200

        city_name = get_customer_city_from_charge("some_id")
        self.assertEqual(city_name, "Los Angeles")

if __name__ == '__main__':
    unittest.main()
Advantages of this method:
- You can generate the corresponding class objects (here, the charge_response variable is of type Charge (source code))
- You can use the dot (.) operator on the response (as we can with the real Stripe SDK)
- The dot operator also works for deep attributes