How to choose a default option from multiple options while creating a document? - flask

I am new to flask-pymongo. I want to design my database so that a field has a fixed set of options, one of which is chosen as the default value. How do I do that?
I did not find any option for this.
Example:
For the field Status, multiple options would be:
Active
Inactive
Locked
The default value to be chosen would be Active.

If you use classes with enumerations, this will aid your goal. The following works in Python 3.7+. The nice thing is that you can add to the Options list easily without having to rework any code.
from typing import Optional
from enum import Enum
from time import sleep
from pymongo import MongoClient

connection = MongoClient('localhost', 27017)
db = connection['yourdatabase']

# Define the enumerated list of options
class Options(Enum):
    ACTIVE = 'Active'
    INACTIVE = 'Inactive'
    LOCKED = 'Locked'

# Define the class for the object
class StockItem:
    def __init__(self, stock_item, status=None) -> None:
        self.stock_item: str = stock_item
        self.status: Optional[Options] = status
        # Check if the status is set; if not, set it to the default (Active)
        if self.status is None:
            self.status = Options.ACTIVE
        # Check the status is valid (a plain `in Options` test raises
        # TypeError for non-members on Python < 3.12, so check the type)
        if not isinstance(self.status, Options):
            raise ValueError(f'"{status}" is not a valid Status')

    # to_dict allows us to manipulate the output going to the DB
    def to_dict(self) -> dict:
        return {
            "StockItem": self.stock_item,
            "Status": self.status.value,  # status.value is the string stored in the DB
        }

    # The insert is now easy as we've done all the hard work earlier
    def insert(self, db) -> None:
        db.stockitem.insert_one(self.to_dict())
# Note item 2 does not have a specific status set; it will default to Active
item1 = StockItem('Apples', Options.ACTIVE)
item1.insert(db)
item2 = StockItem('Bananas')
item2.insert(db)
item3 = StockItem('Cheese', Options.INACTIVE)
item3.insert(db)
item4 = StockItem('Dog Food', Options.LOCKED)
item4.insert(db)

for record in db.stockitem.find({}, {'_id': 0}):
    print(record)

# The final item will fail as the status is invalid
sleep(5)
item5 = StockItem('Eggs', 'Invalid Status')
item5.insert(db)
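If you also want the database itself to reject anything outside the allowed set, MongoDB's JSON Schema validation can enforce it at the collection level. A minimal sketch, assuming the same 'stockitem' collection as above (note MongoDB validates but does not fill in defaults; the application code above still supplies 'Active'):

from pymongo import MongoClient

db = MongoClient('localhost', 27017)['yourdatabase']

# Run once, when first creating the collection
db.create_collection('stockitem', validator={
    '$jsonSchema': {
        'bsonType': 'object',
        'required': ['StockItem', 'Status'],
        'properties': {
            'StockItem': {'bsonType': 'string'},
            'Status': {'enum': ['Active', 'Inactive', 'Locked']},
        },
    }
})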

How to optimize a server request that has to send back several items with dynamic attributes that need to be calculated every time a user requests them?

I have an Angular UI app connecting to a Django API that uses GraphQL (via Graphene) and Postgres for the DB.
My application has many courses, and each course can have several chapters. Users signing in can access some courses and not others, because a course can have a prerequisite. They will see such a course listed, but it will be "locked" for them, with a message saying they need to complete the particular prerequisite before it can be accessed. So some extra attributes need to be sent along with the list of courses:-
'locked' - Boolean - whether a course is locked for the current logged-in user or not.
'status' - ENUM - PENDING/SUBMITTED/GRADED/RETURNED/FLAGGED
'completed' - Boolean - whether the course is completed or not
When a user requests the list of courses, these 3 attributes are calculated for each item in the list before it is compiled and sent back to the user.
And this is done for each of the chapters inside the course too. And a course might contain up to 30 chapters or so. So this really takes a LOT of time!
I've implemented caching as well, but because these values change often (e.g. when the user completes a chapter) the cache is constantly invalidated, so it doesn't make sense to keep these attributes server-side cached to begin with.
Here's the code that processes the chapters for the chapter-list query:-
@login_required
@user_passes_test(lambda user: has_access(user, RESOURCES['CHAPTER'], ACTIONS['LIST']))
def resolve_chapters(root, info, course_id=None, searchField=None, limit=None, offset=None, **kwargs):
    current_user = info.context.user
    # Checking if this is cached
    cache_entity = CHAPTER_CACHE[0]
    cache_key = generate_chapters_cache_key(cache_entity, searchField, limit, offset, course_id, current_user)
    cached_response = fetch_cache(cache_entity, cache_key)
    if cached_response:
        return cached_response
    # If not cached...
    qs = rows_accessible(current_user, RESOURCES['CHAPTER'], {'course_id': course_id})
    if searchField is not None:
        filter = (
            Q(searchField__icontains=searchField.lower())
        )
        qs = qs.filter(filter)
    if offset is not None:
        qs = qs[offset:]
    if limit is not None:
        qs = qs[:limit]
    set_cache(cache_entity, cache_key, qs)
    return qs
And I'm using this code to dynamically insert the three attributes into each item in the list of chapters that the above code returns:-
class ChapterType(DjangoObjectType):
    completed = graphene.Boolean()
    completion_status = graphene.String()
    locked = graphene.String()

    def resolve_completed(self, info):
        user = info.context.user
        completed = CompletedChapters.objects.filter(participant_id=user.id, chapter_id=self.id).exists()
        return completed

    def resolve_completion_status(self, info):
        user = info.context.user
        status = ExerciseSubmission.StatusChoices.PENDING
        try:
            completed = CompletedChapters.objects.get(participant_id=user.id, chapter_id=self.id)
            status = completed.status
        except CompletedChapters.DoesNotExist:
            pass
        return status

    def resolve_locked(self, info):
        user = info.context.user
        locked = is_chapter_locked(user, self)
        return locked

    class Meta:
        model = Chapter
And the method is_chapter_locked() is quite complex in itself:-
def is_chapter_locked(user, chapter):
    locked = None
    # Graders may always see the chapter
    user_role = user.role.name
    grader = user_role == USER_ROLES_NAMES['GRADER']
    # Checking if the user is the author of the course or a grader
    if chapter.course.instructor.id == user.id or grader:
        # If yes, we mark it as unlocked
        return locked
    # Checking if this belongs to a course that is locked
    course_locked = is_course_locked(user, chapter.course)
    if course_locked:
        # If the course is locked, we immediately return locked is true
        locked = 'This chapter is locked for you'
        return locked
    # If the course is unlocked, check the chapter's own prerequisites
    completed_chapters = CompletedChapters.objects.all().filter(participant_id=user.id)
    required_chapters = MandatoryChapters.objects.all().filter(chapter_id=chapter.id)
    required_chapter_ids = required_chapters.values_list('requirement_id', flat=True)
    completed_chapter_ids = completed_chapters.values_list('chapter_id', flat=True)
    pending_chapter_ids = []
    for id in required_chapter_ids:
        if id not in completed_chapter_ids:
            pending_chapter_ids.append(id)
    if pending_chapter_ids:
        locked = 'To view this chapter, you must have completed '
        pending_chapters_list = ''
        for id in pending_chapter_ids:
            try:
                chapter = Chapter.objects.get(pk=id, active=True)
                if pending_chapters_list != '':
                    pending_chapters_list += ', '
                pending_chapters_list += '"' + str(chapter.section.index) + '.' + str(chapter.index) + '. ' + chapter.title + '"'
            except Chapter.DoesNotExist:
                pass
        locked += pending_chapters_list
    return locked
As can be seen, a lot of dynamic processing is done to fetch the list of chapters, and it takes considerably long even with the database query cached before the dynamic attributes are calculated.
I am looking for strategies to minimize the dynamic calculation. What kind of approach works best for performance optimization in situations like this?
Thank you.
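One pattern that usually helps here (a sketch using the model names from the question; the helper names are made up) is to batch the per-chapter lookups: fetch the user's completed-chapter ids and the prerequisites for the whole page of chapters in one query each, then let the per-field resolvers do in-memory membership tests instead of issuing a query per chapter.

from collections import defaultdict

def get_completed_chapter_ids(user):
    # One query per request instead of one per chapter
    return set(
        CompletedChapters.objects
        .filter(participant_id=user.id)
        .values_list('chapter_id', flat=True)
    )

def get_required_ids_by_chapter(chapter_ids):
    # One query for the prerequisites of every requested chapter
    required = defaultdict(list)
    rows = MandatoryChapters.objects.filter(
        chapter_id__in=chapter_ids
    ).values_list('chapter_id', 'requirement_id')
    for chapter_id, requirement_id in rows:
        required[chapter_id].append(requirement_id)
    return required

# In resolve_chapters, after building qs:
#   info.context.completed_ids = get_completed_chapter_ids(current_user)
#   info.context.required_map = get_required_ids_by_chapter([c.id for c in qs])
# resolve_completed and is_chapter_locked can then test membership in these
# in-memory structures instead of querying the database for every chapter.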

How to change the connection arguments (after, before) in graphene-python (relay)?

Using:
Django 3.x [ Django-Filters 2.2.0, graphene-django 2.8.0, graphql-relay 2.0.1 ]
Vue 2.x [ Vue-Apollo ]
After applying some filters (icontains etc.) to my GraphQL search, I tried to change or manipulate the connection args like first or after. In my resolver I can fetch a dictionary like {'first': 2, 'name__icontains': 'eagle'} with the values I enter in the IDE. As you can see (Example 1, def resolve_all_birds2), I already use that for some logic. But I do not understand where to manipulate the GraphQLArgument state of the before, after, first and last arguments that come with relay.
Example 1
class ExtendedConnection(Connection):
    class Meta:
        abstract = True

    total_count = Int()
    edge_count = Int()

    def resolve_total_count(root, info, **kwargs):
        return root.length

    def resolve_edge_count(root, info, **kwargs):
        return len(root.edges)

class Birds2Node(DjangoObjectType):
    class Meta:
        model = Birds
        filter_fields = {
            'id': ['exact', 'icontains'],
            'name': ['exact', 'icontains', 'istartswith', 'iendswith'],
        }
        interfaces = (relay.Node,)
        connection_class = ExtendedConnection

    # --- CUSTOM FIELDS -->
    # pKey = db primary key
    pKey = Int()
    def resolve_pKey(parent, info):
        return parent.pk

    # qRank = item rank in the edge array
    qRank = Int()
    def resolve_qRank(parent, info, **kwargs):
        return info.path[2]

class Birds2Query(ObjectType):
    birds2 = relay.Node.Field(Birds2Node)
    all_birds2 = DjangoFilterConnectionField(Birds2Node)

    def resolve_all_birds2(self, info, **kwargs):
        if 'name__icontains' in kwargs:
            nameIcon = kwargs['name__icontains']
            nameIconBool = bool(nameIcon.strip())  # if blanks, turns False
            if nameIconBool == False:  # has blanks
                return Birds.objects.filter(name=None)
            pass
        if 'name__istartswith' in kwargs:
            nameIsta = kwargs['name__istartswith']
            nameIstaBool = bool(nameIsta.strip())  # if blanks, turns False
            if nameIstaBool == False:  # has blanks
                return Birds.objects.filter(name=None)
            pass
        return
For example, in my IDE I declare allBirds2(first: 2, name_Icontains: "a"). I can fetch these values in my resolver as a dictionary via **kwargs, or as named arguments via def resolve_all_birds2(self, info, first, name_icontains):. So far so good: I can manipulate my model query, and it returns only 2 items per edge.
But imagine I want to change first: 2 to first: 10 in my back end. Can I update the dictionary? The documentation suggests yes, but it seems strictly tied to the ObjectTypes (fields) you resolve.
For example, I tried this...
Example 2
def resolve_all_birds2(self, info, **kwargs):
    <...>
    return {'first': '20', 'name__icontains': 'd'}
Output IDE: "message": "'dict' object has no attribute 'model'"
Example 3
def resolve_all_birds2(self, info, first, **kwargs):
    <...>
    return f'20, {first}!'
Output IDE: "message": "name 'first' is not defined"
Question
Unfortunately, I only found parameter manipulation of the model query in the graphene-python docs.
So my question is: how can I manipulate, in my back end, the values of the before, after, first and last arguments that relay offers and that are already usable in my IDE? Do I have to declare them separately in my DjangoObjectType, or create a custom Node, to manipulate and change the values after a user sends a request?
Adding a middleware would probably allow changing the input values after the request is made and before the query runs. Graphene has an example at: https://docs.graphene-python.org/en/latest/execution/middleware/
However, it's not clear (to me) from the documentation which of the mentioned parameters would contain the first field you want to manipulate.
The middleware approach does not seem to be highly recommended, though, because of its undesirable side effects: https://github.com/graphql-python/graphene/issues/1285
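For reference, a graphene middleware receives the resolver's arguments as keyword arguments, so rewriting first before the connection field resolves could look roughly like the sketch below (untested; the cap of 10 is an illustrative value, not from the thread):

class CapFirstMiddleware:
    # Runs for every resolver; only touches resolvers that received 'first'
    def resolve(self, next, root, info, **args):
        if args.get('first') is not None:
            args['first'] = min(args['first'], 10)
        return next(root, info, **args)

# Register it when executing, e.g.:
# schema.execute(query_string, middleware=[CapFirstMiddleware()])
# or, with graphene-django, via GRAPHENE = {'MIDDLEWARE': [...]} in settings.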

Set time-to-live for each session separately with KVSession - flask

KVSession sets the session TTL based on PERMANENT_SESSION_LIFETIME. Is there a way to override this for specific sessions?
EDIT:
I have two different login APIs. Any user logging in through one of them should get an infinite session TTL; the other should use the PERMANENT_SESSION_LIFETIME value.
Note: the KVSession backend is Redis.
I think the best way is to use a session interface to implement the specific processing. This is just an example, but I hope you can understand the approach.
from datetime import datetime, timedelta

from flask import Flask, session as flask_session, jsonify
from flask.sessions import SecureCookieSessionInterface

flask_app = Flask(__name__)

# just a few user types
UNIQUE_USER_TYPE = 'unique'
DEFAULT_USER_TYPE = 'default'

@flask_app.route('/login-default')
def login_default():
    flask_session['user_type'] = DEFAULT_USER_TYPE
    return 'login default done'

@flask_app.route('/login-unique')
def login_unique():
    flask_session['user_type'] = UNIQUE_USER_TYPE
    return 'login unique done'

@flask_app.route('/session-state')
def get_session_state():
    return jsonify(dict(flask_session))

class UserTypeSessionInterface(SecureCookieSessionInterface):
    def get_expiration_time(self, app, session):
        """
        Just an overridden method, for demonstration.
        It's called from save_session() and open_session().
        """
        if session.get('user_type') == UNIQUE_USER_TYPE:
            # set 1 hour for unique users
            delta = datetime.utcnow() + timedelta(hours=1)
        else:
            # set 3 hours for default users
            delta = datetime.utcnow() + timedelta(hours=3)
        # add datetime data to the session
        session['lifetime'] = delta.strftime('%Y-%m-%dT%H:%M:%S')
        return delta

# use our custom session implementation
flask_app.session_interface = UserTypeSessionInterface()
Now run the server, open a new private window, and hit /login-default and /session-state:
# default behaviour
{
"lifetime": "2018-11-06T16:22:21",
"user_type": "default"
}
Open one more private window, /login-unique and /session-state:
# unique behaviour
{
"lifetime": "2018-11-06T14:25:17",
"user_type": "unique"
}
So the session store doesn't matter (Redis, Cassandra or something else). All you need is to implement open_session() and save_session():
class YourSessionProcessor(SessionInterface):
    def open_session(self, app, request):
        # do whatever you need here
        pass

    def save_session(self, app, session, response):
        # do whatever you need here
        pass

flask_app.session_interface = YourSessionProcessor()
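Since the question mentions a Redis backend, save_session is also where you can pick the Redis expiry per session: use SET with no expiry for the "infinite" logins and SETEX for the rest. A rough sketch with redis-py (the session:<sid> key naming, the JSON serialization and the sid attribute are simplified assumptions; cookie handling is omitted):

import json

import redis
from flask.sessions import SessionInterface

r = redis.Redis(host='localhost', port=6379)

class RedisTTLSessionProcessor(SessionInterface):
    def save_session(self, app, session, response):
        key = 'session:' + session.sid
        payload = json.dumps(dict(session))
        if session.get('user_type') == UNIQUE_USER_TYPE:
            # No expiry: the session never times out
            r.set(key, payload)
        else:
            # Fall back to the configured lifetime (a timedelta)
            r.setex(key, app.permanent_session_lifetime, payload)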
You can also use a custom session class (just an example):
from flask.sessions import SessionMixin
from werkzeug.datastructures import CallbackDict

class CustomSession(CallbackDict, SessionMixin):
    def __init__(self, initial=None, sid=None):
        def on_update(self):
            self.modified = True
        CallbackDict.__init__(self, initial, on_update=on_update)
        self.sid = sid
        self.modified = False

# YourSessionProcessor
def open_session(self, app, request):
    # you can find any useful data in request
    # you can find all settings in app.config
    sid = request.cookies.get(app.session_cookie_name)
    # ... do everything you need here
    return CustomSession(sid=sid)
Hope this helps.

get() in Google Datastore doesn't work as intended

I'm building a basic blog from the Web Development course by Steve Hoffman on Udacity. This is my code -
import os
import webapp2
import jinja2
from google.appengine.ext import db

template_dir = os.path.join(os.path.dirname(__file__), 'templates')
jinja_env = jinja2.Environment(loader=jinja2.FileSystemLoader(template_dir), autoescape=True)

def datetimeformat(value, format='%H:%M / %d-%m-%Y'):
    return value.strftime(format)

jinja_env.filters['datetimeformat'] = datetimeformat

def render_str(template, **params):
    t = jinja_env.get_template(template)
    return t.render(params)

class Entries(db.Model):
    title = db.StringProperty(required=True)
    body = db.TextProperty(required=True)
    created = db.DateTimeProperty(auto_now_add=True)

class MainPage(webapp2.RequestHandler):
    def get(self):
        entries = db.GqlQuery('select * from Entries order by created desc limit 10')
        self.response.write(render_str('mainpage.html', entries=entries))

class NewPost(webapp2.RequestHandler):
    def get(self):
        self.response.write(render_str('newpost.html', error=""))

    def post(self):
        title = self.request.get('title')
        body = self.request.get('body')
        if title and body:
            e = Entries(title=title, body=body)
            length = db.GqlQuery('select * from Entries order by created desc').count()
            e.put()
            self.redirect('/newpost/' + str(length + 1))
        else:
            self.response.write(render_str('newpost.html', error="Please type in a title and some content"))

class Permalink(webapp2.RequestHandler):
    def get(self, id):
        e = db.GqlQuery('select * from Entries order by created desc').get()
        self.response.write(render_str('permalink.html', id=id, entry=e))

app = webapp2.WSGIApplication([('/', MainPage),
                               ('/newpost', NewPost),
                               ('/newpost/(\d+)', Permalink)
                               ], debug=True)
In the class Permalink, I'm using the get() method on the query that returns all records in descending order of creation, so it should return the most recently added record. But when I add a new record, permalink.html (just a page that shows the title, the body and the creation date of the new entry) shows the SECOND most recently added one. For example, I already had three records; when I added a fourth, instead of showing the details of the fourth record, permalink.html showed me the details of the third. Am I doing something wrong?
I don't think my question is a duplicate of this - Read delay in App Engine Datastore after put(). That question is about the read delay of put(), while I'm using get(). The accepted answer there also states that get() doesn't cause any delay.
This is because of the eventual consistency used by default for GQL queries.
You need to read:
https://cloud.google.com/appengine/docs/python/datastore/data-consistency
https://cloud.google.com/appengine/docs/python/datastore/structuring_for_strong_consistency
https://cloud.google.com/datastore/docs/articles/balancing-strong-and-eventual-consistency-with-google-cloud-datastore/
and search & read on SO and other sources about strong & eventual consistency in Google Cloud Datastore.
You can specify read_policy=STRONG_CONSISTENCY for your query, but it has associated costs that you should be aware of and take into account.
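The usual structural fix is an ancestor query: entities written with a common parent belong to one entity group, and ancestor queries are strongly consistent. A sketch against the question's Entries model (the blog_key parent is an illustrative assumption, not from the original code):

def blog_key(name='default'):
    # Common parent so all entries live in one entity group
    return db.Key.from_path('Blog', name)

# When creating an entry:
e = Entries(parent=blog_key(), title=title, body=body)
e.put()

# An ancestor query will see the entity that was just written:
latest = db.GqlQuery('select * from Entries where ancestor is :1 '
                     'order by created desc', blog_key()).get()

Note that a single entity group is limited to roughly one write per second, which is usually fine for a small blog.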

GeoDJango: retrieve last inserted primary key from LayerMapping

I am building an application with GeoDjango and I have the following problem:
I need to read track data from a GPX file, and those data should be stored in a model's MultiLineStringField.
This should happen in the admin interface, where the user uploads a GPX file.
I am trying to achieve the following: the data grabbed from the file should be assigned to the MultiLineStringField, while the other fields should get their values from the form.
My model is:
class GPXTrack(models.Model):
    nome = models.CharField("Nome", blank=False, max_length=255)
    slug = models.SlugField("Slug", blank=True)
    # sport natura arte/cultura
    tipo = models.CharField("Tipologia", blank=False, max_length=2, choices=TIPOLOGIA_CHOICES)
    descrizione = models.TextField("Descrizione", blank=True)
    gpx_file = models.FileField(upload_to='uploads/gpx/')
    track = models.MultiLineStringField(blank=True)
    objects = models.GeoManager()
    published = models.BooleanField("Pubblicato")
    rel_files = generic.GenericRelation(MyFiles)
    #publish_on = models.DateTimeField("Pubblicare il", auto_now_add = True)
    created = models.DateTimeField("Created", auto_now_add=True)
    updated = models.DateTimeField("Updated", auto_now=True)

    class Meta:
        #verbose_name = "struttura'"
        #verbose_name_plural = "strutture"
        ordering = ['-created']

    def __str__(self):
        return str(self.nome)

    def __unicode__(self):
        return '%s' % (self.nome)

    def put(self):
        self.slug = sluggy(self.nome)
        key = super(GPXTrack, self).put()
        # do something after save
        return key
While in the admin.py file I have overridden the save_model method as follows:
from django.contrib.gis import admin
from trails.models import GPXPoint, GPXTrack
from django.contrib.contenttypes import generic
from django.contrib.gis.gdal import DataSource
#from gpx_mapping import GPXMapping
from django.contrib.gis.utils import LayerMapping
from django.template import RequestContext
import tempfile
import os
import pprint

class GPXTrackAdmin(admin.OSMGeoAdmin):
    list_filter = ('tipo', 'published')
    search_fields = ['nome']
    list_display = ('nome', 'tipo', 'published', 'gpx_file')
    inlines = [TrackImagesInline, TrackFilesInline]
    prepopulated_fields = {"slug": ("nome",)}

    def save_model(self, request, obj, form, change):
        """When creating a new object, set the creator field."""
        if 'gpx_file' in request.FILES:
            # Get the uploaded file
            gpxFile = request.FILES['gpx_file']
            # Save it to a temporary file
            targetPath = tempfile.mkstemp()[1]
            destination = open(targetPath, 'wt')
            for chunk in gpxFile.chunks():
                destination.write(chunk)
            destination.close()
            # Define the fields of interest for LayerMapping
            track_point_mapping = {'timestamp': 'time',
                                   'point': 'POINT',
                                   }
            track_mapping = {'track': 'MULTILINESTRING'}
            gpx_file = DataSource(targetPath)
            mytrack = LayerMapping(GPXTrack, gpx_file, track_mapping, layer='tracks')
            mytrack.save()
            # Remove the temp file
            os.remove(targetPath)
            orig = GPXTrack.objects.get(pk=mytrack.pk)
            # Assign the parsed values from LayerMapping to the appropriate field
            obj.track = orig.track
            obj.save()
As far as I know:
LayerMapping cannot be used to update a field, only to save a new record
I cannot access a specific field of the LayerMapping object (i.e. in the code above: mytrack.track) and assign its value to a model field (i.e. obj.track) in the save_model method
I cannot retrieve the primary key of the last saved LayerMapping object (i.e. in the code above: mytrack.pk) in order to update it with the values passed in the form for the fields not mapped in LayerMapping.mapping
What can I do then?!?!
I sorted it out by subclassing LayerMapping and adding a get_values() method that, instead of saving the retrieved data, returns it for any further use or manipulation. get_values() is a copy of the LayerMapping.save() method, modified to return the values instead of saving them.
I am using Django 1.5 (hence the Python 2 syntax below).
import os
import sys

from django.contrib.gis.gdal import OGRGeometry
from django.contrib.gis.utils import LayerMapping, LayerMapError
from django.core.exceptions import ObjectDoesNotExist
from django.db import transaction

class MyMapping(LayerMapping):
    def get_values(self, verbose=False, fid_range=False, step=False,
                   progress=False, silent=False, stream=sys.stdout, strict=False):
        """
        Returns the contents of the OGR DataSource Layer
        according to the mapping dictionary given at initialization.

        Keyword Parameters:
         verbose:
           If set, information will be printed subsequent to each model save
           executed on the database.

         fid_range:
           May be set with a slice or tuple of (begin, end) feature IDs to map
           from the data source. In other words, this keyword enables the user
           to selectively import a subset range of features in the geographic
           data source.

         step:
           If set with an integer, transactions will occur at every step
           interval. For example, if step=1000, a commit would occur after
           the 1,000th feature, the 2,000th feature, etc.

         progress:
           When this keyword is set, status information will be printed giving
           the number of features processed and successfully saved. By default,
           progress information will be printed every 1000 features processed,
           however, this default may be overridden by setting this keyword with an
           integer for the desired interval.

         stream:
           Status information will be written to this file handle. Defaults to
           using `sys.stdout`, but any object with a `write` method is supported.

         silent:
           By default, non-fatal error notifications are printed to stdout, but
           this keyword may be set to disable these notifications.

         strict:
           Execution of the model mapping will cease upon the first error
           encountered. The default behavior is to attempt to continue.
        """
        # Getting the default Feature ID range.
        default_range = self.check_fid_range(fid_range)

        # Setting the progress interval, if requested.
        if progress:
            if progress is True or not isinstance(progress, int):
                progress_interval = 1000
            else:
                progress_interval = progress

        # Defining the 'real' save method, utilizing the transaction
        # decorator created during initialization.
        #self.transaction_decorator
        def _get_values(feat_range=default_range, num_feat=0, num_saved=0):
            if feat_range:
                layer_iter = self.layer[feat_range]
            else:
                layer_iter = self.layer

            for feat in layer_iter:
                num_feat += 1
                # Getting the keyword arguments
                try:
                    kwargs = self.feature_kwargs(feat)
                except LayerMapError, msg:
                    # Something borked the validation
                    if strict:
                        raise
                    elif not silent:
                        stream.write('Ignoring Feature ID %s because: %s\n' % (feat.fid, msg))
                else:
                    # Constructing the model using the keyword args
                    is_update = False
                    if self.unique:
                        # If we want unique models on a particular field, handle the
                        # geometry appropriately.
                        try:
                            # Getting the keyword arguments and retrieving
                            # the unique model.
                            u_kwargs = self.unique_kwargs(kwargs)
                            m = self.model.objects.using(self.using).get(**u_kwargs)
                            is_update = True

                            # Getting the geometry (in OGR form), creating
                            # one from the kwargs WKT, adding in additional
                            # geometries, and updating the attribute with the
                            # just-updated geometry WKT.
                            geom = getattr(m, self.geom_field).ogr
                            new = OGRGeometry(kwargs[self.geom_field])
                            for g in new:
                                geom.add(g)
                            setattr(m, self.geom_field, geom.wkt)
                        except ObjectDoesNotExist:
                            # No unique model exists yet, create.
                            m = self.model(**kwargs)
                    else:
                        m = self.model(**kwargs)

                    try:
                        # Instead of saving, keep the keyword arguments
                        pippo = kwargs
                        num_saved += 1
                        if verbose:
                            stream.write('%s: %s\n' % (is_update and 'Updated' or 'Saved', m))
                    except SystemExit:
                        raise
                    except Exception, msg:
                        if self.transaction_mode == 'autocommit':
                            # Rolling back the transaction so that other model saves
                            # will work.
                            transaction.rollback_unless_managed()
                        if strict:
                            # Bailing out if the `strict` keyword is set.
                            if not silent:
                                stream.write('Failed to save the feature (id: %s) into the model with the keyword arguments:\n' % feat.fid)
                                stream.write('%s\n' % kwargs)
                            raise
                        elif not silent:
                            stream.write('Failed to save %s:\n %s\nContinuing\n' % (kwargs, msg))

                # Printing progress information, if requested.
                if progress and num_feat % progress_interval == 0:
                    stream.write('Processed %d features, saved %d ...\n' % (num_feat, num_saved))

            # Only used for status output purposes -- incremental saving uses the
            # values returned here.
            return pippo

        nfeat = self.layer.num_feat
        if step and isinstance(step, int) and step < nfeat:
            # Incremental saving is requested at the given interval (step)
            if default_range:
                raise LayerMapError('The `step` keyword may not be used in conjunction with the `fid_range` keyword.')
            beg, num_feat, num_saved = (0, 0, 0)
            indices = range(step, nfeat, step)
            n_i = len(indices)

            for i, end in enumerate(indices):
                # Constructing the slice to use for this step; the last slice is
                # special (e.g, [100:] instead of [90:100]).
                if i + 1 == n_i:
                    step_slice = slice(beg, None)
                else:
                    step_slice = slice(beg, end)

                try:
                    pippo = _get_values(step_slice, num_feat, num_saved)
                    beg = end
                except:
                    stream.write('%s\nFailed to save slice: %s\n' % ('=-' * 20, step_slice))
                    raise
        else:
            # Otherwise, just calling the previously defined _get_values() function.
            return _get_values()
In a custom save or save_model method you can then use:
track_mapping = {'nome': 'name',
                 'track': 'MULTILINESTRING'}
targetPath = "/my/gpx/file/path.gpx"
gpx_file = DataSource(targetPath)
mytrack = MyMapping(GPXTrack, gpx_file, track_mapping, layer='tracks')
pippo = mytrack.get_values()
obj.track = pippo['track']
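As an aside, if all you need from the file is the geometry itself, a simpler route (a sketch, not part of the original answer) is to skip LayerMapping entirely and read the layer straight from the GDAL DataSource; 'tracks' is the layer name the GPX driver uses, as in the code above:

from django.contrib.gis.gdal import DataSource

def extract_track(gpx_path):
    # Return the GEOS geometry of the first feature in the 'tracks' layer
    ds = DataSource(gpx_path)
    for layer in ds:
        if layer.name == 'tracks':
            for feat in layer:
                return feat.geom.geos  # assignable to a MultiLineStringField
    return None

# In save_model:
#   obj.track = extract_track(targetPath)
#   obj.save()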