flask-mongoengine AttributeError: 'BaseQuerySet' object has no attribute 'service_name'

My Code:
models.py
from flask_mongoengine import MongoEngine, QuerySet
import datetime

db = MongoEngine()


class Service(db.Document):
    service_name = db.StringField(max_length=50, required=True)
    date_created = db.DateTimeField(default=datetime.datetime.utcnow)
    meta = {
        'ordering': ['-date_created'], 'strict': False
    }


class Organisation(db.Document):
    org_name = db.StringField(max_length=50, required=True)
    service = db.ReferenceField(Service, reverse_delete_rule='CASCADE', required=True)
    date_created = db.DateTimeField(default=datetime.datetime.utcnow)
    meta = {
        'ordering': ['-date_created'], 'strict': False
    }
API (routes.py)
@service.route('/services/<name>', methods=['GET'])
def get_one_service(name):
    s = Service.objects(service_name=name)
    if s:
        output = {'service_name': s.service_name}
    else:
        output = "No such name"
    return jsonify({'result': output})
The get_one_service() method raises the error mentioned in the title. As far as I can tell from the documentation, this code should be correct.

In your get_one_service function, s is not a Service instance but a QuerySet (this example from the documentation might make it clearer).
You can access a single Service instance by doing something like:
try:
    s = Service.objects.get(service_name=name)
    output = {'service_name': s.service_name}
except Service.DoesNotExist:
    output = 'no such name'
Though, since the service_name field is not unique in your model, you may get a MultipleObjectsReturned exception if there are two documents with the same service_name.
Alternatively, you can check whether s contains multiple Services and act accordingly, as sketched below.
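For example, a minimal sketch that keeps the queryset and handles zero, one, or many matches (the route decorator and jsonify response mirror the question; the exact output format is an assumption):
@service.route('/services/<name>', methods=['GET'])
def get_one_service(name):
    qs = Service.objects(service_name=name)
    count = qs.count()
    if count == 0:
        output = "No such name"
    elif count == 1:
        # exactly one match: safe to read the field off the single document
        output = {'service_name': qs.first().service_name}
    else:
        # several documents share this name: return them all
        output = [{'service_name': s.service_name} for s in qs]
    return jsonify({'result': output})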

Related

Flask app-builder how to make REST API with file items

I'm making a REST API where files can be uploaded, based on a Model/View in flask-appbuilder, like this.
But I don't know how to call the REST API (POST /File).
I tried several different ways, but none of them worked.
Let me know the correct way, or an alternative.
[client code]
file = {'file':open('test.txt', 'rb'),'description':'test'}
requests.post(url, headers=headers, files=file)
==> Failed
model.py
class Files(Model):
    __tablename__ = "project_files"
    id = Column(Integer, primary_key=True)
    file = Column(FileColumn, nullable=False)
    description = Column(String(150))

    def download(self):
        return Markup(
            '<a href="'
            + url_for("ProjectFilesModelView.download", filename=str(self.file))
            + '">Download</a>'
        )

    def file_name(self):
        return get_file_original_name(str(self.file))
view.py
class FileApi(ModelRestApi):
    resource_name = "File"
    datamodel = SQLAInterface(Files)
    allow_browser_login = True


appbuilder.add_api(FileApi)
FileColumn is only a string field that stores the file name in the database; the actual file is saved to config['UPLOAD_FOLDER']. This is taken care of by flask_appbuilder.filemanager.FileManager.
Furthermore, ModelRestApi assumes that you are POSTing JSON data. In order to upload files, I followed Flask's documentation, which suggests sending a multipart/form-data request. Because of this, one needs to override ModelRestApi.post_headless().
This is my solution, where I also make sure that when a Files database row is deleted, the corresponding file is removed from the filesystem.
from flask_appbuilder.models.sqla.interface import SQLAInterface
from flask_appbuilder.api import ModelRestApi
from flask_appbuilder.const import API_RESULT_RES_KEY
from flask_appbuilder.filemanager import FileManager
from flask import current_app, request
from marshmallow import ValidationError
from sqlalchemy.exc import IntegrityError

from app.models import Files


class FileApi(ModelRestApi):
    resource_name = "file"
    datamodel = SQLAInterface(Files)

    def post_headless(self):
        if not request.form or not request.files:
            msg = "No data"
            current_app.logger.error(msg)
            return self.response_400(message=msg)

        file_obj = request.files.getlist('file')
        if len(file_obj) != 1:
            msg = ("More than one file provided.\n"
                   "Please upload exactly one file at a time")
            current_app.logger.error(msg)
            return self.response_422(message=msg)
        else:
            file_obj = file_obj[0]

        fm = FileManager()
        uuid_filename = fm.generate_name(file_obj.filename, file_obj)

        form = request.form.to_dict(flat=True)
        # Add the unique filename provided by FileManager, which will
        # be saved to the database. The original filename can be
        # retrieved using
        # flask_appbuilder.filemanager.get_file_original_name()
        form['file'] = uuid_filename

        try:
            item = self.add_model_schema.load(
                form,
                session=self.datamodel.session)
        except ValidationError as err:
            current_app.logger.error(err)
            return self.response_422(message=err.messages)

        # Save file to filesystem
        fm.save_file(file_obj, item.file)

        try:
            self.datamodel.add(item, raise_exception=True)
            return self.response(
                201,
                **{API_RESULT_RES_KEY: self.add_model_schema.dump(
                    item, many=False),
                   "id": self.datamodel.get_pk_value(item),
                   },
            )
        except IntegrityError as e:
            # Delete file from filesystem if the db record cannot be
            # created
            fm.delete_file(item.file)
            current_app.logger.error(e)
            return self.response_422(message=str(e.orig))

    def pre_delete(self, item):
        """
        Delete file from filesystem before removing the record from the
        database
        """
        fm = FileManager()
        current_app.logger.info(f"Deleting {item.file} from filesystem")
        fm.delete_file(item.file)
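With this override in place, a client uploads via a multipart/form-data request rather than JSON. A minimal sketch with requests (the base URL, port, and JWT login flow are assumptions based on ModelRestApi's default /api/v1/<resource_name>/ routing; adjust them to your deployment):
import requests

base_url = "http://localhost:8080/api/v1/file/"  # assumed default route for resource_name = "file"
access_token = "<token obtained from /api/v1/security/login>"
headers = {"Authorization": "Bearer {}".format(access_token)}

with open("test.txt", "rb") as fp:
    response = requests.post(
        base_url,
        headers=headers,
        files={"file": fp},            # read by post_headless() via request.files
        data={"description": "test"},  # read via request.form
    )

print(response.status_code, response.json())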
You can use this.
from app.models import Project, ProjectFiles


class DataFilesModelView(ModelView):
    datamodel = SQLAInterface(ProjectFiles)
    label_columns = {"file_name": "File Name", "download": "Download"}
    add_columns = ["file", "description", "project"]
    edit_columns = ["file", "description", "project"]
    list_columns = ["file_name", "download"]
    show_columns = ["file_name", "download"]
Lastly, add the view to the menu.
appbuilder.add_view(DataFilesModelView, "File View")

How to change serializer field name when validation error is triggered

I need to change the format of the error returned when this field fails validation.
serializer.py
class ElementCommonInfoSerializer(serializers.ModelSerializer):
    self_description = serializers.CharField(
        required=False, allow_null=True,
        validators=[RegexValidator(
            regex=r'^[a-zA-Z0-9,.!? -/*()]*$',
            message='The system detected that the data is not in English. '
                    'Please correct the error and try again.')]
    )
    ....

    class Meta:
        model = Elements
        fields = ('self_description', ......)
This error is displayed:
{
    "self_description": [
        "The system detected that the data is not in English. Please correct the error and try again."
    ]
}
The key of the error dict is the field name, self_description. For the frontend I need to send another format, like:
{
    "general_errors": [
        "The system detected that the data is not in English. Please correct the error and try again."
    ]
}
How to change this?
One way this could be achieved is via a custom exception handler:
from copy import deepcopy

from rest_framework.views import exception_handler


def generalizing_exception_handler(exc, context):
    # Call REST framework's default exception handler first,
    # to get the standard error response.
    response = exception_handler(exc, context)

    # Move the field-specific errors under a generic key.
    if response is not None and 'self_description' in response.data:
        data = deepcopy(response.data)
        general_errors = data.pop('self_description')
        data['general_errors'] = general_errors
        response.data = data
    return response
in settings:
REST_FRAMEWORK = {
    'EXCEPTION_HANDLER': 'my_project.my_app.utils.generalizing_exception_handler'
}
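With this handler configured, a quick check using DRF's test client shows the renamed key (the endpoint URL and payload here are hypothetical):
from rest_framework.test import APIClient

client = APIClient()
# '#' and '@' are outside the allowed character class, so the RegexValidator fires
resp = client.post('/api/elements/', {'self_description': 'bad chars: #@'}, format='json')
print(resp.status_code)  # expected: 400
print(resp.json())       # expected: {'general_errors': ['The system detected that the data is not in English. ...']}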
Another solution is to override the serializer's validate method (this needs import re at the top of the module):
def validate(self, data):
    pattern = r'^[a-zA-Z0-9,.!? -/*()]*$'
    self_description = str(data.get('self_description', ''))
    analyst_notes = str(data.get('analyst_notes', ''))
    if not re.match(pattern, self_description) or not re.match(pattern, analyst_notes):
        raise serializers.ValidationError({
            "general_errors": [
                "The system detected that the data is not in English. Please correct the error and try again."
            ]
        })
    return data
The solution is very simple: you can rename the key by using the serializer's source attribute.
Below you can find example code.
class QuestionSerializer(serializers.ModelSerializer):
    question_importance = serializers.IntegerField(source='importance', required=False)

    class Meta:
        model = create_question
        fields = ('id', 'question_importance', 'complexity', 'active')
Above you can see I have an importance field present on the Django model, but here I renamed it to question_importance by using the source attribute.
In your case it will look like this:
class ElementCommonInfoSerializer(serializers.ModelSerializer):
    general_errors = serializers.CharField(source="self_description", required=False, allow_null=True,
                                           validators=[])

    class Meta:
        model = Elements
        fields = ('general_errors', ......)

Django Admin: how to display a url as a link while calling specific function to download the file

Title is a bit confusing, but basically I have an s3 path stored as a string
class S3Stuff(Model):
    s3_path = CharField(max_length=255, blank=True, null=True)
    # rest is not important
There are existing methods to download the content given the url, so I want to utilize that:
def download_from_s3(bucket, file_name):
    s3_client = boto3.client(bleh_bleh)
    s3_response = s3_client.get_object(Bucket=bucket, Key=file_name)
    return {'response': 200, 'body': s3_response['Body'].read()}
s3_path can be broken into bucket and file_name. This works very easily with my own frontend because I can do whatever I want with it, but I don't know how to apply it to the admin:
class S3StuffAdmin(admin.StackedInline):
    model = S3Stuff
    fields = ('s3_path', )
Now how do I call that method and make the display a link that says "download"?
I don't think that function will be of much use for generating download links; instead, use boto3's generate_presigned_url like this:
from django.utils.html import format_html


class S3StuffAdmin(admin.StackedInline):
    model = S3Stuff
    fields = ('s3_path', 'download')
    readonly_fields = ('download',)

    def download(self, obj):
        s3_client = boto3.client(bleh_bleh)
        url = s3_client.generate_presigned_url(
            'get_object',
            Params={'Bucket': 'bucket', 'Key': obj.s3_path},
            ExpiresIn=100)
        return format_html('<a href="{}">download</a>', url)
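Since a StackedInline is not registered on its own, it still has to be attached to the admin of the parent model that S3Stuff relates to. A minimal sketch, where ParentModel is a hypothetical placeholder for that parent:
class ParentModelAdmin(admin.ModelAdmin):
    # ParentModel is hypothetical; use whichever model S3Stuff has a ForeignKey to
    inlines = [S3StuffAdmin]


admin.site.register(ParentModel, ParentModelAdmin)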

Django default data throw an error during migration

I'm using Django 1.7
class MyModel(models.Model):
    my_random_field_1 = models.ForeignKey(
        MyOtherModel, null=True, blank=True, related_name="random_1", default=get_random_1
    )
    my_random_field_2 = models.ForeignKey(
        MyOtherModel, null=True, blank=True, related_name="random_2", default=get_random_2
    )
And 'random functions':
def get_random_1():
    ob = MyOtherModel.objects.filter(...some filtering...)
    try:
        x = ob[0]
        return x
    except:
        return None


def get_random_2():
    ob = MyOtherModel.objects.filter(...some other filtering...)
    try:
        x = ob[1]
        return x
    except:
        return None
And when I try to migrate I get this error:
TypeError: int() argument must be a string, a bytes-like object or a number, not 'MyOtherModel'
Sentry is attempting to send 2 pending error messages
Waiting up to 10 seconds
But after that, when I open the admin panel and go to MyOtherModel, these random fields are there, and they are properly initialized with 'ob[0]' and 'ob[1]'.
To make this code work you should return the instance's primary key as the default, not the instance itself.
def get_random_1():
    ob = MyOtherModel.objects.filter(...some filtering...)
    try:
        x = ob[0]
        return x.pk
    except:
        return None


def get_random_2():
    ob = MyOtherModel.objects.filter(...some other filtering...)
    try:
        x = ob[1]
        return x.pk
    except:
        return None
But mind you that this value will stay "baked" into your migration file, and all instances that are in your db at the time of the migration (old data, for instance) will get that one single value, so maybe this is not what you want.
Newer versions of Django don't even allow baking an object instance into a migration file:
ValueError: Cannot serialize: <Model: instance name>
There are some values Django cannot serialize into migration files.

GeoDJango: retrieve last inserted primary key from LayerMapping

I am building an application with GeoDjango and I have the following problem:
I need to read track data from a GPX file, and those data should be stored in a model's MultiLineStringField field.
This should happen in the admin interface, where the user uploads a GPX file.
I am trying to achieve exactly this: the data grabbed from the file should be assigned to the MultiLineStringField, while the other fields should get their values from the form.
My model is:
class GPXTrack(models.Model):
    nome = models.CharField("Nome", blank=False, max_length=255)
    slug = models.SlugField("Slug", blank=True)
    # sport natura arte/cultura
    tipo = models.CharField("Tipologia", blank=False, max_length=2, choices=TIPOLOGIA_CHOICES)
    descrizione = models.TextField("Descrizione", blank=True)
    gpx_file = models.FileField(upload_to='uploads/gpx/')
    track = models.MultiLineStringField(blank=True)
    objects = models.GeoManager()
    published = models.BooleanField("Pubblicato")
    rel_files = generic.GenericRelation(MyFiles)
    #publish_on = models.DateTimeField("Pubblicare il", auto_now_add = True)
    created = models.DateTimeField("Created", auto_now_add=True)
    updated = models.DateTimeField("Updated", auto_now=True)

    class Meta:
        #verbose_name = "struttura'"
        #verbose_name_plural = "strutture"
        ordering = ['-created']

    def __str__(self):
        return str(self.nome)

    def __unicode__(self):
        return '%s' % (self.nome)

    def put(self):
        self.slug = sluggy(self.nome)
        key = super(Foresta, self).put()
        # do something after save
        return key
While in the admin.py file I have overridden the save_model method as follows:
from django.contrib.gis import admin
from trails.models import GPXPoint, GPXTrack
from django.contrib.contenttypes import generic
from django.contrib.gis.gdal import DataSource
#from gpx_mapping import GPXMapping
from django.contrib.gis.utils import LayerMapping
from django.template import RequestContext
import tempfile
import os
import pprint


class GPXTrackAdmin(admin.OSMGeoAdmin):
    list_filter = ('tipo', 'published')
    search_fields = ['nome']
    list_display = ('nome', 'tipo', 'published', 'gpx_file')
    inlines = [TrackImagesInline, TrackFilesInline]
    prepopulated_fields = {"slug": ("nome",)}

    def save_model(self, request, obj, form, change):
        """When creating a new object, set the creator field.
        """
        if 'gpx_file' in request.FILES:
            # Get
            gpxFile = request.FILES['gpx_file']
            # Save
            targetPath = tempfile.mkstemp()[1]
            destination = open(targetPath, 'wt')
            for chunk in gpxFile.chunks():
                destination.write(chunk)
            destination.close()

            #define fields of interest for LayerMapping
            track_point_mapping = {'timestamp': 'time',
                                   'point': 'POINT',
                                   }
            track_mapping = {'track': 'MULTILINESTRING'}

            gpx_file = DataSource(targetPath)
            mytrack = LayerMapping(GPXTrack, gpx_file, track_mapping, layer='tracks')
            mytrack.save()

            #remove the temp file saved
            os.remove(targetPath)

            orig = GPXTrack.objects.get(pk=mytrack.pk)

            #assign the parsed values from LayerMapping to the appropriate Field
            obj.track = orig.track

            obj.save()
As far as I know:
LayerMapping cannot be used to update a field but only to save a new one
I cannot access a specific field of the LayerMapping object (i.e. in the code above: mytrack.track) and assign its value to a model field (i.e. obj.track) in the save_model method
I cannot retrieve the primary key of the last saved LayerMapping object (i.e. in the code above: mytrack.pk) in order to update it with the values passed in the form for the fields not mapped in LayerMapping.mapping
What can I do then?!?!
I sorted it out by subclassing LayerMapping and adding a get_values() method that, instead of saving the retrieved data, returns them for any use or manipulation. The get_values method is a copy of LayerMapping.save() that returns the values instead of saving them.
I am using Django 1.5.
import os
import sys

from django.contrib.gis.gdal import OGRGeometry
from django.contrib.gis.utils import LayerMapping, LayerMapError
from django.core.exceptions import ObjectDoesNotExist
from django.db import transaction


class MyMapping(LayerMapping):
    def get_values(self, verbose=False, fid_range=False, step=False,
                   progress=False, silent=False, stream=sys.stdout, strict=False):
        """
        Returns the contents from the OGR DataSource Layer
        according to the mapping dictionary given at initialization.

        Keyword Parameters:
         verbose:
           If set, information will be printed subsequent to each model save
           executed on the database.

         fid_range:
           May be set with a slice or tuple of (begin, end) feature ID's to map
           from the data source. In other words, this keyword enables the user
           to selectively import a subset range of features in the geographic
           data source.

         step:
           If set with an integer, transactions will occur at every step
           interval. For example, if step=1000, a commit would occur after
           the 1,000th feature, the 2,000th feature etc.

         progress:
           When this keyword is set, status information will be printed giving
           the number of features processed and successfully saved. By default,
           progress information will be printed every 1000 features processed,
           however, this default may be overridden by setting this keyword with an
           integer for the desired interval.

         stream:
           Status information will be written to this file handle. Defaults to
           using `sys.stdout`, but any object with a `write` method is supported.

         silent:
           By default, non-fatal error notifications are printed to stdout, but
           this keyword may be set to disable these notifications.

         strict:
           Execution of the model mapping will cease upon the first error
           encountered. The default behavior is to attempt to continue.
        """
        # Getting the default Feature ID range.
        default_range = self.check_fid_range(fid_range)

        # Setting the progress interval, if requested.
        if progress:
            if progress is True or not isinstance(progress, int):
                progress_interval = 1000
            else:
                progress_interval = progress

        # Defining the 'real' save method, utilizing the transaction
        # decorator created during initialization.
        #self.transaction_decorator
        def _get_values(feat_range=default_range, num_feat=0, num_saved=0):
            if feat_range:
                layer_iter = self.layer[feat_range]
            else:
                layer_iter = self.layer

            for feat in layer_iter:
                num_feat += 1
                # Getting the keyword arguments
                try:
                    kwargs = self.feature_kwargs(feat)
                except LayerMapError as msg:
                    # Something borked the validation
                    if strict:
                        raise
                    elif not silent:
                        stream.write('Ignoring Feature ID %s because: %s\n' % (feat.fid, msg))
                else:
                    # Constructing the model using the keyword args
                    is_update = False
                    if self.unique:
                        # If we want unique models on a particular field, handle the
                        # geometry appropriately.
                        try:
                            # Getting the keyword arguments and retrieving
                            # the unique model.
                            u_kwargs = self.unique_kwargs(kwargs)
                            m = self.model.objects.using(self.using).get(**u_kwargs)
                            is_update = True

                            # Getting the geometry (in OGR form), creating
                            # one from the kwargs WKT, adding in additional
                            # geometries, and update the attribute with the
                            # just-updated geometry WKT.
                            geom = getattr(m, self.geom_field).ogr
                            new = OGRGeometry(kwargs[self.geom_field])
                            for g in new:
                                geom.add(g)
                            setattr(m, self.geom_field, geom.wkt)
                        except ObjectDoesNotExist:
                            # No unique model exists yet, create.
                            m = self.model(**kwargs)
                    else:
                        m = self.model(**kwargs)

                    try:
                        # Instead of saving, keep the keyword arguments.
                        pippo = kwargs
                        num_saved += 1
                        if verbose:
                            stream.write('%s: %s\n' % (is_update and 'Updated' or 'Saved', m))
                    except SystemExit:
                        raise
                    except Exception as msg:
                        if self.transaction_mode == 'autocommit':
                            # Rolling back the transaction so that other model saves
                            # will work.
                            transaction.rollback_unless_managed()
                        if strict:
                            # Bailing out if the `strict` keyword is set.
                            if not silent:
                                stream.write('Failed to save the feature (id: %s) into the model with the keyword arguments:\n' % feat.fid)
                                stream.write('%s\n' % kwargs)
                            raise
                        elif not silent:
                            stream.write('Failed to save %s:\n %s\nContinuing\n' % (kwargs, msg))

                # Printing progress information, if requested.
                if progress and num_feat % progress_interval == 0:
                    stream.write('Processed %d features, saved %d ...\n' % (num_feat, num_saved))

            # Only used for status output purposes -- incremental saving uses the
            # values returned here.
            return pippo

        nfeat = self.layer.num_feat
        if step and isinstance(step, int) and step < nfeat:
            # Incremental saving is requested at the given interval (step)
            if default_range:
                raise LayerMapError('The `step` keyword may not be used in conjunction with the `fid_range` keyword.')
            beg, num_feat, num_saved = (0, 0, 0)
            indices = range(step, nfeat, step)
            n_i = len(indices)

            for i, end in enumerate(indices):
                # Constructing the slice to use for this step; the last slice is
                # special (e.g, [100:] instead of [90:100]).
                if i + 1 == n_i:
                    step_slice = slice(beg, None)
                else:
                    step_slice = slice(beg, end)

                try:
                    pippo = _get_values(step_slice, num_feat, num_saved)
                    beg = end
                except:
                    stream.write('%s\nFailed to save slice: %s\n' % ('=-' * 20, step_slice))
                    raise
        else:
            # Otherwise, just calling the previously defined _get_values() function.
            return _get_values()
In a custom save or save_model method you can then use:
track_mapping = {'nome': 'name',
                 'track': 'MULTILINESTRING'}

targetPath = "/my/gpx/file/path.gpx"
gpx_file = DataSource(targetPath)
mytrack = MyMapping(GPXTrack, gpx_file, track_mapping, layer='tracks')
pippo = mytrack.get_values()
obj.track = pippo['track']
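For context, a minimal sketch of how this could sit inside the admin's save_model override, reusing the temp-file handling from the question; MyMapping is the subclass defined above, and the temp-file suffix, layer name, and mapping are assumptions carried over from the earlier snippets:
import os
import tempfile

from django.contrib.gis import admin
from django.contrib.gis.gdal import DataSource

from trails.models import GPXTrack


class GPXTrackAdmin(admin.OSMGeoAdmin):
    def save_model(self, request, obj, form, change):
        if 'gpx_file' in request.FILES:
            # Write the uploaded GPX to a temporary file so DataSource can read it
            fd, target_path = tempfile.mkstemp(suffix='.gpx')
            with os.fdopen(fd, 'wb') as destination:
                for chunk in request.FILES['gpx_file'].chunks():
                    destination.write(chunk)
            try:
                gpx_file = DataSource(target_path)
                track_mapping = {'track': 'MULTILINESTRING'}
                mytrack = MyMapping(GPXTrack, gpx_file, track_mapping, layer='tracks')
                # get_values() returns the parsed keyword arguments instead of saving a new row
                obj.track = mytrack.get_values()['track']
            finally:
                os.remove(target_path)
        obj.save()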