I'm trying to create an app with a form that, when submitted, updates a table in my database based on the submitted info, but I'm not sure how to go about it. Currently, I have a simple model:
class Client(models.Model):
    company_name = models.CharField(max_length=200)
    launchpad_id = models.PositiveIntegerField()
    client_email = models.EmailField()
    content_id = models.CharField(max_length=200)

    def __str__(self):
        return self.company_name + ' | ' + self.content_id
and my databases are configured like so:
DATABASES = {
    'default': {
        'ENGINE': 'django.db.backends.mysql',
        'NAME': 'django_project',
        'USER': 'xxx',
        'PASSWORD': 'xxx',
        'HOST': 'xxxx',
        'PORT': 'xxx',
    },
    'info': {
        'ENGINE': 'django.db.backends.mysql',
        'NAME': 'reporting_database',
        'USER': 'xxx',
        'PASSWORD': 'xxx',
        'HOST': 'xxx',
        'PORT': 'xxx',
    }
}
What I want to happen is that when I submit my fields through the Client model, either in the admin or a template, it updates my client_info table in the reporting_database. I can't seem to figure out how to make that connection work, though. I would appreciate any direction you can give me. Thanks.
You need a function that makes it possible to create or update a model instance. You can follow the Django docs:
https://docs.djangoproject.com/en/2.1/ref/models/instances/
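For illustration only, here is a minimal sketch of such a create-or-update helper targeting the 'info' alias from the settings above. This assumes a matching table for the Client model also exists in that database, which is not shown in the question:

def create_or_update_client(company_name, launchpad_id, client_email, content_id):
    # Look up an existing row by content_id and update it, otherwise insert one.
    # .using('info') routes the query to the second database alias in DATABASES.
    obj, created = Client.objects.using('info').update_or_create(
        content_id=content_id,
        defaults={
            'company_name': company_name,
            'launchpad_id': launchpad_id,
            'client_email': client_email,
        },
    )
    return obj, created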
Use a model form:
from django import forms

from your_app.models import Client


class ClientUpdateForm(forms.ModelForm):
    class Meta:
        model = Client
        fields = ('company_name', 'launchpad_id', 'client_email', 'content_id')

    def __init__(self, *args, **kwargs):
        super(ClientUpdateForm, self).__init__(*args, **kwargs)
        self.fields['company_name'].required = True
        self.fields['launchpad_id'].required = True
        self.fields['client_email'].required = True
        self.fields['content_id'].required = True

    def clean(self):
        cleaned_data = super(ClientUpdateForm, self).clean()
        company_name = cleaned_data.get('company_name')
        launchpad_id = cleaned_data.get('launchpad_id')
        client_email = cleaned_data.get('client_email')
        content_id = cleaned_data.get('content_id')
        return cleaned_data
Then inherit from UpdateView:
from django.http import HttpResponseRedirect
from django.views.generic import UpdateView

from your_app.models import Client


class ClientUpdateView(UpdateView):
    model = Client
    form_class = ClientUpdateForm
    template_name_suffix = '_update_form'

    def form_valid(self, form):
        client = form.save(commit=False)
        client.save()
        return HttpResponseRedirect('/redirect/')
The template_name_suffix means you should name the template where you render the form client_update_form.html.
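For completeness, a hedged sketch of how this view might be wired into a URLconf; the URL pattern and name are assumptions, not taken from the question:

# urls.py (illustrative only)
from django.urls import path

from .views import ClientUpdateView

urlpatterns = [
    # The UpdateView fetches the Client by pk and renders client_update_form.html
    path('clients/<int:pk>/edit/', ClientUpdateView.as_view(), name='client-update'),
]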
I think you need a database router for this. Have a look at https://docs.djangoproject.com/en/2.1/topics/db/multi-db/. Basically, you need to set up a DatabaseRouter that decides which database each model's table is read from and written to, and then register your custom router in the Django settings. You will probably also need to run the migration for this model against the target database, with something like ./manage.py migrate myapp 0005_migration_to_run --database=your_target_database_name. I would also recommend having a separate app per database inside the project to make this easier.
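As a rough illustration of that idea (the module path, model set and alias names are assumptions, not tested against the asker's project), such a router could look like this:

# routers.py (sketch only)
class ReportingRouter(object):
    """Route the Client model to the 'info' alias; everything else falls through to 'default'."""

    routed_models = {'client'}

    def db_for_read(self, model, **hints):
        if model._meta.model_name in self.routed_models:
            return 'info'
        return None  # let the next router (or 'default') decide

    def db_for_write(self, model, **hints):
        if model._meta.model_name in self.routed_models:
            return 'info'
        return None

    def allow_migrate(self, db, app_label, model_name=None, **hints):
        if model_name in self.routed_models:
            return db == 'info'
        return None

It would then be registered in settings.py with something like DATABASE_ROUTERS = ['myproject.routers.ReportingRouter'] (path assumed).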
I was able to achieve my desired results by adding the following code to my model:
def update_mysql(self):
    # `db` is an open MySQL connection (e.g. MySQLdb/PyMySQL) to reporting_database
    cursor = db.cursor()
    sql = ("UPDATE tb_reporting_info SET client_email = %s "
           "WHERE content_id = %s AND launchpad_id = %s")
    # Pass the values as query parameters rather than string formatting to avoid SQL injection
    cursor.execute(sql, (self.client_email, self.content_id, self.launchpad_id))
    db.commit()
I set my form action to action="{% url 'contact:addClient' %}" and my view to:
def addClient(request):
    if request.method == 'POST':
        # Set POST data to variables
        company_name = request.POST['company_name']
        launchpad_id = request.POST['launchpad_id']
        content_id = request.POST['content_id']
        client_email = request.POST['client_email']
        client_added = True  # Tells the template to render a success message
        # Pass POST data to Client object
        c = Client(company_name=company_name,
                   launchpad_id=launchpad_id,
                   content_id=content_id,
                   client_email=client_email)
        c.update_mysql()
It's bare bones but it works perfectly for what I need.
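As a side note, not part of the accepted solution above: since the 'info' alias is already defined in DATABASES, the same raw UPDATE could also be issued through Django's own connection handling instead of a separately managed MySQL connection. A minimal sketch, assuming tb_reporting_info lives in the database behind the 'info' alias:

from django.db import connections

def update_reporting_info(client):
    # Reuse Django's configured 'info' connection rather than opening one manually
    with connections['info'].cursor() as cursor:
        cursor.execute(
            "UPDATE tb_reporting_info SET client_email = %s "
            "WHERE content_id = %s AND launchpad_id = %s",
            [client.client_email, client.content_id, client.launchpad_id],
        )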
Related
I have a foreign key on my models Patient and Doctor, which points to a Clinic class. So a Patient and a Doctor are supposed to belong to that Clinic alone; other Clinics should not be able to see any detail of these models.
The models look like this:
class Clinic(models.Model):
    clinicid = models.AutoField(primary_key=True, unique=True)
    name = models.CharField(max_length=60, unique=True)
    label = models.SlugField(max_length=25, unique=True)
    email = models.EmailField(max_length=100, default='')
    mobile = models.CharField(max_length=15, default='')
    ...


class Doctor(models.Model):
    # Need autoincrement, unique and primary
    docid = models.AutoField(primary_key=True, unique=True)
    name = models.CharField(max_length=200)
    username = models.CharField(max_length=15)
    regid = models.CharField(max_length=15, default="", blank=True)
    ...
    linkedclinic = models.ForeignKey(Clinic, on_delete=models.CASCADE)


class Patient(models.Model):
    cstid = models.AutoField(primary_key=True, unique=True)
    date_of_registration = models.DateField(default=timezone.now)
    name = models.CharField(max_length=35, blank=False)
    ageyrs = models.IntegerField(blank=True)
    agemnths = models.IntegerField(blank=True)
    dob = models.DateField(null=True, blank=True)
    ...
    linkedclinic = models.ForeignKey(Clinic, on_delete=models.CASCADE)


class UserGroupMap(models.Model):
    id = models.AutoField(primary_key=True, unique=True)
    user = models.ForeignKey(
        User, related_name='target_user', on_delete=models.CASCADE)
    group = models.ForeignKey(UserGroup, on_delete=models.CASCADE)
    clinic = models.ForeignKey(Clinic, on_delete=models.CASCADE)
    ...
From my Vue app, I POST using Axios to the Django app, which uses DRF, and thus get serialized data for Patients and Doctors. It all works fine if I use the following sample code in a function view:
from rest_framework import status
from rest_framework.decorators import api_view
from rest_framework.response import Response


@api_view(['GET', 'POST'])
def register_patient_vue(request):
    if request.method == 'POST':
        print("POST details", request.data)
        data = request.data['registration_data']
        serializer = customerSpecialSerializer(data=data)
        if serializer.is_valid():
            a = serializer.save()
            print(serializer.data)
            return Response(serializer.data, status=status.HTTP_201_CREATED)
        else:
            print("Serializer is not valid.")
            print(serializer.errors)
            return Response(serializer.errors, status=status.HTTP_400_BAD_REQUEST)
Sample output:
POST details {'registration_data': {'name': 'wczz', 'ageyrs': 21, 'agemonths': '', 'dob': '', 'gender': 'unspecified', 'mobile': '2', 'email': '', 'alternate': '', 'address': '', 'marital': 'unspecified', 'city': '', 'occupation': '', 'linkedclinic': 10}}
data: {'name': 'wczz', 'ageyrs': 21, 'agemonths': '', 'dob': '', 'gender': 'unspecified', 'mobile': '2', 'email': '', 'alternate': '', 'address': '', 'marital': 'unspecified', 'city': '', 'occupation': '', 'linkedclinic': 10}
However, I need to authenticate the request with special custom authentication. I have another class called UserGroupMap, which has foreign keys to both User and Clinic, so that if a filter on the clinic and user finds a match in the map, the request is authenticated. Otherwise authentication should fail, and the data should not be retrieved or the serializer saved.
In my previous, plain Django project I used to employ a custom permission function, decorating my view with it:
@handle_perm(has_permission_level, required_permission='EDIT_CLINICAL_RECORD', login_url='/clinic/')
def some_function(request, dept_id):
    ....
    # Some code which runs after authentication
And it would use the following:
from functools import wraps
from urllib.parse import urlparse

from django.conf import settings
from django.contrib.auth import REDIRECT_FIELD_NAME
from django.shortcuts import resolve_url


def handle_perm(test_func, required_permission=None, login_url=None, redirect_field_name=REDIRECT_FIELD_NAME):
    """
    Decorator for views that checks that the user passes the given test,
    redirecting to the log-in page if necessary. The test should be a callable
    that takes the user object and returns True if the user passes.
    """
    def decorator(view_func):
        @wraps(view_func)
        def _wrapped_view(request, *args, **kwargs):
            print(f"Required permission level is {required_permission}")
            if has_permission_level(request, required_permission):
                print("User has required permission level. Allowing entry.")
                return view_func(request, *args, **kwargs)
            print("FAILED! User does not have required permission level. Access blocked.")
            path = request.build_absolute_uri()
            resolved_login_url = resolve_url(login_url or settings.LOGIN_URL)
            # If the login url is the same scheme and net location then just
            # use the path as the "next" url.
            login_scheme, login_netloc = urlparse(resolved_login_url)[:2]
            current_scheme, current_netloc = urlparse(path)[:2]
            if ((not login_scheme or login_scheme == current_scheme) and
                    (not login_netloc or login_netloc == current_netloc)):
                path = request.get_full_path()
            from django.contrib.auth.views import redirect_to_login
            return redirect_to_login(
                path, resolved_login_url, redirect_field_name)
        return _wrapped_view
    return decorator
def has_permission_level(request, required_permission, clinic=None):
    print("has_permission_level was called.")
    user = request.user
    print(f'user is {user}')
    clinic = clinic_from_request(request)
    print(f"has_permission_level called with clinic:{clinic}")
    if clinic is None:
        print("clinic is none")
        return HttpResponseRedirect('/accounts/login/')
    group_maps = UserGroupMap.objects.filter(user=user, clinic=clinic)
    print(f"No: of UserGroupMap memberships: {len(group_maps)}")
    if len(group_maps) < 1:
        # There are no UserGroupMap entries set up for the user. Kindly set them up.
        # Hint: Admin > Manage users and groups > Users
        return False
    # Now check group memberships for one that carries the required permission
    for map in group_maps:
        rolesmapped = GroupRoleMap.objects.filter(group=map.group)
        if len(rolesmapped) < 1:
            print(f"No permission roles.")
        else:
            for rolemap in rolesmapped:
                print(f"{rolemap.role}", end=",")
                if rolemap.role.name == required_permission:
                    print(
                        f"\nAvailable role of [{map.group}] matched required permission of [{required_permission}] in {clinic.name} [Ok]")
                    return True
    return False
I need to build custom authentication using DRF, so that it reads the POSTed data, checks the linkedclinic value, and applies similar logic.
I started like this:
def has_permission_POST(request, required_permission, clinic=None):
    print("has_permission_POST was called.")
    user = request.user
    print(f'user is {user}')
    if request.method == 'POST':
        print(request)
        print(dir(request))
        print("POST details: POST:", request.POST, "\n")
        print("POST details: data:", request.data, "\n")
        ....
        # Further logic to check the mapping
        return True
    else:
        print("Not a valid POST")
        return Response("INVALID POST", status=status.HTTP_400_BAD_REQUEST)
# And decorating my DRF view:
@handle_perm(has_permission_POST, required_permission='EDIT_CLINICAL_RECORD', login_url='/clinic/')
@api_view(['GET', 'POST'])
def register_patient_vue(request):
    if request.method == 'POST':
        print("POST details", request.data)
        data = request.data['registration_data']
The problem is that if I run this, has_permission_POST cannot get the value of request.data, which contains the data posted from my frontend. I can work around this by adding the @api_view(['GET', 'POST']) decorator to has_permission_POST, but that introduces another error, a failed assertion:
AssertionError: Expected a `Response`, `HttpResponse` or `HttpStreamingResponse` to be returned from the view, but received a `<class 'bool'>`
This happens in has_permission_POST once it is decorated with @api_view.
So my problems:
How to implement a custom authentication for my use case?
If I am going about this the right way with this custom has_permission_level, how can I get request.data in this function before my actual API view is called, so that I can read the clinic id and do the permission checks I need?
I have taken a look at the CustomAuthentication provided by DRF, but could not work out how to get the request.data parameters in the custom class.
Thanks to @MihaiChelaru, I was able to find a solution to my problem.
I created a custom permission class by extending permissions.BasePermission and putting my custom logic in its has_permission method. I went a step further and implemented checking of the token in the request. Once the token is authenticated, the user can be obtained from the matching entry in the Token table. I found that in the custom permission class I could read the full request.data parameter passed by Vue and Postman. Once I could read that, I could easily implement the custom checking of user permissions against my custom models.
from rest_framework import permissions
from rest_framework.authtoken.models import Token
from rest_framework.exceptions import AuthenticationFailed, NotAuthenticated


class CustomerAccessPermission(permissions.BasePermission):
    message = 'No permission to create new patient records'

    def has_permission(self, request, view):
        bearer_authorizn = request.META.get('HTTP_AUTHORIZATION')
        try:  # Different clients (Postman, Vue) seem to use different prefixes when passing the token
            token = bearer_authorizn.split("Bearer ")[1]
        except Exception:
            try:
                token = bearer_authorizn.split("Token ")[1]
            except Exception:
                raise NotAuthenticated('Did not get token in request')
        try:
            token_obj = Token.objects.get(key=token)
        except Token.DoesNotExist:
            raise AuthenticationFailed('Invalid token')
        if not token_obj.user.is_active:
            raise AuthenticationFailed('User inactive or deleted')
        print("Username is %s" % token_obj.user.username)
        print("POST details", request.data)
        linkedclinic_id = request.data['data']['linkedclinic']
        clinic = Clinic.objects.get(clinicid=int(linkedclinic_id))
        print("Clinic membership requested:", clinic)
        group_maps = UserGroupMap.objects.filter(user=token_obj.user, clinic=clinic)
        print(f"No: of UserGroupMap memberships: {len(group_maps)}")
        if len(group_maps) >= 1:  # at least one mapping for this user and clinic grants access
            return True
        return False
@api_view(['POST'])
@permission_classes([CustomerAccessPermission])
def register_patient_vue(request):
    logger.info('In register_patient_vue...')
    ...
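For reference, a minimal sketch of how a client could call this endpoint with a token; the URL, token value and payload keys are placeholders, not taken from the answer above:

import requests

# Placeholder URL, token and payload keys -- adjust to your actual routing and serializer
url = 'http://localhost:8000/api/register_patient_vue/'
payload = {'registration_data': {'name': 'wczz', 'ageyrs': 21, 'linkedclinic': 10}}
headers = {'Authorization': 'Token <your-drf-token-key>'}

response = requests.post(url, json=payload, headers=headers)
print(response.status_code, response.json())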
I am trying to implement multiple-database support for my Django (version 1.11) app. For that purpose I have included the following in my settings.py:
DATABASES = {
    'default': {
        'ENGINE': 'django.db.backends.postgresql_psycopg2',
        'NAME': 'labrin_dbase',
        'USER': 'labrin_admin',
        'PASSWORD': 'ndzwwZHv63STuvAF?C_$L#j#*#epZXaX',
        'HOST': 'localhost',
        'PORT': '5432',
    },
    'comment': {
        'ENGINE': 'django.db.backends.postgresql_psycopg2',
        'NAME': 'labrin_dbase_comments',
        'USER': 'labrin_admin_comments',
        'PASSWORD': 'adzwaTHv63STuvAF!C_$L#j#*#epZXaY',
        'HOST': 'localhost',
        'PORT': '5433',
    }
}

DATABASE_ROUTERS = [
    'labrin_task.comment_router.CommentRouter',
]
And my database router is configured as below:
class CommentRouter(object):
    def db_for_read(self, model, **hints):
        if model._meta.db_table == 'todo_comment':
            return 'comment'
        return None

    def db_for_write(self, model, **hints):
        if model._meta.db_table == 'todo_comment':
            return 'comment'
        return None

    def allow_relation(self, obj1, obj2, **hints):
        return True

    def allow_migrate(self, db, app_label, model_name=None, **hints):
        if model_name == 'comment':
            return db == 'comment'
        return None
Models in my "todo" app (which is the only app in the project):
from django.db import models
from django.contrib.auth import get_user_model

UserModel = get_user_model()


class Todo(models.Model):
    name = models.CharField(max_length=64)
    description = models.TextField()
    author = models.ForeignKey(UserModel, on_delete=models.CASCADE)
    deadline = models.DateTimeField()
    created_at = models.DateTimeField(auto_now_add=True)


class Comment(models.Model):
    todo = models.ForeignKey(Todo, on_delete=models.CASCADE)
    author = models.ForeignKey(UserModel, on_delete=models.CASCADE)
    text = models.CharField(max_length=256)
    created_at = models.DateTimeField(auto_now_add=True)


class ShareTodo(models.Model):
    todo = models.ForeignKey(Todo, on_delete=models.CASCADE)
    with_user = models.ForeignKey(UserModel, on_delete=models.CASCADE)
    comment_allowed = models.BooleanField(default=False)
When I remove the comment database and DATABASE_ROUTERS from settings.py, my app works normally. After adding them to settings.py, my app returns an error when I create a Comment object. The error says:
Exception inside application: insert or update on table "todo_comment" violates foreign key constraint "todo_comment_author_id_bb272a3e_fk_auth_user_id"
DETAIL: Key (author_id)=(1) is not present in table "auth_user".
What am I doing wrong?
Note: I am starting two Postgres servers as separate Docker containers, and after running the containers I run python manage.py migrate and python manage.py migrate --database=comment to apply all migrations.
Sorry, but cross-database relations are not possible in Django. You can find the full explanation in the Django docs.
Furthermore, you cannot do cross-database relations in PostgreSQL either, so trying to hack it together outside of Django won't work. It may be possible with other database engines; you can do your own research.
After changing the field used as the primary key in a model, I now receive an error when trying to create an object from a form.
I have deleted the SQLite database file and everything in the migrations directory, and performed makemigrations and migrate. I do not believe the problem is with the database; rather, something in the code no longer functions the same now that I am not using a custom primary key.
As someone new to Django, I suspect I am missing something fundamental but cannot quite identify what that is.
views.py
@login_required
def job_create(request):
    client = request.POST.get('client')
    form = JobForm(request.POST or None)
    form.fields['client'].initial = Client.objects.get(client_name=client)
    if request.method == "POST":
        if form.is_valid():
            form.save()
            return JsonResponse({"Success": True})
models.py
class Client(models.Model):
    client_name = models.CharField(max_length=255, unique=True)

    def __str__(self):
        return self.client_name


class Job(models.Model):
    client = models.ForeignKey(Client, on_delete=models.CASCADE)
    job_number = models.CharField(validators=[RegexValidator(regex='^\d{4}$', message='Invalid job number', code='invalid')], max_length=4, unique=True)
    job_description = models.CharField(max_length=30)
forms.py
class JobForm(forms.ModelForm):
    class Meta:
        model = Job
        fields = ('client', 'job_number', 'job_description',)
The above code fails to create and save the object into the database. Below is my attempt to recreate this using the Django shell:
>>> from myproject.models import Client, Job
>>> from myproject.forms import JobForm
>>> client = Client.objects.get(client_name='John')
>>> jobform = JobForm({'client': client, 'job_description':'This is a job description', 'job_number':'4321'})
>>> jobform.errors
{'client': ['Select a valid choice. That choice is not one of the available choices.']}
Database Columns
sqlite> PRAGMA table_info(myproject_job);
0|id|integer|1||1
1|job_number|varchar(4)|1||0
2|job_description|varchar(30)|1||0
3|client_id|integer|0||0
Solution for now
@login_required
def job_create(request):
    if request.method == "POST":
        client = Client.objects.get(client_name=request.POST.get("client"))
        request.POST = request.POST.copy()
        request.POST["client"] = client.id
        form = JobForm(request.POST)
        if form.is_valid():
            form.save()
            return JsonResponse({"success": "true"})
I use Django 2.1 on Python 3.6 with pytest-django 3.4.
I would like to test the clean() method of a form defined like this:
from django.forms import HiddenInput, ModelForm, ValidationError
from log.models import Entry


class EntryForm(ModelForm):
    class Meta:
        model = Entry
        fields = ['user', 'contact', 'title', 'desc']
        widgets = {
            'user': HiddenInput(),
            'contact': HiddenInput(),
        }

    def __init__(self, *args, **kwargs):
        """ Get back user & contact obj for `self.clean()` """
        user = kwargs.pop('user')
        contact = kwargs.pop('contact')
        super(EntryForm, self).__init__(*args, **kwargs)
        self.fields['user'].initial = user
        self.fields['contact'].initial = contact

    def clean(self):
        """
        Checks if an entry is added on a contact owned by the connected user
        """
        cleaned_data = super(EntryForm, self).clean()
        if 'user' in self.changed_data or 'contact' in self.changed_data:
            raise ValidationError("Hidden input changed")
        if cleaned_data['contact'].user != cleaned_data['user']:
            raise ValidationError("Not allowed")
Outside tests, in a browser, this works like a charm even if I change the values of the hidden inputs: the ValidationError is raised.
I thought about using monkeypatch, but I did not understand how to inject my test conditions into a Django class…
I built this test by feel, but I cannot get it to raise the expected ValidationError:
def fake_entry_form__init__():
    self.fields['user'].initial = 'initial user'
    self.fields['contact'].initial = 'initial contact'


def fake_entry_form_unvalid_changed_data():
    return {
        'user': 'foo user',
        'contact': 'foo contact'
    }


def test_entry_form_clean_unvalid(monkeypatch):
    monkeypatch.setattr('log.forms.EntryForm.__init__', fake_entry_form__init__)
    form = EntryForm
    monkeypatch.setattr('log.forms.EntryForm.changed_data', fake_entry_form_unvalid_changed_data)
    try:
        form.clean
        assert False
    except KeyError:
        assert True
Am I on a good track or completely wrong?
I am new to Django, CBVs and testing; this is maybe a very obvious case, but I did not find an explanation for it.
Is it possible to have a model with foreign key fields on different databases?
example:
class MultiBDModel(models.Model):
    db1_user = models.ForeignKey(User)  # this one is supposed to be a foreign key on `db1`
    db2_user = models.ForeignKey(User)  # and this one on `db2`
Maybe I could somehow copy User and apply a custom manager to it that returns a queryset with using='db1'.
in settings.py:
DATABASES = {
    'default': {
        'ENGINE': 'django.db.backends.sqlite3',  # Add 'postgresql_psycopg2', 'mysql', 'sqlite3' or 'oracle'.
        'NAME': 'db1',  # Or path to database file if using sqlite3.
        'USER': '',  # Not used with sqlite3.
        'PASSWORD': '',  # Not used with sqlite3.
        'HOST': '',  # Set to empty string for localhost. Not used with sqlite3.
        'PORT': '',  # Set to empty string for default. Not used with sqlite3.
    },
    'website': {
        'ENGINE': 'django.db.backends.sqlite3',  # Add 'postgresql_psycopg2', 'postgresql', 'mysql', 'sqlite3' or 'oracle'.
        'NAME': 'db2',  # Or path to database file if using sqlite3.
        'USER': '',  # Not used with sqlite3.
        'PASSWORD': '',  # Not used with sqlite3.
        'HOST': '',  # Set to empty string for localhost. Not used with sqlite3.
        'PORT': '',  # Set to empty string for default. Not used with sqlite3.
    }
}
No. As written by @ignacio-vazquez-abrams, one model must have all its fields in the same database.
BUT
As an alternative, you could use a proxy model to link between models from two different databases.
Aim
One model should provide the fields from db1 and db2 at the same time
General trick
You have the model ContactMessage on db1, which we will call legacy_db. We suppose you don't want to touch this model since it comes from another project.
Create a proxy model ProxyContactMessage; it has the same attributes as ContactMessage.
Use a database router to tell Django where to look in legacy_db for ProxyContactMessage objects.
Add a new model ExtendedContactMessage with the fields you would like to add. Declare a OneToOneField with ProxyContactMessage. This data will be saved to your db2 django_db.
Your proxy model cannot hold the new fields, since it is only a proxy, but it can have methods that query the related ExtendedContactMessage object (if any). Add the callables you want.
Example
In your legacy_app/models.py, the model on db1 legacy_db is:
class ContactMessage(models.Model):
    subject = models.CharField(max_length=255)
    message = models.TextField()
    created_at = models.DateTimeField()
    created_by = models.CharField(max_length=255)

    class Meta:
        managed = False
        db_table = 'contact_message'

    def __unicode__(self):
        return self.subject
Therefore you create in myapp/models.py:
class ProxyContactMessage(ContactMessage):
    class Meta:
        proxy = True
        verbose_name = 'Contact message'
        verbose_name_plural = 'Contact messages'

    def add_extension(self):
        e = ExtendedContactMessage(contact_message=self)
        e.save()
        return e

    def mark_as_processed(self):
        try:
            e = self.extendedcontactmessage
        except ExtendedContactMessage.DoesNotExist:
            e = self.add_extension()
        e.mark_as_processed()

    def processed(self):
        return self.extendedcontactmessage.processed

    def processed_at(self):
        return self.extendedcontactmessage.processed_at


class ExtendedContactMessage(models.Model):
    contact_message = models.OneToOneField(ProxyContactMessage)
    processed = models.BooleanField(default=False, editable=False)
    processed_at = models.DateTimeField(null=True, default=None, editable=False)

    def mark_as_processed(self):
        self.processed = True
        self.processed_at = timezone.now()
        self.save()
Note that only the concrete model ExtendedContactMessage will be saved in db2, since ProxyContactMessage is only a proxy.
In settings.py, set DATABASE_ROUTERS to include a class like this:
class LegacyRouter(object):
    """
    A router to control all database operations on models in the
    legacy database.
    """
    def db_for_read(self, model, **hints):
        if model.__name__ == 'ProxyContactMessage':
            return 'legacy_db'
        return None

    def db_for_write(self, model, **hints):
        """
        Attempts to write in legacy DB for ContactMessage.
        """
        if model.__name__ == 'ProxyContactMessage':
            return 'legacy_db'
        return None
Your default router sends everything to db2.
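To make that explicit, here is a rough sketch of what the catch-all default router and the settings entry might look like; the module path and class names are assumptions, not taken from the answer above:

# routers.py (sketch only)
class DefaultRouter(object):
    """Send every model that LegacyRouter did not claim to the Django database."""

    def db_for_read(self, model, **hints):
        return 'default'

    def db_for_write(self, model, **hints):
        return 'default'


# settings.py (sketch only): routers are consulted in the order they are listed
DATABASE_ROUTERS = [
    'myapp.routers.LegacyRouter',
    'myapp.routers.DefaultRouter',
]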
Finally you may have an admin class like:
def mark_as_processed(modeladmin, request, queryset):
    for obj in queryset:
        obj.mark_as_processed()
mark_as_processed.short_description = "Mark as processed"


class ProxyContactMessageAdmin(admin.ModelAdmin):
    list_display = (
        'subject',
        'message',
        'created_at',
        'created_by',
        'processed',
        'processed_at',
    )
    actions = (mark_as_processed,)

admin.site.register(ProxyContactMessage, ProxyContactMessageAdmin)
Related:
Use a router for the proxy class
"Hack" the app_name in Meta
Catch the queryset
No. The ORM cannot do anything the database engine isn't capable of.