Shapefile to PostGIS import generates Django datetime error

Hi, I'm trying to populate the PostGIS database of my Django application from a Shapefile. My models.py is the following:
class Flow(models.Model):
    # Primary key
    flow_id = models.AutoField("ID, flow identifier", primary_key=True)
    # Other attributes
    stime = models.DateTimeField("Start time")
    stime_bc = models.IntegerField("Year of start time before Christ")
    stime_unc = models.DateTimeField("Start time uncertainty")
    etime = models.DateTimeField("End time")
    etime_bc = models.IntegerField("Year of end time before Christ")
    etime_unc = models.DateTimeField("End time uncertainty")
    final_vers = models.BooleanField("1 if it's the final version, else 0",
                                     default=False)
    com = models.CharField("Comments", max_length=255)
    loaddate = models.DateTimeField("Load date, the date the data was entered "
                                    "(in UTC)")
    pubdate = models.DateTimeField("Publish date, the date the data become "
                                   "public")
    cb_ids = models.ManyToManyField(Bibliographic)
    # Foreign key(s)
    fissure_id = models.ForeignKey(Fissure, null=True,
                                   related_name='flow_fissure_id',
                                   on_delete=models.CASCADE)
    cc_load_id = models.ForeignKey(Contact, null=True,
                                   related_name='flow_cc_id_load',
                                   on_delete=models.CASCADE)
    cc_pub_id = models.ForeignKey(Contact, null=True,
                                  related_name='flow_cc_id_pub',
                                  on_delete=models.CASCADE)

    # Override default table name
    class Meta:
        db_table = 'flow'
I want to add the features of my coulees.shp Shapefile into my database (more precisely in the flow table). The attribute table looks like this:
[Attribute table screenshot]
To do so I use Django's LayerMapping utility:
import os
from django.contrib.gis.utils import LayerMapping
from .models import Flow

mapping = {'stime': 'stime', 'stime_bc': 'stime_bc', 'stime_unc': 'stime_unc',
           'etime': 'etime', 'etime_bc': 'etime_bc', 'etime_unc': 'etime_unc',
           'com': 'com', 'loaddate': 'loaddate', 'pubdate': 'pubdate',
           'geometry': 'geometry'}

shp = os.path.abspath(
    os.path.join(os.path.dirname(__file__), 'data', '/home/sysop/Coulees/coulees.shp')
)

def run(verbose=True):
    lm = LayerMapping(
        Flow, shp, mapping,
        transform=True
    )
    lm.save(verbose=True)
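This is typically invoked from the Django shell. A minimal invocation sketch, assuming the loader module is app/load.py as the traceback below suggests:

# in `python manage.py shell`
from app.load import run
run(verbose=True)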
But when I try to run this function I get the following error:
Traceback (most recent call last):
File "<console>", line 1, in <module>
File "/opt/mapobs/mapobs/app/load.py", line 30, in run
transform=True
File "/opt/mapobs/lib/python3.5/site-packages/django/contrib/gis/utils /layermapping.py", line 106, in __init__
self.check_layer()
File "/opt/mapobs/lib/python3.5/site-packages/django/contrib/gis/utils/layermapping.py", line 256, in check_layer
(ogr_field, ogr_field.__name__, fld_name))
django.contrib.gis.utils.layermapping.LayerMapError: OGR field "<class 'django.contrib.gis.gdal.field.OFTDate'>" (of type OFTDate) cannot be mapped to Django DateTimeField.
Unfortunately, I can't find any useful documentation on the internet.
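The error message itself spells out the mismatch: the shapefile's date attributes come through OGR as OFTDate (date-only) fields, and LayerMapping refuses to map those onto a DateTimeField. A minimal sketch of one possible workaround, assuming those attributes really are plain dates, is to declare the corresponding model fields as DateField instead (a schema migration would then be needed):

from django.db import models

# Hypothetical sketch: only the date-typed attributes change; everything else stays as above.
class Flow(models.Model):
    flow_id = models.AutoField("ID, flow identifier", primary_key=True)
    stime = models.DateField("Start time")              # was DateTimeField
    stime_unc = models.DateField("Start time uncertainty")
    etime = models.DateField("End time")
    etime_unc = models.DateField("End time uncertainty")
    # ... remaining fields unchanged ...

Alternatively, if the time-of-day component matters, the shapefile could be rebuilt with datetime (OFTDateTime) attributes so the existing DateTimeField declarations keep working.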

Related

Error while importing an xlsx file into the database with django-import-export

Line number: 1 - 'id'
1, 28799772, 306509594, "GOLDEN NILE AAA" MChJ, 1726294, Toshkent shahri,
Chilonzor tumani, X.TURSUNQULOV KOʻCHASI, 38-UY, 974079981, makhmudjon.khakimjonov#mail.ru, 47190
Traceback (most recent call last):
File "C:\Users\AbdulAziz\AppData\Local\Programs\Python\Python38\lib\site-packages\import_export\resources.py", line 708, in import_row
instance, new = self.get_or_init_instance(instance_loader, row)
File "C:\Users\AbdulAziz\AppData\Local\Programs\Python\Python38\lib\site-packages\import_export\resources.py", line 378, in get_or_init_instance
instance = self.get_instance(instance_loader, row)
File "C:\Users\AbdulAziz\AppData\Local\Programs\Python\Python38\lib\site-packages\import_export\resources.py", line 365, in get_instance
import_id_fields = [
File "C:\Users\AbdulAziz\AppData\Local\Programs\Python\Python38\lib\site-packages\import_export\resources.py", line 366, in <listcomp>
self.fields[f] for f in self.get_import_id_fields()
KeyError: 'id'
models.py
class Company(models.Model):
    id = models.AutoField(primary_key=True)
    okpo = models.PositiveIntegerField(null=True, blank=True)
    inn = models.PositiveIntegerField(null=True, blank=False)
    name = models.CharField(max_length=25, blank=False, default='Tashkilot Nomi')
    soato = models.PositiveIntegerField(null=True, blank=False)
    address = models.CharField(max_length=50, blank=True, null=True, default='Tashkilot Adresi')
    street = models.CharField(max_length=50, blank=True, null=True, default='Tashkilot Street')
    phonenumber = models.CharField(max_length=13, blank=True, null=True, default='+998')
    email = models.EmailField(max_length=25, default='tashkilot#email.com')
    oked = models.PositiveIntegerField(null=True, blank=False)
views.py
def import_file(request):
    if request.method == 'POST':
        dataset = Dataset()
        new_employee = request.FILES['myfile']
        data_import = dataset.load(new_employee.read())
        result = CompanyResources.import_data(dataset, dry_run=True)
        if not result.has_errors():
            CompanyResources.import_data(dataset, dry_run=False)
    return render(request, 'home.html', {})
admin.py
class CompanyResources(resources.ModelResource):
    class Meta:
        models = Company
        fields = [field.name for field in Company._meta.fields if field.name != 'id']
        exclude = ['id', ]
        import_id_fields = ['id', ]

class CompanyAdmin(ImportExportModelAdmin):
    resource_class = CompanyResources
    list_display = [field.name for field in Company._meta.fields]

admin.site.register(Company, CompanyAdmin)
This is happening because you are not declaring the field in your import which uniquely identifies the row. The issue is explained here.
To fix, find the field in your import which uniquely identifies the row, and add the name of that column to the resource's Meta declaration.
class CompanyResources(resources.ModelResource):
    class Meta:
        model = Company
        import_id_fields = ['<your field name here>', ]
If you don't have a single field which uniquely identifies the row, can you use a combination of fields? If so, declare those fields in the list.
If you do not have fields which uniquely identify the row, import-export will not be able to function correctly. Please see the docs for more information.
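For example, a minimal sketch using a combination of columns as the identifier (inn plus name here is purely illustrative; substitute whatever combination is actually unique in your file):

from import_export import resources
from .models import Company

class CompanyResources(resources.ModelResource):
    class Meta:
        model = Company
        # Hypothetical composite identifier: rows are matched on these two columns together.
        import_id_fields = ['inn', 'name']
        fields = ['inn', 'name', 'okpo', 'soato', 'address', 'street',
                  'phonenumber', 'email', 'oked']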

Unit Testing Django Model Save Function

I'm creating tests to check that a custom calibration model save function updates an asset record (foreign key) if it is the latest calibration record for the asset. The save function performs exactly as expected on the live dev and production servers, and even in the Django shell, but appears to fail during testing...
models.py
class Asset(models.Model):
    ...
    requires_calibration = models.BooleanField()
    passed_calibration = models.BooleanField(default=False)
    calibration_date_prev = models.DateField(null=True, blank=True)
    calibration_date_next = models.DateField(null=True, blank=True)

class CalibrationRecord(models.Model):
    calibration_record_id = models.AutoField(primary_key=True)
    asset = models.ForeignKey(
        "myapp.Asset",
        on_delete=models.CASCADE,
        limit_choices_to={"requires_calibration": True}
    )
    calibration_date = models.DateField(default=timezone.now)
    calibration_date_next = models.DateField(null=True, blank=True)
    calibration_outcome = models.CharField(max_length=10, default="Pass")

    def save(self, *args, **kwargs):
        super(CalibrationRecord, self).save(*args, **kwargs)
        # Check if this is the latest calibration record for the asset; if so,
        # update the asset's calibration dates and status.
        latest_asset_calibration = CalibrationRecord.objects.filter(asset=self.asset.pk).order_by(
            "-calibration_date", "-calibration_record_id")[0]
        if self.pk == latest_asset_calibration.pk:
            Asset.objects.filter(pk=self.asset.pk).update(calibration_date_prev=self.calibration_date)
            if self.calibration_date_next:
                Asset.objects.filter(pk=self.asset.pk).update(calibration_date_next=self.calibration_date_next)
            else:
                Asset.objects.filter(pk=self.asset.pk).update(calibration_date_next=None)
            if self.calibration_outcome == "Pass":
                Asset.objects.filter(pk=self.asset.pk).update(passed_calibration=True)
            else:
                Asset.objects.filter(pk=self.asset.pk).update(passed_calibration=False)
tests_models.py example failing test
class CalibrationRecordTests(TestCase):

    def test_calibration_record_updates_asset_cal_date_prev(self):
        """
        All calibration records should update the related Asset record's
        "calibration_date_prev" to calibration_date.
        """
        asset1 = Asset.objects.create(asset_description="Test Asset 2", requires_calibration=True)
        self.assertIsNone(asset1.calibration_date_prev)
        cal = CalibrationRecord.objects.create(asset=asset1, calibration_description="Test Calibration 2",
                                               calibration_date=timezone.now())
        self.assertEqual(cal.calibration_date, asset1.calibration_date_prev)
Error log
======================================================================
FAIL: test_calibration_record_updates_asset_cal_date_prev (assetregister.tests_models.CalibrationRecordTests)
----------------------------------------------------------------------
Traceback (most recent call last):
File "C:\[path]\app\tests_models.py", line 159, in test_calibration_record_updates_asset_cal_date_prev
self.assertEqual(cal.calibration_date, asset1.calibration_date_prev)
AssertionError: datetime.datetime(2018, 2, 26, 12, 26, 34, 457513, tzinfo=<UTC>) != None
======================================================================
All of my tests relating to this custom calibration record save function appear to fail because the related asset record isn't updated when it should be.
Any ideas why this would work during dev and production but not during testing?
Even though the .create() method should automatically call .save() afterwards, I also tried an explicit .save() after creating the calibration record, but it still seems to fail.
Solved!
Custom save functions correctly update the database, but not the model instance being tested! You need to refresh the model instance to pick up any updates, for example by fetching it again with [model].objects.get().
e.g.:
asset = Asset.objects.create(asset_description="Test Asset 2", requires_calibration=True)
self.assertIsNone(asset.calibration_date_prev)
CalibrationRecord.objects.create(asset=asset, calibration_description="Test Calibration 2",
                                 calibration_date=timezone.now())
cal = CalibrationRecord.objects.get(calibration_description="Test Calibration 2")
asset = Asset.objects.get(asset_description="Test Asset 2")
self.assertEqual(cal.calibration_date, asset.calibration_date_prev)
If you find yourself here, you'd be better off reloading the object with Model.refresh_from_db(using=None, fields=None); see the Django documentation.
Also check out Reload django object from database.
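A minimal sketch of the same test using refresh_from_db() (available since Django 1.8) instead of re-querying, inside the same CalibrationRecordTests class:

def test_calibration_record_updates_asset_cal_date_prev(self):
    asset = Asset.objects.create(asset_description="Test Asset 2", requires_calibration=True)
    cal = CalibrationRecord.objects.create(asset=asset, calibration_description="Test Calibration 2",
                                           calibration_date=timezone.now())
    # The custom save() updated the database rows, not these in-memory instances,
    # so reload both before asserting.
    cal.refresh_from_db()
    asset.refresh_from_db()
    self.assertEqual(cal.calibration_date, asset.calibration_date_prev)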

attempt to import csv using django-import-export results in tablib.core.InvalidDimensions

I am trying to import some csv data into a postgresql database using django-import-export. I am using python 3.4, django 1.8.1, and import-export 0.5.1.
My settings file has the following directive:
IMPORT_EXPORT_USE_TRANSACTIONS = True
My model has 10 fields; PostgreSQL auto-increments an 11th field, the id.
class BankTransaction(models.Model):
    tacct = models.ForeignKey(MetaAcct, on_delete=models.CASCADE)
    tdate = models.DateTimeField('tdate')
    tctpty = models.ForeignKey(Counterparty, on_delete=models.CASCADE)
    TRANSACTOR_CHOICES = (
        ('XXX', 'transactor1'),
        ('YYY', 'transactor2'),
    )
    tuser = models.CharField(max_length=3, choices=TRANSACTOR_CHOICES, default="LMH")  # eg LMH
    trec = models.IntegerField(default=0, blank=True)
    tnote = models.CharField(default='', max_length=50, blank=True)
    tcheckno = models.IntegerField(default=0, blank=True, null=True)
    ttype = models.ForeignKey(TransactionType, on_delete=models.CASCADE)
    tamt = models.DecimalField(max_digits=10, decimal_places=2)
    tmemo = models.CharField(default='', max_length=20, blank=True)

    def __str__(self):
        label = str.join(',', (str(self.tacct), str(self.tctpty)))
        return label
In admin.py I whitelist 8 fields, including the id field:
from django.contrib import admin
from import_export import resources
from .models import MetaAcct, Counterparty, TransactionType, BankTransaction

admin.site.register(MetaAcct)
admin.site.register(Counterparty)
admin.site.register(TransactionType)
admin.site.register(BankTransaction)

class MetaAcctResource(resources.ModelResource):
    class Meta:
        model = MetaAcct

class BankTransactionResource(resources.ModelResource):
    class Meta:
        model = BankTransaction
        fields = ('id', 'tacct', 'tdate', 'tctpty', 'tuser', 'trec', 'ttype', 'tamt')
My shell commands come right out of Read-the-docs:
>>> import tablib
>>> from import_export import resources
>>> from mny.models import BankTransaction
>>> banktransaction_resource = resources.modelresource_factory(model=BankTransaction)()
>>> dataset = tablib.Dataset(['','/Users/userid/Downloads/pa upload/test.csv'], headers=['id','tacct','tdate','tctpty','tuser','trec','ttype','tamt'])
The file I am trying to import, test.csv, looks like this:
id,tacct,tdate,tctpty,tuser,trec,ttype,tamt
,test,2015-01-13 0:00:00,TEST_ACCT,XXX,20151031,xfer,20000
,test,2015-01-31 0:00:00,BANK_ACCT,XXX,20151031,Int Inc,0.09
,test,2015-11-30 0:00:00,BANK_ACCT,XXX,20151130,Int Inc,1.49
,test,2015-12-17 0:00:00,TEST_ACCT,XXX,20151231,xfer,12000
,test,2015-12-31 0:00:00,BANK_ACCT,XXX,20151231,Int Inc,0.2
,test,2016-01-31 0:00:00,BANK_ACCT,XXX,20160131,Int Inc,0.24
,test,2016-02-29 0:00:00,BANK_ACCT,XXX,20160229,Int Inc,0.23
,test,2016-03-31 0:00:00,BANK_ACCT,XXX,20160331,Int Inc,0.25
,test,2016-04-08 0:00:00,TEST_ACCT,XXX,20160427,xfer,-6000
,test,2016-04-30 0:00:00,BANK_ACCT,XXX,20160430,Int Inc,0.2
,test,2016-05-31 0:00:00,BANK_ACCT,XXX,20160531,Int Inc,0.2
,test,2016-06-30 0:00:00,BANK_ACCT,XXX,20160630,Int Inc,0.19
,test,2016-07-31 0:00:00,BANK_ACCT,XXX,20160731,Int inc,0.2
,test,2016-08-31 0:00:00,BANK_ACCT,XXX,20160831,Int Inc,0.2
,test,2016-09-30 0:00:00,BANK_ACCT,XXX,20160930,Int Inc,0.19
,test,2016-10-31 0:00:00,BANK_ACCT,XXX,20161031,Int Inc,0.2
,test,2016-11-30 0:00:00,BANK_ACCT,XXX,20161130,Int Inc,0.19
,test,2017-01-30 0:00:00,TEST_ACCT,XXX,20170131,xfer,15000
,test,2017-01-31 0:00:00,BANK_ACCT,XXX,20170131,Inc Inc,0.21
Here is the traceback:
Traceback (most recent call last):
File "<console>", line 1, in <module>
File "/Users/userid/Documents/django/testenv/lib/python3.4/site-packages/tablib/core.py", line 169, in __init__
self.headers = kwargs.get('headers')
File "/Users/userid/Documents/django/testenv/lib/python3.4/site-packages/tablib/core.py", line 338, in _set_headers
self._validate(collection)
File "/Users/userid/Documents/django/testenv/lib/python3.4/site-packages/tablib/core.py", line 286, in _validate
raise InvalidDimensions
tablib.core.InvalidDimensions
Because of testing, the latest id in the database model is 21, so the next auto-increment should be 22.
It looks as if you may be using the tablib Dataset object incorrectly. Try this...
tablib.Dataset(headers=['id', 'tacct', 'tdate', 'tctpty', 'tuser', 'trec', 'ttype', 'tamt'])\
    .load(open('/Users/userid/Downloads/pa upload/test.csv').read())
Check out the tutorial to see other examples.
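Putting it together, a minimal sketch of the full flow, loading the CSV into a Dataset and then handing it to the resource (the path is the one from the question; dry_run=True is the usual way to validate before committing):

import tablib
from import_export import resources
from mny.models import BankTransaction

banktransaction_resource = resources.modelresource_factory(model=BankTransaction)()

# Read the CSV file and let tablib parse it, rather than passing the path as data.
with open('/Users/userid/Downloads/pa upload/test.csv') as f:
    dataset = tablib.Dataset()
    dataset.csv = f.read()

# Validate first, then import for real if nothing failed.
result = banktransaction_resource.import_data(dataset, dry_run=True)
if not result.has_errors():
    banktransaction_resource.import_data(dataset, dry_run=False)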

Django primary key does not exist

I have a script that I use to populate an sqlite3 database from a .txt file in Django, but once the data is loaded I don't seem to be able to reference the objects by pk. I can confirm the data has been loaded: in ./manage.py shell, importing the City model and running City.objects.all() shows that the models are in the database, but City.objects.get(pk=1) raises DoesNotExist.
I have had some issues with the database in the past, so I recently ran ./manage.py flush followed by re-importing the data with the script. This is when things started to be an issue. Any ideas?
Also, I know that I don't strictly need the uniqueID value, but I don't think it is causing the issue; it hasn't in the past.
script for loading the data:
import os
os.environ.setdefault("DJANGO_SETTINGS_MODULE", "mygame.settings")
from geogame.models import City

f = open("testing.txt", 'r').readlines()
i = 1
for line in f:
    line = line.strip('\n')
    line = line.split('\t')
    line[6] = 1
    line[7] = 100
    c = City(name=line[1], uniqueID=i, xcoord=int(line[3]), ycoord=int(line[2]),
             country=line[4], population=line[5], times_played=line[6],
             average_distance=line[7], difficulty_rating=line[8])
    c.save()
    i += 1
the models.py file
from django.db import models
class City(models.Model):
    name = models.CharField(max_length=50)
    uniqueID = models.IntegerField(default=0)
    ycoord = models.IntegerField(default=0)
    xcoord = models.IntegerField(default=0)
    country = models.CharField(max_length=50)
    population = models.IntegerField(default=0)
    times_played = models.IntegerField(default=0)
    average_distance = models.FloatField(default=0)
    difficulty_rating = models.FloatField(default=0)

    def __unicode__(self):
        return self.name
And the error message:
>>> City.objects.get(pk=1)
Traceback (most recent call last):
File "<console>", line 1, in <module>
File "/Library/Python/2.7/site-packages/django/db/models/manager.py", line 127, in manager_method
return getattr(self.get_queryset(), name)(*args, **kwargs)
File "/Library/Python/2.7/site-packages/django/db/models/query.py", line 334, in get
self.model._meta.object_name
DoesNotExist: City matching query does not exist.
Try adding id=i when you create the City object, so that every time you create a new one the id increments by 1:
c = City(id=i, name=line[1], uniqueID=i,
         xcoord=int(line[3]), ycoord=int(line[2]),
         country=line[4],
         population=line[5], times_played=line[6], average_distance=line[7],
         difficulty_rating=line[8])
The id in your model has the type AutoField. This field type automatically increments its value when a new row is added.
You can override the id when you create a new row:
c = City(id=1, name='test', ...)
c.save()
City.objects.get(pk=1)
or read inserted id after save model
c = City(name='test', ...)
c.save()
lastId = c.pk
City.objects.get(pk=lastId)
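Applied to the loading script from the question, a minimal sketch of the second approach: let the AutoField assign the primary key and read it back from the saved instance rather than assuming pk=1 exists (the file name and settings module are taken from the question; django.setup() is needed on Django 1.7+ for standalone scripts):

import os
os.environ.setdefault("DJANGO_SETTINGS_MODULE", "mygame.settings")
import django
django.setup()  # required on Django 1.7+ before importing models in a standalone script

from geogame.models import City

first_pk = None
with open("testing.txt") as f:
    for i, raw in enumerate(f, start=1):
        line = raw.strip('\n').split('\t')
        line[6] = 1
        line[7] = 100
        c = City(name=line[1], uniqueID=i, xcoord=int(line[3]), ycoord=int(line[2]),
                 country=line[4], population=line[5], times_played=line[6],
                 average_distance=line[7], difficulty_rating=line[8])
        c.save()
        if first_pk is None:
            first_pk = c.pk  # assigned by the database; not necessarily 1 after a flush

print(City.objects.get(pk=first_pk))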

Foreign Key in django migration using django-import-export

I'm using django-import-export to load csv files in a migration file which I understand is current best practise for Django 1.7 to load initial data. This worked fine for the first file:
class Country(models.Model):
    ISO2 = models.CharField(max_length=2, primary_key=True)
    name = models.CharField(max_length=50, unique=True)
and here is the entire migration file. Note that ISO2 is the primary key, so it required the additional line import_id_fields = ['ISO2']. The code is adapted from an answer to this question: Loading initial data with Django 1.7 and data migrations:
from django.conf import settings
from django.db import migrations
import tablib
from import_export import resources
from web.models import Country
import os
import csv

# load initial data - assume it is all in web/fixtures directory

class CountryResource(resources.ModelResource):
    class Meta:
        model = Country
        import_id_fields = ['ISO2']

fixture_dir = os.path.abspath(os.path.join(settings.PROJECT_ROOT, 'web/fixtures'))
fixture_filename = 'web_country.csv'

def load_fixture(apps, schema_editor):
    fixture_file = os.path.join(fixture_dir, fixture_filename)
    with open(fixture_file, 'r') as content_file:
        content = content_file.read()
    resource = CountryResource()
    data = tablib.Dataset()
    data.csv = content
    result = resource.import_data(data, dry_run=False,
                                  raise_errors=True)

def unload_fixture(apps, schema_editor):
    "Brutally deleting all entries for this model..."
    country = apps.get_model("web", "Country")
    country.objects.all().delete()

class Migration(migrations.Migration):

    dependencies = [
        ('web', '0001_initial'),
    ]

    operations = [
        migrations.RunPython(load_fixture, reverse_code=unload_fixture),
    ]
Now for the next file which has a primary key relationship to that one:
class CountryFootprint(models.Model):
    ISO2 = models.ForeignKey(Country)
    footprint = models.DecimalField(max_digits=18, decimal_places=6)
and a subset of the migration file, with my attempt to link up the foreign key:
class CountryFootprintResource(resources.ModelResource):
    ISO2_id = fields.Field(widget=widgets.ForeignKeyWidget(Country, 'ISO2'))

    class Meta:
        model = CountryFootprint
This gives me:
django.db.models.fields.related.RelatedObjectDoesNotExist: CountryFootprint has no ISO2.
Also tried:
ISO2_id = fields.Field(column_name='ISO2_id', attribute='ISO2', widget=widgets.ForeignKeyWidget(Country, 'ISO2'))
File "/Users/phoebebr/Development/gmd/web/migrations/0003_auto_20141220_1931.py", line 43, in load_fixture
raise_errors=True)
File "/Users/phoebebr/.virtualenvs/gmd/lib/python2.7/site-packages/import_export/resources.py", line 359, in import_data
six.reraise(*sys.exc_info())
File "/Users/phoebebr/.virtualenvs/gmd/lib/python2.7/site-packages/import_export/resources.py", line 348, in import_data
row_result.object_repr = force_text(instance)
File "/Users/phoebebr/.virtualenvs/gmd/lib/python2.7/site-packages/django/utils/encoding.py", line 85, in force_text
s = six.text_type(s)
TypeError: coercing to Unicode: need string or buffer, Country found
I've read the documentation and I'm sure the answer is there but it's just not jumping out at me!
Either of these two lines works:
ISO2_id = fields.Field( widget=widgets.ForeignKeyWidget(Country, 'ISO2'))
or
ISO2_id = fields.Field(column_name='ISO2_id', attribute='ISO2', widget=widgets.ForeignKeyWidget(Country, 'ISO2'))
using just:
fields = ('ISO2', 'footprint')
gives the error
django.db.models.fields.related.RelatedObjectDoesNotExist: CountryFootprint has no ISO2.
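For reference, a minimal sketch of the complete resource with the second (explicit) field declaration; the imports are the fields, resources and widgets modules that django-import-export provides, and the ISO2_id column name assumes the CSV header uses that name:

from import_export import fields, resources, widgets
from web.models import Country, CountryFootprint

class CountryFootprintResource(resources.ModelResource):
    # Map the CSV's ISO2_id column onto the ISO2 foreign key, looking up
    # Country rows by their ISO2 primary key.
    ISO2_id = fields.Field(column_name='ISO2_id', attribute='ISO2',
                           widget=widgets.ForeignKeyWidget(Country, 'ISO2'))

    class Meta:
        model = CountryFootprint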
The coercing to Unicode error was caused by not returning a string from the __unicode__ method:
def __unicode__(self):
    return self.ISO2
should have been
def __unicode__(self):
    return self.ISO2.name
So many coding problems are solved by a good night's sleep!