I have a script that I am using to populate an sqlite3 database from a .txt file in Django, but once the data is loaded I don't seem to be able to reference the objects by pk. I can confirm the data has been loaded: running ./manage.py shell, importing the City model, and calling City.objects.all() shows the models are in the database. However, City.objects.get(pk=1) reports that the City object does not exist.
I have had some issues with the database in the past so I recently did ./manage.py flush followed by a reinput of the data with the script. This is when things started to be an issue. Any ideas?
Also I know that I don't strictly need the uniqueID value but I don't think this is causing an issue, it hasn't in the past.
script for loading the data:
import os
os.environ.setdefault("DJANGO_SETTINGS_MODULE", "mygame.settings")
from geogame.models import City

# Populate the City table from a tab-separated text file.
# Column layout (assumed from the constructor below -- TODO confirm):
#   [1]=name, [2]=ycoord, [3]=xcoord, [4]=country, [5]=population,
#   [8]=difficulty_rating; columns 6 and 7 are overwritten with seed values.
#
# Fixes vs. the original: the file handle is now closed deterministically
# (the original open(...).readlines() never closed it), and the manual
# `i` counter is replaced with enumerate().
with open("testing.txt", "r") as source:
    for row_number, raw_line in enumerate(source, start=1):
        fields = raw_line.strip("\n").split("\t")
        fields[6] = 1    # seed value for times_played
        fields[7] = 100  # seed value for average_distance
        City(
            name=fields[1],
            uniqueID=row_number,
            xcoord=int(fields[3]),
            ycoord=int(fields[2]),
            country=fields[4],
            population=fields[5],
            times_played=fields[6],
            average_distance=fields[7],
            difficulty_rating=fields[8],
        ).save()
the models.py file
from django.db import models
class City(models.Model):
    """A city record used by the geography game.

    Note: ``uniqueID`` is an ordinary IntegerField, not the primary key;
    Django still adds its own implicit auto-increment ``id`` field, and
    ``pk`` refers to that field, not to ``uniqueID``.
    """

    name = models.CharField(max_length=50)
    # Position of the row in the source file at import time (1-based).
    uniqueID = models.IntegerField(default=0)
    ycoord = models.IntegerField(default=0)
    xcoord = models.IntegerField(default=0)
    country = models.CharField(max_length=50)
    population = models.IntegerField(default=0)
    times_played = models.IntegerField(default=0)
    average_distance = models.FloatField(default=0)
    difficulty_rating = models.FloatField(default=0)

    def __unicode__(self):
        # Python 2 string representation (the tracebacks show Python 2.7).
        return self.name
And the error message:
>>> City.objects.get(pk=1)
Traceback (most recent call last):
File "<console>", line 1, in <module>
File "/Library/Python/2.7/site-packages/django/db/models/manager.py", line 127, in manager_method
return getattr(self.get_queryset(), name)(*args, **kwargs)
File "/Library/Python/2.7/site-packages/django/db/models/query.py", line 334, in get
self.model._meta.object_name
DoesNotExist: City matching query does not exist.
Try adding id=i in c = City(), so that every time you create a new object its id is explicitly set to the next value of the counter.
# Explicitly set the auto primary key so pk matches the loop counter i.
# NOTE(review): this bypasses the AutoField's normal assignment on save --
# confirm this is desirable; usually the database assigns the id itself.
c = City(id=i, name=line[1], uniqueID=i,
         xcoord=int(line[3]), ycoord=int(line[2]),
         country=line[4],
         population=line[5], times_played=line[6], average_distance=line[7],
         difficulty_rating=line[8])
The id in your model has type AutoField. This type of field automatically increments its value when a new row is added.
You can override the id when you create a new row:
# Create a row with an explicit primary key, then fetch it back by that pk.
c = City(id=1, name='test', ...)
c.save()
City.objects.get(pk=1)
or read inserted id after save model
# Let the AutoField assign the id, then read it back after save().
c = City(name='test', ...)
c.save()
lastId = c.pk  # pk is populated by save()
City.objects.get(pk=lastId)
Related
Line number: 1 - 'id'
1, 28799772, 306509594, "GOLDEN NILE AAA" MChJ, 1726294, Toshkent shahri,
Chilonzor tumani, X.TURSUNQULOV KOʻCHASI, 38-UY, 974079981, makhmudjon.khakimjonov#mail.ru, 47190
Traceback (most recent call last):
File "C:\Users\AbdulAziz\AppData\Local\Programs\Python\Python38\lib\site-packages\import_export\resources.py", line 708, in import_row
instance, new = self.get_or_init_instance(instance_loader, row)
File "C:\Users\AbdulAziz\AppData\Local\Programs\Python\Python38\lib\site-packages\import_export\resources.py", line 378, in get_or_init_instance
instance = self.get_instance(instance_loader, row)
File "C:\Users\AbdulAziz\AppData\Local\Programs\Python\Python38\lib\site-packages\import_export\resources.py", line 365, in get_instance
import_id_fields = [
File "C:\Users\AbdulAziz\AppData\Local\Programs\Python\Python38\lib\site-packages\import_export\resources.py", line 366, in <listcomp>
self.fields[f] for f in self.get_import_id_fields()
KeyError: 'id'
models.py
class Company(models.Model):
    """Organisation record imported from spreadsheet rows via django-import-export."""

    # Explicit auto-increment primary key (equivalent to Django's implicit id).
    id = models.AutoField(primary_key=True)
    okpo = models.PositiveIntegerField(null=True,blank=True)
    inn = models.PositiveIntegerField(null=True,blank=False)
    name = models.CharField(max_length=25,blank=False,default='Tashkilot Nomi')
    soato = models.PositiveIntegerField(null=True,blank=False)
    address = models.CharField(max_length=50,blank=True,null=True,default='Tashkilot Adresi')
    street = models.CharField(max_length=50,blank=True,null=True,default='Tashkilot Street')
    phonenumber = models.CharField(max_length=13, blank=True, null=True,default='+998')
    email = models.EmailField(max_length=25,default='tashkilot#email.com')
    oked = models.PositiveIntegerField(null=True,blank=False)
views.py
def import_file(request):
    """Handle a file upload and import it through CompanyResources.

    Renders home.html for both GET and POST. Import errors are only
    checked via the dry run; they are not reported back to the user.
    """
    if request.method == 'POST':
        dataset = Dataset()
        new_employee = request.FILES['myfile']
        # Parse the uploaded bytes into the dataset in place.
        dataset.load(new_employee.read())
        # import_data is an instance method; the original called it on the
        # class itself, which cannot work -- instantiate the resource first.
        resource = CompanyResources()
        result = resource.import_data(dataset, dry_run=True)  # validate first
        if not result.has_errors():
            resource.import_data(dataset, dry_run=False)  # commit for real
    # The original only returned inside the POST branch, so GET requests
    # returned None; always render the page.
    return render(request, 'home.html', {})
admin.py
class CompanyResources(resources.ModelResource):
    """import-export resource for Company rows."""

    class Meta:
        # The Meta option is ``model`` (singular); the original's
        # ``models = Company`` was silently ignored.
        model = Company
        # 'id' is the row-matching key, so it must be present in the
        # importable fields. The original excluded it while also naming it
        # in import_id_fields, which is what raised KeyError: 'id'.
        fields = [field.name for field in Company._meta.fields]
        import_id_fields = ['id',]
class CompanyAdmin(ImportExportModelAdmin):
    # Wire the import/export resource into the admin and show every model
    # field in the change list.
    resource_class = CompanyResources
    list_display = [field.name for field in Company._meta.fields]


admin.site.register(Company,CompanyAdmin)
This is happening because you are not declaring the field in your import which uniquely identifies the row. The issue is explained here.
To fix, find the field in your import which uniquely identifies the row. Add the name of this column to the import_id_fields list in the resource declaration.
class CompanyResources(resources.ModelResource):
    class Meta:
        # Note: the option is ``model`` (singular); ``models = Company``
        # would be silently ignored.
        model = Company
        # Replace the placeholder with the column that uniquely
        # identifies each row in your import file.
        import_id_fields = ['<your field name here>',]
If you don't have a single field which uniquely identifies the row, can you use a combination of fields? If so, declare those fields in the list.
If you do not have fields which uniquely identify the row, import-export will not be able to function correctly. Please see the docs for more information.
I am trying to use 'natural keys' for serialization (docs) in a "manage.py dumpdata" command:
python manage.py dumpdata --natural-primary --natural-foreign --indent 4 --format json --verbosity 1 > tests\test_fixtures\test_db2.json
and I am getting the following error when I use --natural-foreign on other apps that use the Project or Task model (which they all must by design):
CommandError: Unable to serialize database: Object of type Project is not JSON serializable
Exception ignored in: <generator object cursor_iter at 0x000001EF62481B48>
Traceback (most recent call last):
File "C:\Users\Andrew\anaconda3\envs\Acejet_development\lib\site-packages\django\db\models\sql\compiler.py", line 1586, in cursor_iter
cursor.close()
sqlite3.ProgrammingError: Cannot operate on a closed database.
If I just dumpdata from this, the 'projects' app, it works, but other apps are built with entities related to Project or Task and there the --natural-foreign option fails.
The problem occurs when a model (say Question) calls for a natural_key from Task, which in turn includes a call for a natural_key from Project.
If I use the Pycharm Python Console to access querysets of Projects or Tasks ('q' here), this works:
serializers.serialize('json', q, indent=2, use_natural_foreign_keys=True, use_natural_primary_keys=True)
But if 'w' is a list of Question objects from another app that have a Task foreign key I get this error:
Traceback (most recent call last):
File "<input>", line 1, in <module>
File "C:\Users\Andrew\anaconda3\envs\Acejet_development\lib\site-packages\django\core\serializers\__init__.py", line 128, in serialize
s.serialize(queryset, **options)
File "C:\Users\Andrew\anaconda3\envs\Acejet_development\lib\site-packages\django\core\serializers\base.py", line 115, in serialize
self.end_object(obj)
File "C:\Users\Andrew\anaconda3\envs\Acejet_development\lib\site-packages\django\core\serializers\json.py", line 53, in end_object
json.dump(self.get_dump_object(obj), self.stream, **self.json_kwargs)
File "C:\Users\Andrew\anaconda3\envs\Acejet_development\lib\json\__init__.py", line 179, in dump
for chunk in iterable:
File "C:\Users\Andrew\anaconda3\envs\Acejet_development\lib\json\encoder.py", line 431, in _iterencode
yield from _iterencode_dict(o, _current_indent_level)
File "C:\Users\Andrew\anaconda3\envs\Acejet_development\lib\json\encoder.py", line 405, in _iterencode_dict
yield from chunks
File "C:\Users\Andrew\anaconda3\envs\Acejet_development\lib\json\encoder.py", line 405, in _iterencode_dict
yield from chunks
File "C:\Users\Andrew\anaconda3\envs\Acejet_development\lib\json\encoder.py", line 325, in _iterencode_list
yield from chunks
File "C:\Users\Andrew\anaconda3\envs\Acejet_development\lib\json\encoder.py", line 438, in _iterencode
o = _default(o)
File "C:\Users\Andrew\anaconda3\envs\Acejet_development\lib\site-packages\django\core\serializers\json.py", line 104, in default
return super().default(o)
File "C:\Users\Andrew\anaconda3\envs\Acejet_development\lib\json\encoder.py", line 179, in default
raise TypeError(f'Object of type {o.__class__.__name__} '
TypeError: Object of type Project is not JSON serializable
The models are:
# projects.models.py
class BaseModelWithHistory(models.Model):
    """Abstract base adding history tracking and natural-key serialization.

    Subclasses override ``natural_key_fields`` with the fields that
    uniquely identify a row.
    """

    history = HistoricalRecords(inherit=True)
    natural_key_fields = ('id',)  # default; subclasses override

    class Meta:
        abstract = True

    def natural_key(self):
        """Return a flat tuple of JSON-serializable natural-key values.

        If a listed field is itself a related model that defines
        natural_key() (e.g. Task's ``project``), recurse into it and
        splice its key in. The original returned the related model
        instance itself, which is what made dumpdata fail with
        "Object of type Project is not JSON serializable".

        NOTE(review): loaddata needs a matching get_by_natural_key on the
        manager that understands this flattened tuple -- confirm.
        """
        key = []
        for fieldname in self.natural_key_fields:
            value = getattr(self, fieldname)
            if hasattr(value, 'natural_key'):
                key.extend(value.natural_key())
            else:
                key.append(value)
        return tuple(key)
    # natural_key.dependencies = ['projects.Project', 'projects.Task'] # serialize these first.
class Project(BaseModelWithHistory):
    """
    'Projects' group Tasks.
    """

    project_name = models.CharField(max_length=200, default="development_project")
    project_short_description = models.CharField(
        max_length=500,
        default="This is the default text.")
    # Groups whose members may see the project (filtering is applied by the
    # manager when called with an identified user, per the app's design).
    target_group = models.ManyToManyField(Group, blank=True)
    objects = ProjectDiscreteManager()
    # Serialize by human-readable name instead of the numeric id.
    natural_key_fields = ('project_name',)
class Task(BaseModelWithHistory):
    number = models.PositiveIntegerField(default=0)
    name = models.CharField(max_length=200, default='new task')
    project = models.ForeignKey(Project, on_delete=models.CASCADE)
    target_group = models.ManyToManyField(Group, blank=True)
    app_label = models.CharField(max_length=80, choices=app_choices(), null=True, blank=True)
    objects = discrete_manager_factory('project')
    # Includes the FK 'project': natural_key() will pick up a Project
    # instance here, so serialization must flatten it to scalars.
    natural_key_fields = ('project', 'name', 'number')
Things that I wouldn't expect to cause this problem but I could certainly be wrong:
The ProjectDiscreteManager and others created by discrete_manager_factory() behave exactly as the default manager (models.Manager()), unless its called with a request from an identified user, in which case it adds a filter to see if that user is in the Group.
All models define the natural_keys tuple because the parent class defines it as ('id',); most models overwrite this with more representative fields.
With the natural_key.dependencies list set for all models to prioritize Project and then Task, I get a 'can't resolve dependencies' error for every other model. I think this ticket relates, but am not sure how to track down whether this fix is already in the Django 3.0.6 I'm using and I should just straighten up & fly right, or if my Han Solo 'this should wooork' will one day soon be rewarded. [Update: I worked out it is coming in Django 3.1.1, but I'm not sure that it is going to fix the "can't resolve dependencies" error I've created for myself.]
Hi I'm trying to populate the PostGIS database of my Django application using a Shapefile. My models.py is the following :
class Flow(models.Model):
    """A flow feature stored in the PostGIS ``flow`` table."""

    # Primary key
    flow_id = models.AutoField("ID, flow identifier", primary_key=True)
    # Other attributes
    # NOTE(review): these fields are populated from shapefile columns of OGR
    # type OFTDate, and LayerMapping refuses to map OFTDate to a
    # DateTimeField (see the LayerMapError) -- confirm whether DateField
    # would suffice for the source data.
    stime = models.DateTimeField("Start time")
    stime_bc = models.IntegerField("Year of start time before Christ")
    stime_unc = models.DateTimeField("Start time uncertainty")
    etime = models.DateTimeField("End time")
    etime_bc = models.IntegerField("Year of end time before Christ")
    etime_unc = models.DateTimeField("End time uncertainty")
    final_vers = models.BooleanField("1 if it's the final version, else 0",
                                     default=False)
    com = models.CharField("Comments", max_length=255)
    loaddate = models.DateTimeField("Load date, the date the data was entered "
                                    "(in UTC)")
    pubdate = models.DateTimeField("Publish date, the date the data become "
                                   "public")
    cb_ids = models.ManyToManyField(Bibliographic)
    # Foreign key(s)
    fissure_id = models.ForeignKey(Fissure, null=True,
                                   related_name='flow_fissure_id',
                                   on_delete=models.CASCADE)
    cc_load_id = models.ForeignKey(Contact, null=True,
                                   related_name='flow_cc_id_load',
                                   on_delete=models.CASCADE)
    cc_pub_id = models.ForeignKey(Contact, null=True,
                                  related_name='flow_cc_id_pub',
                                  on_delete=models.CASCADE)

    # Override default table name
    class Meta:
        db_table = 'flow'
I want to add the features of my coulees.shp Shapefile into my database (more precisely in the flow table). The attribute table looks like this:
Atribute table
To do so I use the Django Layer Mapping:
import os
from django.contrib.gis.utils import LayerMapping
from .models import Flow

# Shapefile column -> Flow field mapping ('geometry' maps the geometry column).
mapping = {'stime':'stime', 'stime_bc':'stime_bc', 'stime_unc':'stime_unc', 'etime':'etime', 'etime_bc':'etime_bc', 'etime_unc':'etime_unc', 'com':'com', 'loaddate':'loaddate', 'pubdate':'pubdate', 'geometry':'geometry'}
# NOTE(review): os.path.join discards every component before an absolute
# one, so this resolves to '/home/sysop/Coulees/coulees.shp' and the
# module-relative 'data' directory part is dead -- confirm which path was
# actually intended.
shp = os.path.abspath(
    os.path.join(os.path.dirname(__file__), 'data', '/home/sysop/Coulees/coulees.shp')
)
def run(verbose=True):
    """Load the shapefile features into the Flow table.

    Args:
        verbose: forwarded to LayerMapping.save() so callers can silence
            per-feature output. The original accepted this parameter but
            always passed verbose=True regardless.
    """
    lm = LayerMapping(
        Flow, shp, mapping,
        transform=True
    )
    lm.save(verbose=verbose)
But when I try to run this function I get the following error:
Traceback (most recent call last):
File "<console>", line 1, in <module>
File "/opt/mapobs/mapobs/app/load.py", line 30, in run
transform=True
File "/opt/mapobs/lib/python3.5/site-packages/django/contrib/gis/utils /layermapping.py", line 106, in __init__
self.check_layer()
File "/opt/mapobs/lib/python3.5/site-packages/django/contrib/gis/utils/layermapping.py", line 256, in check_layer
(ogr_field, ogr_field.__name__, fld_name))
django.contrib.gis.utils.layermapping.LayerMapError: OGR field "<class 'django.contrib.gis.gdal.field.OFTDate'>" (of type OFTDate) cannot be mapped to Django DateTimeField.
Unfortunately, I can't find any useful documentation on the internet.
I am trying to import some csv data into a postgresql database using django-import-export. I am using python 3.4, django 1.8.1, and import-export 0.5.1.
My settings file has the following directive:
IMPORT_EXPORT_USE_TRANSACTIONS = True
my model has 10 fields. Postgresql autoincrements an 11th field, the id.
class BankTransaction(models.Model):
    """A single bank-account transaction imported from CSV."""

    tacct = models.ForeignKey(MetaAcct,on_delete=models.CASCADE)
    tdate = models.DateTimeField('tdate')
    tctpty = models.ForeignKey(Counterparty,on_delete=models.CASCADE)
    TRANSACTOR_CHOICES = (
        ('XXX', 'transactor1'),
        ('YYY', 'transactor2'),
    )
    # NOTE(review): default "LMH" is not among TRANSACTOR_CHOICES -- confirm.
    tuser = models.CharField(max_length=3,choices=TRANSACTOR_CHOICES,default="LMH") # eg LMH
    trec = models.IntegerField(default=0, blank=True)
    tnote = models.CharField(default='',max_length=50, blank=True)
    tcheckno = models.IntegerField(default=0, blank=True, null=True)
    ttype = models.ForeignKey(TransactionType,on_delete=models.CASCADE)
    tamt = models.DecimalField(max_digits=10, decimal_places=2)
    tmemo = models.CharField(default='',max_length=20, blank=True)

    def __str__(self):
        # Label rows as "<account>,<counterparty>" in admin listings.
        label = str.join(',',(str(self.tacct),str(self.tctpty)))
        return label
In admin.py I whitelist 8 fields, including the id field:
from django.contrib import admin
from import_export import resources
from .models import MetaAcct,Counterparty,TransactionType,BankTransaction
# Plain admin registrations (no import/export admin integration shown here).
admin.site.register(MetaAcct)
admin.site.register(Counterparty)
admin.site.register(TransactionType)
admin.site.register(BankTransaction)
class MetaAcctResource(resources.ModelResource):
    """import-export resource for MetaAcct with default field handling."""

    class Meta:
        model = MetaAcct
class BankTransactionResource(resources.ModelResource):
    """import-export resource for BankTransaction."""

    class Meta:
        model = BankTransaction
        # Whitelist of importable columns; 'id' is included so existing
        # rows can be matched, and blank ids let the database autoincrement.
        fields = ('id','tacct','tdate','tctpty','tuser','trec','ttype','tamt')
My shell commands come right out of Read-the-docs:
>>> import tablib
>>> from import_export import resources
>>> from mny.models import BankTransaction
>>> banktransaction_resource = resources.modelresource_factory(model=BankTransaction)()
>>> dataset = tablib.Dataset(['','/Users/userid/Downloads/pa upload/test.csv'], headers=['id','tacct','tdate','tctpty','tuser','trec','ttype','tamt'])
The file I am trying to import, test.csv, looks like this:
id,tacct,tdate,tctpty,tuser,trec,ttype,tamt
,test,2015-01-13 0:00:00,TEST_ACCT,XXX,20151031,xfer,20000
,test,2015-01-31 0:00:00,BANK_ACCT,XXX,20151031,Int Inc,0.09
,test,2015-11-30 0:00:00,BANK_ACCT,XXX,20151130,Int Inc,1.49
,test,2015-12-17 0:00:00,TEST_ACCT,XXX,20151231,xfer,12000
,test,2015-12-31 0:00:00,BANK_ACCT,XXX,20151231,Int Inc,0.2
,test,2016-01-31 0:00:00,BANK_ACCT,XXX,20160131,Int Inc,0.24
,test,2016-02-29 0:00:00,BANK_ACCT,XXX,20160229,Int Inc,0.23
,test,2016-03-31 0:00:00,BANK_ACCT,XXX,20160331,Int Inc,0.25
,test,2016-04-08 0:00:00,TEST_ACCT,XXX,20160427,xfer,-6000
,test,2016-04-30 0:00:00,BANK_ACCT,XXX,20160430,Int Inc,0.2
,test,2016-05-31 0:00:00,BANK_ACCT,XXX,20160531,Int Inc,0.2
,test,2016-06-30 0:00:00,BANK_ACCT,XXX,20160630,Int Inc,0.19
,test,2016-07-31 0:00:00,BANK_ACCT,XXX,20160731,Int inc,0.2
,test,2016-08-31 0:00:00,BANK_ACCT,XXX,20160831,Int Inc,0.2
,test,2016-09-30 0:00:00,BANK_ACCT,XXX,20160930,Int Inc,0.19
,test,2016-10-31 0:00:00,BANK_ACCT,XXX,20161031,Int Inc,0.2
,test,2016-11-30 0:00:00,BANK_ACCT,XXX,20161130,Int Inc,0.19
,test,2017-01-30 0:00:00,TEST_ACCT,XXX,20170131,xfer,15000
,test,2017-01-31 0:00:00,BANK_ACCT,XXX,20170131,Inc Inc,0.21
Here is the traceback:
Traceback (most recent call last):
File "<console>", line 1, in <module>
File "/Users/userid/Documents/django/testenv/lib/python3.4/site-packages/tablib/core.py", line 169, in __init__
self.headers = kwargs.get('headers')
File "/Users/userid/Documents/django/testenv/lib/python3.4/site-packages/tablib/core.py", line 338, in _set_headers
self._validate(collection)
File "/Users/userid/Documents/django/testenv/lib/python3.4/site-packages/tablib/core.py", line 286, in _validate
raise InvalidDimensions
tablib.core.InvalidDimensions
Because of testing, the latest id in the database model is 21, so the next auto-increment should be 22.
It looks as if you may be using the tablib Dataset object incorrectly. Try this...
# Build the Dataset with headers only, then load the raw CSV text.
# (Passing the file path as a positional data row -- as the question did --
# trips tablib's dimension validation and raises InvalidDimensions.)
tablib.Dataset(headers=['id','tacct','tdate','tctpty','tuser','trec','ttype','tamt'])\
    .load(open('/Users/userid/Downloads/pa upload/test.csv').read())
Check out the tutorial to see other examples.
I'm using django-import-export to load csv files in a migration file, which I understand is current best practice for Django 1.7 to load initial data. This worked fine for the first file:
class Country(models.Model):
    # ISO 3166-1 alpha-2 style code used as the primary key, which is why
    # the resource must declare import_id_fields = ['ISO2'].
    ISO2 = models.CharField(max_length=2, primary_key=True)
    name = models.CharField(max_length=50, unique=True)
and the entire migration file contents. note that ISO2 is the primary key so required the addition line import_id_fields = ['ISO2']. Code adapted from answer to this question: Loading initial data with Django 1.7 and data migrations:
from django.conf import settings
import tablib
from import_export import resources
from web.models import Country
import os
import csv
# load initial data - assume it is all in web/fixtures directory
class CountryResource(resources.ModelResource):
    """import-export resource for Country fixtures."""

    class Meta:
        model = Country
        # Country has no auto 'id' column; match rows on the ISO2 primary key.
        import_id_fields = ['ISO2']


# Fixture location: <PROJECT_ROOT>/web/fixtures/web_country.csv
fixture_dir = os.path.abspath(os.path.join(settings.PROJECT_ROOT, 'web/fixtures'))
fixture_filename = 'web_country.csv'
def load_fixture(apps, schema_editor):
    """Forward migration: import the country CSV fixture via import-export."""
    csv_path = os.path.join(fixture_dir, fixture_filename)
    with open(csv_path, 'r') as handle:
        csv_text = handle.read()
    dataset = tablib.Dataset()
    dataset.csv = csv_text
    result = CountryResource().import_data(dataset, dry_run=False,
                                           raise_errors=True)
def unload_fixture(apps, schema_editor):
    "Brutally deleting all entries for this model..."
    apps.get_model("web", "Country").objects.all().delete()
class Migration(migrations.Migration):
    # NOTE(review): the snippet's imports do not show
    # ``from django.db import migrations`` -- confirm it exists in the
    # full migration file.

    dependencies = [
        ('web', '0001_initial'),
    ]

    operations = [
        # Pair the loader with its reverse so the migration can be unapplied.
        migrations.RunPython(load_fixture, reverse_code=unload_fixture),
    ]
Now for the next file which has a primary key relationship to that one:
class CountryFootprint(models.Model):
    # Foreign key to Country, whose primary key is the ISO2 code.
    ISO2 = models.ForeignKey(Country)
    footprint = models.DecimalField(max_digits=18, decimal_places=6)
and a subset of the migration file, with my attempt to link up the foreign key:
class CountryFootprintResource(resources.ModelResource):
    # Resolve the CSV's ISO2 column to a Country row via its ISO2 key.
    # NOTE(review): no column_name/attribute is given here; the question's
    # later attempt spells them out explicitly.
    ISO2_id = fields.Field( widget=widgets.ForeignKeyWidget(Country, 'ISO2'))

    class Meta:
        model = CountryFootprint
This give me:
django.db.models.fields.related.RelatedObjectDoesNotExist: CountryFootprint has no ISO2.
Also tried:
ISO2_id = fields.Field(column_name='ISO2_id', attribute='ISO2', widget=widgets.ForeignKeyWidget(Country, 'ISO2'))
File "/Users/phoebebr/Development/gmd/web/migrations/0003_auto_20141220_1931.py", line 43, in load_fixture
raise_errors=True)
File "/Users/phoebebr/.virtualenvs/gmd/lib/python2.7/site-packages/import_export/resources.py", line 359, in import_data
six.reraise(*sys.exc_info())
File "/Users/phoebebr/.virtualenvs/gmd/lib/python2.7/site-packages/import_export/resources.py", line 348, in import_data
row_result.object_repr = force_text(instance)
File "/Users/phoebebr/.virtualenvs/gmd/lib/python2.7/site-packages/django/utils/encoding.py", line 85, in force_text
s = six.text_type(s)
TypeError: coercing to Unicode: need string or buffer, Country found
I've read the documentation and I'm sure the answer is there but it's just not jumping out at me!
Either of these two lines work:
# Widget-only declaration; column and attribute default from the field name.
ISO2_id = fields.Field( widget=widgets.ForeignKeyWidget(Country, 'ISO2'))
or
# Fully explicit declaration: CSV column ISO2_id -> model attribute ISO2,
# resolved through Country's ISO2 key.
ISO2_id = fields.Field(column_name='ISO2_id', attribute='ISO2', widget=widgets.ForeignKeyWidget(Country, 'ISO2'))
using just:
# Using only this whitelist (without a Field declaration for the FK)
# raised RelatedObjectDoesNotExist, per the surrounding discussion.
fields = ('ISO2', 'footprint')
gives error
django.db.models.fields.related.RelatedObjectDoesNotExist: CountryFootprint has no ISO2.
The coercing to Unicode error was caused by my not having a string returned from the unicode def:
def __unicode__(self):
    # Bug: returns the related Country instance, not text -- this is what
    # triggered "coercing to Unicode: need string or buffer, Country found".
    return self.ISO2
should have been
def __unicode__(self):
    # Fixed: return the related Country's name, which is a string.
    return self.ISO2.name
so many coding problems solved by a good nights sleep!