I don't know what happened to my database, but now I can't call len() on my queryset.
I can build a queryset with a lot of objects via qs = SignalSma.objects.all(),
but somehow I can't use len(qs) on that queryset or loop over it.
I get the following error when I try:
In [9]: len(qs)
---------------------------------------------------------------------------
TypeError Traceback (most recent call last)
Cell In[9], line 1
----> 1 len(qs)
File ~\OneDrive\Desktop\dev-2023\signal\lib\site-packages\django\db\models\query.py:262, in QuerySet.__len__(self)
261 def __len__(self):
--> 262 self._fetch_all()
263 return len(self._result_cache)
File ~\OneDrive\Desktop\dev-2023\signal\lib\site-packages\django\db\models\query.py:1324, in QuerySet._fetch_all(self)
1322 def _fetch_all(self):
1323 if self._result_cache is None:
-> 1324 self._result_cache = list(self._iterable_class(self))
1325 if self._prefetch_related_lookups and not self._prefetch_done:
1326 self._prefetch_related_objects()
File ~\OneDrive\Desktop\dev-2023\signal\lib\site-packages\django\db\models\query.py:68, in ModelIterable.__iter__(self)
59 related_populators = get_related_populators(klass_info, select, db)
60 known_related_objects = [
61 (field, related_objs, operator.attrgetter(*[
62 field.attname
(...)
66 ])) for field, related_objs in queryset._known_related_objects.items()
67 ]
---> 68 for row in compiler.results_iter(results):
69 obj = model_cls.from_db(db, init_list, row[model_fields_start:model_fields_end])
70 for rel_populator in related_populators:
File ~\OneDrive\Desktop\dev-2023\signal\lib\site-packages\django\db\models\sql\compiler.py:1122, in SQLCompiler.apply_converters(self, rows, converters)
1120 value = row[pos]
1121 for converter in convs:
-> 1122 value = converter(value, expression, connection)
1123 row[pos] = value
1124 yield row
File ~\OneDrive\Desktop\dev-2023\signal\lib\site-packages\django\db\backends\sqlite3\operations.py:313, in DatabaseOperations.get_decimalfield_converter.<locals>.converter(value, expression, connection)
311 def converter(value, expression, connection):
312 if value is not None:
--> 313 return create_decimal(value).quantize(quantize_value, context=expression.output_field.context)
TypeError: argument must be int or float
Any idea what is happening and how I can fix it?
QuerySet objects have their own counting method. Use it:
qs = SignalSma.objects.all()
qs.count() # returns number of objects inside the queryset
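Note that count() runs a SELECT COUNT(*) and never fetches row values, which is why it succeeds while iteration fails: the traceback shows SQLite's DecimalField converter choking on a stored value that is not an int or float. A minimal diagnostic sketch, assuming the culprit is a non-numeric value that crept into a DecimalField column (the table and column names below are placeholders; substitute your real ones):

from django.db import connection

# SQLite columns are dynamically typed, so TEXT/BLOB values can end up in a
# "decimal" column; typeof() reveals rows whose stored type is unexpected.
with connection.cursor() as cursor:
    cursor.execute(
        "SELECT id, typeof(price), price FROM myapp_signalsma "
        "WHERE typeof(price) NOT IN ('real', 'integer', 'null')"
    )
    for row in cursor.fetchall():
        print(row)  # these rows need to be fixed or deleted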
I'm trying to fine-tune a Huggingface transformers BERT model on a TPU. It works in Colab but fails when I switch to a paid TPU on GCP. The Jupyter notebook code is as follows:
[1] model = transformers.TFBertModel.from_pretrained('bert-large-uncased-whole-word-masking-finetuned-squad')
# works
[2] cluster_resolver = tf.distribute.cluster_resolver.TPUClusterResolver(
    tpu='[My TPU]',
    zone='us-central1-a',
    project='[My Project]'
)
tf.config.experimental_connect_to_cluster(cluster_resolver)
tf.tpu.experimental.initialize_tpu_system(cluster_resolver)
tpu_strategy = tf.distribute.experimental.TPUStrategy(cluster_resolver)
# Also works. Got a bunch of startup messages from the TPU - all good.
[3] with tpu_strategy.scope():
    model = TFBertModel.from_pretrained('bert-large-uncased-whole-word-masking-finetuned-squad')
# Generates the error below (long). The same line works in Colab.
Here's the error message:
NotFoundError Traceback (most recent call last)
<ipython-input-14-2cfc1a238903> in <module>
1 with tpu_strategy.scope():
----> 2 model = TFBertModel.from_pretrained('bert-large-uncased-whole-word-masking-finetuned-squad')
~/.local/lib/python3.5/site-packages/transformers/modeling_tf_utils.py in from_pretrained(cls, pretrained_model_name_or_path, *model_args, **kwargs)
309 return load_pytorch_checkpoint_in_tf2_model(model, resolved_archive_file, allow_missing_keys=True)
310
--> 311 ret = model(model.dummy_inputs, training=False) # build the network with dummy inputs
312
313 assert os.path.isfile(resolved_archive_file), "Error retrieving file {}".format(resolved_archive_file)
/usr/local/lib/python3.5/dist-packages/tensorflow_core/python/keras/engine/base_layer.py in __call__(self, inputs, *args, **kwargs)
820 with base_layer_utils.autocast_context_manager(
821 self._compute_dtype):
--> 822 outputs = self.call(cast_inputs, *args, **kwargs)
823 self._handle_activity_regularization(inputs, outputs)
824 self._set_mask_metadata(inputs, outputs, input_masks)
~/.local/lib/python3.5/site-packages/transformers/modeling_tf_bert.py in call(self, inputs, **kwargs)
688
689 def call(self, inputs, **kwargs):
--> 690 outputs = self.bert(inputs, **kwargs)
691 return outputs
692
/usr/local/lib/python3.5/dist-packages/tensorflow_core/python/keras/engine/base_layer.py in __call__(self, inputs, *args, **kwargs)
820 with base_layer_utils.autocast_context_manager(
821 self._compute_dtype):
--> 822 outputs = self.call(cast_inputs, *args, **kwargs)
823 self._handle_activity_regularization(inputs, outputs)
824 self._set_mask_metadata(inputs, outputs, input_masks)
~/.local/lib/python3.5/site-packages/transformers/modeling_tf_bert.py in call(self, inputs, attention_mask, token_type_ids, position_ids, head_mask, inputs_embeds, training)
548
549 embedding_output = self.embeddings([input_ids, position_ids, token_type_ids, inputs_embeds], training=training)
--> 550 encoder_outputs = self.encoder([embedding_output, extended_attention_mask, head_mask], training=training)
551
552 sequence_output = encoder_outputs[0]
/usr/local/lib/python3.5/dist-packages/tensorflow_core/python/keras/engine/base_layer.py in __call__(self, inputs, *args, **kwargs)
820 with base_layer_utils.autocast_context_manager(
821 self._compute_dtype):
--> 822 outputs = self.call(cast_inputs, *args, **kwargs)
823 self._handle_activity_regularization(inputs, outputs)
824 self._set_mask_metadata(inputs, outputs, input_masks)
~/.local/lib/python3.5/site-packages/transformers/modeling_tf_bert.py in call(self, inputs, training)
365 all_hidden_states = all_hidden_states + (hidden_states,)
366
--> 367 layer_outputs = layer_module([hidden_states, attention_mask, head_mask[i]], training=training)
368 hidden_states = layer_outputs[0]
369
/usr/local/lib/python3.5/dist-packages/tensorflow_core/python/keras/engine/base_layer.py in __call__(self, inputs, *args, **kwargs)
820 with base_layer_utils.autocast_context_manager(
821 self._compute_dtype):
--> 822 outputs = self.call(cast_inputs, *args, **kwargs)
823 self._handle_activity_regularization(inputs, outputs)
824 self._set_mask_metadata(inputs, outputs, input_masks)
~/.local/lib/python3.5/site-packages/transformers/modeling_tf_bert.py in call(self, inputs, training)
341 hidden_states, attention_mask, head_mask = inputs
342
--> 343 attention_outputs = self.attention([hidden_states, attention_mask, head_mask], training=training)
344 attention_output = attention_outputs[0]
345 intermediate_output = self.intermediate(attention_output)
/usr/local/lib/python3.5/dist-packages/tensorflow_core/python/keras/engine/base_layer.py in __call__(self, inputs, *args, **kwargs)
820 with base_layer_utils.autocast_context_manager(
821 self._compute_dtype):
--> 822 outputs = self.call(cast_inputs, *args, **kwargs)
823 self._handle_activity_regularization(inputs, outputs)
824 self._set_mask_metadata(inputs, outputs, input_masks)
~/.local/lib/python3.5/site-packages/transformers/modeling_tf_bert.py in call(self, inputs, training)
290 input_tensor, attention_mask, head_mask = inputs
291
--> 292 self_outputs = self.self_attention([input_tensor, attention_mask, head_mask], training=training)
293 attention_output = self.dense_output([self_outputs[0], input_tensor], training=training)
294 outputs = (attention_output,) + self_outputs[1:] # add attentions if we output them
/usr/local/lib/python3.5/dist-packages/tensorflow_core/python/keras/engine/base_layer.py in __call__(self, inputs, *args, **kwargs)
820 with base_layer_utils.autocast_context_manager(
821 self._compute_dtype):
--> 822 outputs = self.call(cast_inputs, *args, **kwargs)
823 self._handle_activity_regularization(inputs, outputs)
824 self._set_mask_metadata(inputs, outputs, input_masks)
~/.local/lib/python3.5/site-packages/transformers/modeling_tf_bert.py in call(self, inputs, training)
222
223 batch_size = shape_list(hidden_states)[0]
--> 224 mixed_query_layer = self.query(hidden_states)
225 mixed_key_layer = self.key(hidden_states)
226 mixed_value_layer = self.value(hidden_states)
/usr/local/lib/python3.5/dist-packages/tensorflow_core/python/keras/engine/base_layer.py in __call__(self, inputs, *args, **kwargs)
820 with base_layer_utils.autocast_context_manager(
821 self._compute_dtype):
--> 822 outputs = self.call(cast_inputs, *args, **kwargs)
823 self._handle_activity_regularization(inputs, outputs)
824 self._set_mask_metadata(inputs, outputs, input_masks)
/usr/local/lib/python3.5/dist-packages/tensorflow_core/python/keras/layers/core.py in call(self, inputs)
1142 outputs = gen_math_ops.mat_mul(inputs, self.kernel)
1143 if self.use_bias:
-> 1144 outputs = nn.bias_add(outputs, self.bias)
1145 if self.activation is not None:
1146 return self.activation(outputs) # pylint: disable=not-callable
/usr/local/lib/python3.5/dist-packages/tensorflow_core/python/ops/nn_ops.py in bias_add(value, bias, data_format, name)
2756 else:
2757 return gen_nn_ops.bias_add(
-> 2758 value, bias, data_format=data_format, name=name)
2759
2760
/usr/local/lib/python3.5/dist-packages/tensorflow_core/python/ops/gen_nn_ops.py in bias_add(value, bias, data_format, name)
675 try:
676 return bias_add_eager_fallback(
--> 677 value, bias, data_format=data_format, name=name, ctx=_ctx)
678 except _core._SymbolicException:
679 pass # Add nodes to the TensorFlow graph.
/usr/local/lib/python3.5/dist-packages/tensorflow_core/python/ops/gen_nn_ops.py in bias_add_eager_fallback(value, bias, data_format, name, ctx)
703 data_format = "NHWC"
704 data_format = _execute.make_str(data_format, "data_format")
--> 705 _attr_T, _inputs_T = _execute.args_to_matching_eager([value, bias], ctx)
706 (value, bias) = _inputs_T
707 _inputs_flat = [value, bias]
/usr/local/lib/python3.5/dist-packages/tensorflow_core/python/eager/execute.py in args_to_matching_eager(l, ctx, default_dtype)
265 dtype = ret[-1].dtype
266 else:
--> 267 ret = [ops.convert_to_tensor(t, dtype, ctx=ctx) for t in l]
268
269 # TODO(slebedev): consider removing this as it leaks a Keras concept.
/usr/local/lib/python3.5/dist-packages/tensorflow_core/python/eager/execute.py in <listcomp>(.0)
265 dtype = ret[-1].dtype
266 else:
--> 267 ret = [ops.convert_to_tensor(t, dtype, ctx=ctx) for t in l]
268
269 # TODO(slebedev): consider removing this as it leaks a Keras concept.
/usr/local/lib/python3.5/dist-packages/tensorflow_core/python/framework/ops.py in convert_to_tensor(value, dtype, name, as_ref, preferred_dtype, dtype_hint, ctx, accepted_result_types)
1312
1313 if ret is None:
-> 1314 ret = conversion_func(value, dtype=dtype, name=name, as_ref=as_ref)
1315
1316 if ret is NotImplemented:
/usr/local/lib/python3.5/dist-packages/tensorflow_core/python/distribute/values.py in _tensor_conversion_mirrored(var, dtype, name, as_ref)
1174 # allowing instances of the class to be used as tensors.
1175 def _tensor_conversion_mirrored(var, dtype=None, name=None, as_ref=False):
-> 1176 return var._dense_var_to_tensor(dtype=dtype, name=name, as_ref=as_ref) # pylint: disable=protected-access
1177
1178
/usr/local/lib/python3.5/dist-packages/tensorflow_core/python/distribute/values.py in _dense_var_to_tensor(self, dtype, name, as_ref)
908 if _enclosing_tpu_context() is None:
909 return super(TPUVariableMixin, self)._dense_var_to_tensor(
--> 910 dtype=dtype, name=name, as_ref=as_ref)
911 # pylint: enable=protected-access
912 elif dtype is not None and dtype != self.dtype:
/usr/local/lib/python3.5/dist-packages/tensorflow_core/python/distribute/values.py in _dense_var_to_tensor(self, dtype, name, as_ref)
1164 assert not as_ref
1165 return ops.convert_to_tensor(
-> 1166 self.get(), dtype=dtype, name=name, as_ref=as_ref)
1167
1168 def _clone_with_new_values(self, new_values):
/usr/local/lib/python3.5/dist-packages/tensorflow_core/python/distribute/values.py in get(self, device)
835 def get(self, device=None):
836 if (_enclosing_tpu_context() is None) or (device is not None):
--> 837 return super(TPUVariableMixin, self).get(device=device)
838 else:
839 raise NotImplementedError(
/usr/local/lib/python3.5/dist-packages/tensorflow_core/python/distribute/values.py in get(self, device)
320 device = distribute_lib.get_update_device()
321 if device is None:
--> 322 return self._get_cross_replica()
323 device = device_util.canonicalize(device)
324 return self._device_map.select_for_device(self._values, device)
/usr/local/lib/python3.5/dist-packages/tensorflow_core/python/distribute/values.py in _get_cross_replica(self)
1136 replica_id = self._device_map.replica_for_device(device)
1137 if replica_id is None:
-> 1138 return array_ops.identity(self.primary)
1139 return array_ops.identity(self._values[replica_id])
1140
/usr/local/lib/python3.5/dist-packages/tensorflow_core/python/util/dispatch.py in wrapper(*args, **kwargs)
178 """Call target, and fall back on dispatchers if there is a TypeError."""
179 try:
--> 180 return target(*args, **kwargs)
181 except (TypeError, ValueError):
182 # Note: convert_to_eager_tensor currently raises a ValueError, not a
/usr/local/lib/python3.5/dist-packages/tensorflow_core/python/ops/array_ops.py in identity(input, name)
265 # variables. Variables have correct handle data when graph building.
266 input = ops.convert_to_tensor(input)
--> 267 ret = gen_array_ops.identity(input, name=name)
268 # Propagate handle data for happier shape inference for resource variables.
269 if hasattr(input, "_handle_data"):
/usr/local/lib/python3.5/dist-packages/tensorflow_core/python/ops/gen_array_ops.py in identity(input, name)
3824 pass # Add nodes to the TensorFlow graph.
3825 except _core._NotOkStatusException as e:
-> 3826 _ops.raise_from_not_ok_status(e, name)
3827 # Add nodes to the TensorFlow graph.
3828 _, _, _op, _outputs = _op_def_library._apply_op_helper(
/usr/local/lib/python3.5/dist-packages/tensorflow_core/python/framework/ops.py in raise_from_not_ok_status(e, name)
6604 message = e.message + (" name: " + name if name is not None else "")
6605 # pylint: disable=protected-access
-> 6606 six.raise_from(core._status_to_exception(e.code, message), None)
6607 # pylint: enable=protected-access
6608
/usr/local/lib/python3.5/dist-packages/six.py in raise_from(value, from_value)
NotFoundError: '_MklMatMul' is neither a type of a primitive operation nor a name of a function registered in binary running on n-aa2fcfb7-w-0. One possible root cause is the client and server binaries are not built with the same version. Please make sure the operation or function is registered in the binary running in this process. [Op:Identity]
I posted this on the Huggingface GitHub (https://github.com/huggingface/transformers/issues/2572) and they suggested the TPU server version may not match the TPU client version, but (a) I don't know how to check for that, nor (b) what to do about it. Suggestions appreciated.
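For anyone hitting the same wall, a hedged starting point for (a): the TPU node's runtime version is fixed when the node is created and has to match the TensorFlow version running in the notebook. The client side can be checked in Python, and the server side from a shell (the TPU name and zone below are the placeholders from the question):

import tensorflow as tf
import transformers

# Client-side versions running in the notebook VM; the TPU runtime ("server")
# must match this TensorFlow version.
print("TF client:", tf.__version__)
print("transformers:", transformers.__version__)

# Server-side runtime, from a shell:
#   gcloud compute tpus describe [My TPU] --zone=us-central1-a
# The tensorflowVersion field in the output should equal tf.__version__ above;
# if it doesn't, recreate the TPU node with a matching --version flag.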
I created a customized spaCy model using https://github.com/explosion/spaCy/blob/master/examples/training/train_ner.py, and when I load this model it shows an error: look-behind requires fixed-width pattern. I am confused about how to solve this issue. Please help me; any help will be appreciated. Thanks in advance.
output_dir = 'NLP_entity/model'
print("Loading from", output_dir)
nlp2 = spacy.load(output_dir)
test_text = "Remove from account"
doc2 = nlp2(test_text)
print(test_text)
if doc2.ents:
    for ent in doc2.ents:
        print("entity = {}, text = {}".format(ent.label_, ent.text))
else:
    print("Entities is None")
Error:
('Loading from', 'NLP_entity/model')
errorTraceback (most recent call last)
<ipython-input-1-94981b2ca322> in <module>()
2 output_dir = 'NLP_entity/model'
3 print("Loading from", output_dir)
----> 4 nlp2 = spacy.load("NLP_entity/model")
5 test_text = "Remove from account".decode("utf-8")
6 #print()
/home/ubuntu/anaconda3/envs/python2/lib/python2.7/site-packages/spacy/__init__.pyc in load(name, **overrides)
25 if depr_path not in (True, False, None):
26 deprecation_warning(Warnings.W001.format(path=depr_path))
---> 27 return util.load_model(name, **overrides)
28
29
/home/ubuntu/anaconda3/envs/python2/lib/python2.7/site-packages/spacy/util.pyc in load_model(name, **overrides)
131 return load_model_from_package(name, **overrides)
132 if Path(name).exists(): # path to model data directory
--> 133 return load_model_from_path(Path(name), **overrides)
134 elif hasattr(name, "exists"): # Path or Path-like to model data
135 return load_model_from_path(name, **overrides)
/home/ubuntu/anaconda3/envs/python2/lib/python2.7/site-packages/spacy/util.pyc in load_model_from_path(model_path, meta, **overrides)
171 component = nlp.create_pipe(name, config=config)
172 nlp.add_pipe(component, name=name)
--> 173 return nlp.from_disk(model_path)
174
175
/home/ubuntu/anaconda3/envs/python2/lib/python2.7/site-packages/spacy/language.pyc in from_disk(self, path, exclude, disable)
784 # Convert to list here in case exclude is (default) tuple
785 exclude = list(exclude) + ["vocab"]
--> 786 util.from_disk(path, deserializers, exclude)
787 self._path = path
788 return self
/home/ubuntu/anaconda3/envs/python2/lib/python2.7/site-packages/spacy/util.pyc in from_disk(path, readers, exclude)
609 # Split to support file names like meta.json
610 if key.split(".")[0] not in exclude:
--> 611 reader(path / key)
612 return path
613
/home/ubuntu/anaconda3/envs/python2/lib/python2.7/site-packages/spacy/language.pyc in <lambda>(p)
774 deserializers["meta.json"] = lambda p: self.meta.update(srsly.read_json(p))
775 deserializers["vocab"] = lambda p: self.vocab.from_disk(p) and _fix_pretrained_vectors_name(self)
--> 776 deserializers["tokenizer"] = lambda p: self.tokenizer.from_disk(p, exclude=["vocab"])
777 for name, proc in self.pipeline:
778 if name in exclude:
tokenizer.pyx in spacy.tokenizer.Tokenizer.from_disk()
tokenizer.pyx in spacy.tokenizer.Tokenizer.from_bytes()
/home/ubuntu/anaconda3/envs/python2/lib/python2.7/re.pyc in compile(pattern, flags)
192 def compile(pattern, flags=0):
193 "Compile a regular expression pattern, returning a pattern object."
--> 194 return _compile(pattern, flags)
195
196 def purge():
/home/ubuntu/anaconda3/envs/python2/lib/python2.7/re.pyc in _compile(*key)
249 p = sre_compile.compile(pattern, flags)
250 except error, v:
--> 251 raise error, v # invalid expression
252 if not bypass_cache:
253 if len(_cache) >= _MAXCACHE:
error: look-behind requires fixed-width pattern
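This error usually surfaces when the tokenizer's serialized regular expressions were produced under a different spaCy (or Python) version than the one loading them; the traceback shows the model being deserialized under Python 2.7. A hedged first diagnostic is to compare the loading environment against the version recorded in the model's meta.json:

import spacy
import srsly

# Version of spaCy doing the loading.
print(spacy.__version__)

# spaCy v2 models record the training version in meta.json (if present).
meta = srsly.read_json('NLP_entity/model/meta.json')
print(meta.get('spacy_version'))

If the two disagree, retraining and saving the model under the same spaCy and Python versions used for loading is the usual way out.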
I'm trying to get a queryset where a Task has two Issuances which have been created within 10 seconds of one another.
Models are as follows:
class Task(models.Model):
    pass  # stuff

class Issuance(models.Model):
    task = models.ForeignKey(Task, blank=True, null=True, on_delete=models.SET_NULL)
    created = models.DateTimeField(default=timezone.now)
What I've got so far:
qs = (
    Task.objects
    .annotate(count=Count('issuance'))
    .filter(count__gt=1, count__lte=2)
    .annotate(time_difference=F('issuance__created'))  # Need to fix this
    .annotate(
        dupe=Case(
            When(
                time_difference__lt=10,  # Less than 10 seconds
                then=Value(1),
            ),
            default=Value(0),
            output=BooleanField(),
        )
    )
)
I think I'm pretty close, but I need some way to calculate the time difference between the creation dates of the two issuances for any one Task, and then check that the delta is less than 10 seconds; I'm not sure what I have will work.
Can anyone help, please?
EDIT: Added output of query
TypeError Traceback (most recent call last)
<ipython-input-47-e0e60776551e> in <module>()
11 ),
12 default=Value(0),
---> 13 output=BooleanField(),
14 )
15 )
/usr/local/lib/python3.6/site-packages/django/db/models/query.py in annotate(self, *args, **kwargs)
912 raise ValueError("The annotation '%s' conflicts with a field on "
913 "the model." % alias)
--> 914 clone.query.add_annotation(annotation, alias, is_summary=False)
915
916 for alias, annotation in clone.query.annotations.items():
/usr/local/lib/python3.6/site-packages/django/db/models/sql/query.py in add_annotation(self, annotation, alias, is_summary)
969 """
970 annotation = annotation.resolve_expression(self, allow_joins=True, reuse=None,
--> 971 summarize=is_summary)
972 self.append_annotation_mask([alias])
973 self.annotations[alias] = annotation
/usr/local/lib/python3.6/site-packages/django/db/models/expressions.py in resolve_expression(self, query, allow_joins, reuse, summarize, for_save)
827 c.is_summary = summarize
828 for pos, case in enumerate(c.cases):
--> 829 c.cases[pos] = case.resolve_expression(query, allow_joins, reuse, summarize, for_save)
830 c.default = c.default.resolve_expression(query, allow_joins, reuse, summarize, for_save)
831 return c
/usr/local/lib/python3.6/site-packages/django/db/models/expressions.py in resolve_expression(self, query, allow_joins, reuse, summarize, for_save)
760 c.is_summary = summarize
761 if hasattr(c.condition, 'resolve_expression'):
--> 762 c.condition = c.condition.resolve_expression(query, allow_joins, reuse, summarize, False)
763 c.result = c.result.resolve_expression(query, allow_joins, reuse, summarize, for_save)
764 return c
/usr/local/lib/python3.6/site-packages/django/db/models/query_utils.py in resolve_expression(self, query, allow_joins, reuse, summarize, for_save)
79 # We must promote any new joins to left outer joins so that when Q is
80 # used as an expression, rows aren't filtered due to joins.
---> 81 clause, joins = query._add_q(self, reuse, allow_joins=allow_joins, split_subq=False)
82 query.promote_joins(joins)
83 return clause
/usr/local/lib/python3.6/site-packages/django/db/models/sql/query.py in _add_q(self, q_object, used_aliases, branch_negated, current_negated, allow_joins, split_subq)
1251 child, can_reuse=used_aliases, branch_negated=branch_negated,
1252 current_negated=current_negated, connector=connector,
-> 1253 allow_joins=allow_joins, split_subq=split_subq,
1254 )
1255 joinpromoter.add_votes(needed_inner)
/usr/local/lib/python3.6/site-packages/django/db/models/sql/query.py in build_filter(self, filter_expr, branch_negated, current_negated, can_reuse, connector, allow_joins, split_subq)
1141 clause = self.where_class()
1142 if reffed_expression:
-> 1143 condition = self.build_lookup(lookups, reffed_expression, value)
1144 clause.add(condition, AND)
1145 return clause, []
/usr/local/lib/python3.6/site-packages/django/db/models/sql/query.py in build_lookup(self, lookups, lhs, rhs)
1081 lhs = self.try_transform(lhs, name, lookups)
1082 final_lookup = lhs.get_lookup('exact')
-> 1083 return final_lookup(lhs, rhs)
1084 lhs = self.try_transform(lhs, name, lookups)
1085 lookups = lookups[1:]
/usr/local/lib/python3.6/site-packages/django/db/models/lookups.py in __init__(self, lhs, rhs)
17 def __init__(self, lhs, rhs):
18 self.lhs, self.rhs = lhs, rhs
---> 19 self.rhs = self.get_prep_lookup()
20 if hasattr(self.lhs, 'get_bilateral_transforms'):
21 bilateral_transforms = self.lhs.get_bilateral_transforms()
/usr/local/lib/python3.6/site-packages/django/db/models/lookups.py in get_prep_lookup(self)
57 return self.rhs._prepare(self.lhs.output_field)
58 if self.prepare_rhs and hasattr(self.lhs.output_field, 'get_prep_value'):
---> 59 return self.lhs.output_field.get_prep_value(self.rhs)
60 return self.rhs
61
/usr/local/lib/python3.6/site-packages/django/db/models/fields/__init__.py in get_prep_value(self, value)
1415
1416 def get_prep_value(self, value):
-> 1417 value = super(DateTimeField, self).get_prep_value(value)
1418 value = self.to_python(value)
1419 if value is not None and settings.USE_TZ and timezone.is_naive(value):
/usr/local/lib/python3.6/site-packages/django/db/models/fields/__init__.py in get_prep_value(self, value)
1273 def get_prep_value(self, value):
1274 value = super(DateField, self).get_prep_value(value)
-> 1275 return self.to_python(value)
1276
1277 def get_db_prep_value(self, value, connection, prepared=False):
/usr/local/lib/python3.6/site-packages/django/db/models/fields/__init__.py in to_python(self, value)
1376
1377 try:
-> 1378 parsed = parse_datetime(value)
1379 if parsed is not None:
1380 return parsed
/usr/local/lib/python3.6/site-packages/django/utils/dateparse.py in parse_datetime(value)
91 Returns None if the input isn't well formatted.
92 """
---> 93 match = datetime_re.match(value)
94 if match:
95 kw = match.groupdict()
TypeError: expected string or bytes-like object
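The TypeError at the bottom of the traceback comes from the When() condition: time_difference is annotated as F('issuance__created'), so its output field is a DateTimeField, and comparing it to the integer 10 makes Django try to parse 10 as a datetime string. One hedged way to express the intent, assuming exactly two issuances per task and a backend that supports datetime subtraction (e.g. PostgreSQL), is to aggregate the earliest and latest related timestamps and compare their difference to a timedelta:

from datetime import timedelta
from django.db.models import Count, DurationField, ExpressionWrapper, Max, Min

# Sketch, not the question's final query: tasks with exactly two issuances
# whose created timestamps are less than 10 seconds apart.
qs = (
    Task.objects
    .annotate(count=Count('issuance'))
    .filter(count=2)
    .annotate(
        time_difference=ExpressionWrapper(
            Max('issuance__created') - Min('issuance__created'),
            output_field=DurationField(),
        )
    )
    .filter(time_difference__lt=timedelta(seconds=10))
)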
I am trying to write to a *.xlsx file with the openpyxl module, which I installed with pip on Ubuntu 14.04.
I am loading a *.xlsx file that is a Bill of Materials template I use, previously made in Excel, and I can open it just fine in LibreOffice and Kingsoft Office. My intention is to load it and fill in some cells with strings.
My function looks like this:
def writeBOM(parts, projectname):
    '''
    Take the ordered and grouped part info and
    write it to a standard BOM and save it
    '''
    StandardBOMFILE = '/home/jesse/Digi-Parser/SampleFiles/StandardBOM.xlsx'
    wb = load_workbook(filename=StandardBOMFILE)
    sheet = wb.get_sheet_by_name('BOM')
    r = 8
    # Fill BOM
    for i, part in enumerate(parts):
        sheet.cell(row=r + i, column=1).value = part.designator
        sheet.cell(row=r + i, column=2).value = part.evalue + ' ' + part.package
        sheet.cell(row=r + i, column=3).value = part.qty
    projectBOMname = projectname + 'BOM' + '.xlsx'
    wb.save(projectBOMname)
The values I am putting into the cells are just strings.
However, when I run this I get the following error:
---------------------------------------------------------------------------
ValueError Traceback (most recent call last)
/usr/lib/python2.7/dist-packages/IPython/utils/py3compat.pyc in execfile(fname, *where)
202 else:
203 filename = fname
--> 204 __builtin__.execfile(filename, *where)
/home/jesse/Digi-Parser/SheetOrganizer.py in <module>()
232 prjpath = '/home/jesse/Digi-Parser/SampleFiles/'
233 prjname = 'Water Use Monitor'
--> 234 things = csvToBOM(prjpath, prjname)
/home/jesse/Digi-Parser/SheetOrganizer.py in csvToBOM(projectpath, projectname)
223 orderedBody = combineSameComponents(reorderParts(body))
224
--> 225 writeBOM(orderedBody, projectname)
226
227
/home/jesse/Digi-Parser/SheetOrganizer.py in writeBOM(parts, projectname)
192 sheet.cell(row = r+i, column = 3).value = part.qty
193 projectBOMname = projectname + 'BOM' + '.xlsx'
--> 194 wb.save(projectBOMname)
195
196
/usr/local/lib/python2.7/dist-packages/openpyxl/workbook/workbook.pyc in save(self, filename)
265 save_dump(self, filename)
266 else:
--> 267 save_workbook(self, filename)
/usr/local/lib/python2.7/dist-packages/openpyxl/writer/excel.pyc in save_workbook(workbook, filename)
183 """
184 writer = ExcelWriter(workbook)
--> 185 writer.save(filename)
186 return True
187
/usr/local/lib/python2.7/dist-packages/openpyxl/writer/excel.pyc in save(self, filename)
166 """Write data into the archive."""
167 archive = ZipFile(filename, 'w', ZIP_DEFLATED)
--> 168 self.write_data(archive)
169 archive.close()
170
/usr/local/lib/python2.7/dist-packages/openpyxl/writer/excel.pyc in write_data(self, archive)
78 archive.writestr(ARC_WORKBOOK_RELS, write_workbook_rels(self.workbook))
79 archive.writestr(ARC_APP, write_properties_app(self.workbook))
---> 80 archive.writestr(ARC_CORE, write_properties_core(self.workbook.properties))
81 if self.workbook.loaded_theme:
82 archive.writestr(ARC_THEME, self.workbook.loaded_theme)
/usr/local/lib/python2.7/dist-packages/openpyxl/writer/workbook.pyc in write_properties_core(properties)
65 SubElement(root, '{%s}created' % DCTERMS_NS,
66 {'{%s}type' % XSI_NS: '%s:W3CDTF' % DCTERMS_PREFIX}).text = \
---> 67 datetime_to_W3CDTF(properties.created)
68 SubElement(root, '{%s}modified' % DCTERMS_NS,
69 {'{%s}type' % XSI_NS: '%s:W3CDTF' % DCTERMS_PREFIX}).text = \
/usr/local/lib/python2.7/dist-packages/openpyxl/date_time.pyc in datetime_to_W3CDTF(dt)
54 def datetime_to_W3CDTF(dt):
55 """Convert from a datetime to a timestamp string."""
---> 56 return datetime.datetime.strftime(dt, W3CDTF_FORMAT)
57
58
ValueError: year=1899 is before 1900; the datetime strftime() methods require year >= 1900
I can't figure out how to fix this. I don't need to have Excel installed on my computer, do I? The issue seems to be in the date_time.py file in the openpyxl package: the variable dt ends up with the year 1899 for some reason.
Thank you for any help.
As discussed on the mailing list, this looks like a bug in the Python 2 datetime library:
https://docs.python.org/2/library/datetime.html#strftime-strptime-behavior
There is no such problem with Python 3.3 or 3.4.
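If moving to Python 3 isn't an option, a possible workaround (a sketch, not an official fix) is to overwrite the template's created core property before saving; the traceback shows the failure happening while openpyxl formats workbook.properties.created with strftime(), and the template evidently carries an 1899 timestamp that Python 2 cannot format. The output filename below is a placeholder:

import datetime
from openpyxl import load_workbook

wb = load_workbook(filename='/home/jesse/Digi-Parser/SampleFiles/StandardBOM.xlsx')
# Python 2's strftime() rejects years before 1900, so replace the template's
# 1899 'created' timestamp with one it can represent.
wb.properties.created = datetime.datetime.now()
wb.save('output.xlsx')  # placeholder name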