Python query ElasticSearch path with backslash - python-2.7

I'm using the Elasticsearch Python client and I'm trying to query a specific path.
Here is my wrapped query:
{
  "size": 1000,
  "query": {
    "filtered": {
      "filter": {
        "bool": {
          "must": [
            {
              "term": {
                "Path": "c:\\myfolder\\myfile.txt"
              }
            }
          ]
        }
      }
    }
  }
}
This works fine in the kopf plugin.
Here is my Python code:
from elasticsearch import Elasticsearch
es = Elasticsearch(hosts=['my_server'])
index = "my_index"
query = '{"size":1000,"query":{"filtered":{"filter":{"bool":{"must":[{"term":{"Path":"c:\\myfolder\\myfile.txt"}}]}}}}}'
response = es.search(index=index, body=query)
For some reason I'm getting this error (which does not occur without the backslashes):
  File "/usr/local/lib/python2.7/dist-packages/elasticsearch/client/utils.py", line 69, in _wrapped
    return func(*args, params=params, **kwargs)
  File "/usr/local/lib/python2.7/dist-packages/elasticsearch/client/__init__.py", line 530, in search
    doc_type, '_search'), params=params, body=body)
  File "/usr/local/lib/python2.7/dist-packages/elasticsearch/transport.py", line 329, in perform_request
    status, headers, data = connection.perform_request(method, url, params, body, ignore=ignore, timeout=timeout)
  File "/usr/local/lib/python2.7/dist-packages/elasticsearch/connection/http_urllib3.py", line 106, in perform_request
    self._raise_error(response.status, raw_data)
  File "/usr/local/lib/python2.7/dist-packages/elasticsearch/connection/base.py", line 105, in _raise_error
    raise HTTP_EXCEPTIONS.get(status_code, TransportError)(status_code, error_message, additional_info)
elasticsearch.exceptions.RequestError
This problem happens only when there are backslashes.
Note: I'm working on Ubuntu.
Thanks in advance.

Try changing "Path" to "c:\\\\myfolder\\\\myfile.txt". Your query is a Python string literal, so each \\ in the source becomes a single backslash in the JSON body, and \m is not a valid JSON escape sequence; with four backslashes the JSON contains \\, which deserializes back to one literal backslash.
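Alternatively, you can sidestep the double escaping entirely by building the query as a Python dict and letting the client serialize it to JSON. A minimal sketch under the same setup (my_server, my_index, and the path are taken from the question):

from elasticsearch import Elasticsearch

es = Elasticsearch(hosts=['my_server'])

# With a dict body the client does the JSON serialization itself, so the
# two Python-level backslashes below are escaped correctly on the wire.
query = {
    "size": 1000,
    "query": {
        "filtered": {
            "filter": {
                "bool": {
                    "must": [
                        {"term": {"Path": "c:\\myfolder\\myfile.txt"}}
                    ]
                }
            }
        }
    }
}
response = es.search(index="my_index", body=query)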

Related

Airflow 2.x bucket and range argument for ExternalTable(Ghseets) Operator from Gsheets

I am having problems using the new Airflow operator BigQueryCreateExternalTableOperator within Google Composer:
Question 1
After creating an Airflow task, this happens:
AttributeError: 'BigQueryCreateExternalTableOperator' object has no attribute 'bucket'
However, since I am querying a Google Sheets file, why is it looking for a bucket argument? I am going crazy trying to find out what is happening! According to the docs it is optional!
Sample Code
task1 = BigQueryCreateExternalTableOperator(
    task_id="task1_externaltable",
    table_resource={
        "tableReference": {
            "projectId": projectid,
            "datasetId": datasetid,
            "tableId": tableid,
        },
        "schema": schema_fields,
        "externalDataConfiguration": {
            "sourceFormat": "GOOGLE_SHEETS",
            "autodetect": False,
            "compression": "NONE",
            "googleSheetsOptions": {
                "skipLeadingRows": 1,
                "range": gsheets_tab_name,
            },
            "sourceUris": gsheets_url,
        },
    },
)
Following Elad's suggestion, here is the full error traceback:
AttributeError: 'BigQueryCreateExternalTableOperator' object has no attribute 'bucket'
[2022-03-18, 14:45:38 UTC] {taskinstance.py:1268} INFO - Marking task as UP_FOR_RETRY. dag_id=trm_analytics_attribution_collision_checker_dag, task_id=create_manual_attribution_2_external_table, execution_date=20220318T144520, start_date=20220318T144536, end_date=20220318T144538
[2022-03-18, 14:45:38 UTC] {standard_task_runner.py:89} ERROR - Failed to execute job 444 for task create_manual_attribution_2_external_table
Traceback (most recent call last):
File "/opt/python3.8/lib/python3.8/site-packages/airflow/task/task_runner/standard_task_runner.py", line 85, in _start_by_fork
args.func(args, dag=self.dag)
File "/opt/python3.8/lib/python3.8/site-packages/airflow/cli/cli_parser.py", line 48, in command
return func(*args, **kwargs)
File "/opt/python3.8/lib/python3.8/site-packages/airflow/utils/cli.py", line 94, in wrapper
return f(*args, **kwargs)
File "/opt/python3.8/lib/python3.8/site-packages/airflow/cli/commands/task_command.py", line 302, in task_run
_run_task_by_selected_method(args, dag, ti)
File "/opt/python3.8/lib/python3.8/site-packages/airflow/cli/commands/task_command.py", line 107, in _run_task_by_selected_method
_run_raw_task(args, ti)
File "/opt/python3.8/lib/python3.8/site-packages/airflow/cli/commands/task_command.py", line 180, in _run_raw_task
ti._run_raw_task(
File "/opt/python3.8/lib/python3.8/site-packages/airflow/utils/session.py", line 70, in wrapper
return func(*args, session=session, **kwargs)
File "/opt/python3.8/lib/python3.8/site-packages/airflow/models/taskinstance.py", line 1330, in _run_raw_task
self._execute_task_with_callbacks(context)
File "/opt/python3.8/lib/python3.8/site-packages/airflow/models/taskinstance.py", line 1420, in _execute_task_with_callbacks
self.render_templates(context=context)
File "/opt/python3.8/lib/python3.8/site-packages/airflow/models/taskinstance.py", line 1995, in render_templates
self.task.render_template_fields(context)
File "/opt/python3.8/lib/python3.8/site-packages/airflow/models/baseoperator.py", line 1061, in render_template_fields
self._do_render_template_fields(self, self.template_fields, context, jinja_env, set())
File "/opt/python3.8/lib/python3.8/site-packages/airflow/models/baseoperator.py", line 1072, in _do_render_template_fields
content = getattr(parent, attr_name)
AttributeError: 'BigQueryCreateExternalTableOperator' object has no attribute 'bucket'
[2022-03-18, 14:45:38 UTC] {local_task_job.py:154} INFO - Task exited with return code 1
[2022-03-18, 14:45:38 UTC] {local_task_job.py:264} INFO - 0 downstream tasks scheduled from follow-on schedule check
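The traceback shows the failure happens while Airflow renders the operator's template_fields: getattr(parent, attr_name) fails because, when only table_resource is passed, this provider version never sets the bucket attribute that template_fields still lists (reportedly fixed in newer apache-airflow-providers-google releases, so upgrading the provider is the cleaner fix). If upgrading isn't an option, a hedged workaround sketch is to backfill the missing templated attributes right after constructing the task:

# Workaround sketch: ensure every attribute named in template_fields
# exists, so render_template_fields() cannot raise AttributeError.
for attr in task1.template_fields:
    if not hasattr(task1, attr):
        setattr(task1, attr, None)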

TPU VM access Cloud Storage 403 forbidden when writing files

When I run my Python command to train my model on my TPU VM, it fails when writing files to Cloud Storage.
Traceback (most recent call last):
File "device_train.py", line 302, in <module>
save(network, step, bucket, model_dir,
File "device_train.py", line 62, in save
with open(f"gs://{bucket}/{path}/meta.json", "w") as f:
File "/usr/local/lib/python3.8/dist-packages/smart_open/smart_open_lib.py", line 235, in open
binary = _open_binary_stream(uri, binary_mode, transport_params)
File "/usr/local/lib/python3.8/dist-packages/smart_open/smart_open_lib.py", line 398, in _open_binary_stream
fobj = submodule.open_uri(uri, mode, transport_params)
File "/usr/local/lib/python3.8/dist-packages/smart_open/gcs.py", line 105, in open_uri
return open(parsed_uri['bucket_id'], parsed_uri['blob_id'], mode, **kwargs)
File "/usr/local/lib/python3.8/dist-packages/smart_open/gcs.py", line 146, in open
fileobj = Writer(
File "/usr/local/lib/python3.8/dist-packages/smart_open/gcs.py", line 427, in __init__
self._resumable_upload_url = self._blob.create_resumable_upload_session()
File "/usr/local/lib/python3.8/dist-packages/google/cloud/storage/blob.py", line 2728, in create_resumable_upload_session
_raise_from_invalid_response(exc)
File "/usr/local/lib/python3.8/dist-packages/google/cloud/storage/blob.py", line 3936, in _raise_from_invalid_response
raise exceptions.from_http_status(response.status_code, message, response=response)
google.api_core.exceptions.Forbidden: 403 POST https://storage.googleapis.com/upload/storage/v1/b/my-bucket/o?uploadType=resumable: {
"error": {
"code": 403,
"message": "Access denied.",
"errors": [
{
"message": "Access denied.",
"domain": "global",
"reason": "forbidden"
}
]
}
}
: ('Request failed with status code', 403, 'Expected one of', <HTTPStatus.OK: 200>, <HTTPStatus.CREATED: 201>)
I have two service accounts: one like project-id-compute@developer.gserviceaccount.com, and the other like service-project-id@cloud-tpu.iam.gserviceaccount.com.
I tried adding the Storage Admin role to both of them, but it doesn't work.
Looking for your help!
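One thing worth checking before granting more roles is which identity the TPU VM is actually running as; the writes may go through the Compute Engine default service account rather than the TPU service agent. A small sketch using google-auth (a dependency of google-cloud-storage) to print the active account:

import google.auth
import google.auth.transport.requests

# Resolve and print the service account the default credentials map to.
creds, project = google.auth.default()
creds.refresh(google.auth.transport.requests.Request())
print(project, getattr(creds, "service_account_email", "unknown"))

Once you know the account, grant it storage access on that bucket; if access is still forbidden, the VM's OAuth access scopes may also be worth checking.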

negative timestamp in installed ssm patches list of EC2 instance

I want to get all the installed patches on an AWS EC2 instance, so I run this code with boto3:
response = client.describe_instance_patches(InstanceId=instance_id, Filters=[{'Key': 'State','Values': ['Installed',]} ])
My instance has a patch with a negative timestamp:
{
  "Patches": [
    {
      "KBId": "KB3178539",
      "Severity": "Important",
      "Classification": "SecurityUpdates",
      "Title": "Security Update for Windows 8.1 (KB3178539)",
      "State": "Installed",
      "InstalledTime": 1483574400.0
    },
    {
      "KBId": "KB4493446",
      "Severity": "Critical",
      "Classification": "SecurityUpdates",
      "Title": "2019-04 Security Monthly Quality Rollup for Windows 8.1 for x64-based Systems (KB4493446)",
      "State": "Installed",
      "InstalledTime": 1555804800.0
    },
    {
      "KBId": "KB4487080",
      "Severity": "Important",
      "Classification": "SecurityUpdates",
      "Title": "2019-02 Security and Quality Rollup for .NET Framework 3.5, 4.5.2, 4.6, 4.6.1, 4.6.2, 4.7, 4.7.1, 4.7.2 for Windows 8.1 (KB4487080)",
      "State": "Installed",
      "InstalledTime": -62135596800.0
    }
  ]
}
So my boto3 snippet gives me this error:
response = client.describe_instance_patches(InstanceId=instance_id, Filters=[{'Key': 'State','Values': ['Installed',]}, ])
File "/usr/local/lib/python2.7/dist-packages/botocore/client.py", line 357, in _api_call
return self._make_api_call(operation_name, kwargs)
File "/usr/local/lib/python2.7/dist-packages/botocore/client.py", line 648, in _make_api_call
operation_model, request_dict, request_context)
File "/usr/local/lib/python2.7/dist-packages/botocore/client.py", line 667, in _make_request
return self._endpoint.make_request(operation_model, request_dict)
File "/usr/local/lib/python2.7/dist-packages/botocore/endpoint.py", line 102, in make_request
return self._send_request(request_dict, operation_model)
File "/usr/local/lib/python2.7/dist-packages/botocore/endpoint.py", line 135, in _send_request
request, operation_model, context)
File "/usr/local/lib/python2.7/dist-packages/botocore/endpoint.py", line 167, in _get_response
request, operation_model)
File "/usr/local/lib/python2.7/dist-packages/botocore/endpoint.py", line 218, in _do_get_response
response_dict, operation_model.output_shape)
File "/usr/local/lib/python2.7/dist-packages/botocore/parsers.py", line 242, in parse
parsed = self._do_parse(response, shape)
File "/usr/local/lib/python2.7/dist-packages/botocore/parsers.py", line 740, in _do_parse
parsed = self._handle_json_body(response['body'], shape)
File "/usr/local/lib/python2.7/dist-packages/botocore/parsers.py", line 761, in _handle_json_body
return self._parse_shape(shape, parsed_json)
File "/usr/local/lib/python2.7/dist-packages/botocore/parsers.py", line 302, in _parse_shape
return handler(shape, node)
File "/usr/local/lib/python2.7/dist-packages/botocore/parsers.py", line 572, in _handle_structure
raw_value)
File "/usr/local/lib/python2.7/dist-packages/botocore/parsers.py", line 302, in _parse_shape
return handler(shape, node)
File "/usr/local/lib/python2.7/dist-packages/botocore/parsers.py", line 310, in _handle_list
parsed.append(self._parse_shape(member_shape, item))
File "/usr/local/lib/python2.7/dist-packages/botocore/parsers.py", line 302, in _parse_shape
return handler(shape, node)
File "/usr/local/lib/python2.7/dist-packages/botocore/parsers.py", line 572, in _handle_structure
raw_value)
File "/usr/local/lib/python2.7/dist-packages/botocore/parsers.py", line 302, in _parse_shape
return handler(shape, node)
File "/usr/local/lib/python2.7/dist-packages/botocore/parsers.py", line 589, in _handle_timestamp
return self._timestamp_parser(value)
File "/usr/local/lib/python2.7/dist-packages/botocore/utils.py", line 558, in parse_timestamp
return datetime.datetime.fromtimestamp(value, tzlocal())
File "/usr/local/lib/python2.7/dist-packages/dateutil/tz/_common.py", line 144, in fromutc
return f(self, dt)
File "/usr/local/lib/python2.7/dist-packages/dateutil/tz/_common.py", line 258, in fromutc
dt_wall = self._fromutc(dt)
File "/usr/local/lib/python2.7/dist-packages/dateutil/tz/_common.py", line 222, in _fromutc
dtoff = dt.utcoffset()
File "/usr/local/lib/python2.7/dist-packages/dateutil/tz/tz.py", line 216, in utcoffset
if self._isdst(dt):
File "/usr/local/lib/python2.7/dist-packages/dateutil/tz/tz.py", line 288, in _isdst
if self.is_ambiguous(dt):
File "/usr/local/lib/python2.7/dist-packages/dateutil/tz/tz.py", line 250, in is_ambiguous
(naive_dst != self._naive_is_dst(dt - self._dst_saved)))
OverflowError: date value out of range
I need to get the installed patches of several instances, and I don't want the script to break when it finds a negative timestamp. How can I work around this? Can I use the filters to get only valid timestamps?
try:
    response = client.describe_instance_patches(InstanceId=instance_id, Filters=[{'Key': 'State', 'Values': ['Installed']}])
except OverflowError:
    # Handle the exception as you want: print a log, ignore, whatever.
    pass
I got this working by using subprocess with the AWS CLI instead of boto3:
response = subprocess.Popen(["aws", "ssm", "describe-instance-patches", "--instance-id", instance_id, "--filters", "Key=State,Values=Installed", "--profile", "prod", "--output", "json"], stdout=subprocess.PIPE).stdout.read()
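For what it's worth, -62135596800.0 is 0001-01-01T00:00:00Z (.NET's DateTime.MinValue), i.e. Windows reported no install time at all, and botocore chokes converting it to a local datetime. Building on the CLI approach above, a hedged sketch (Python 3 assumed) that parses the JSON itself and treats pre-epoch values as unknown:

import json
import subprocess
from datetime import datetime, timezone

raw = subprocess.run(
    ["aws", "ssm", "describe-instance-patches",
     "--instance-id", instance_id,
     "--filters", "Key=State,Values=Installed",
     "--output", "json"],
    capture_output=True, check=True, text=True,
).stdout

patches = json.loads(raw)["Patches"]
for patch in patches:
    ts = patch.get("InstalledTime")
    # Treat pre-epoch values such as -62135596800.0 as "no install time".
    patch["InstalledTime"] = (
        datetime.fromtimestamp(ts, tz=timezone.utc) if ts and ts > 0 else None
    )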

Django Postgres DeserializationError: Problem installing fixture

I am trying to import my development database fixtures into my production server. The migrations have been run, and I tried slimming the fixture down to just the two objects shown below. This is the command and the error I am receiving:
Error
(env) root#django-01:/home/projects/server/mysite# python3 local.py loaddata db.json
Traceback (most recent call last):
File "/home/projects/server/env/lib/python3.5/site-packages/django/core/serializers/json.py", line 68, in Deserializer
objects = json.loads(stream_or_string)
File "/usr/lib/python3.5/json/__init__.py", line 319, in loads
return _default_decoder.decode(s)
File "/usr/lib/python3.5/json/decoder.py", line 339, in decode
obj, end = self.raw_decode(s, idx=_w(s, 0).end())
File "/usr/lib/python3.5/json/decoder.py", line 357, in raw_decode
raise JSONDecodeError("Expecting value", s, err.value) from None
json.decoder.JSONDecodeError: Expecting value: line 19 column 1 (char 229)
The above exception was the direct cause of the following exception:
Traceback (most recent call last):
File "local.py", line 10, in <module>
execute_from_command_line(sys.argv)
File "/home/projects/server/env/lib/python3.5/site-packages/django/core/management/__init__.py", line 371, in execute_from_command_line
utility.execute()
File "/home/projects/server/env/lib/python3.5/site-packages/django/core/management/__init__.py", line 365, in execute
self.fetch_command(subcommand).run_from_argv(self.argv)
File "/home/projects/server/env/lib/python3.5/site-packages/django/core/management/base.py", line 288, in run_from_argv
self.execute(*args, **cmd_options)
File "/home/projects/server/env/lib/python3.5/site-packages/django/core/management/base.py", line 335, in execute
output = self.handle(*args, **options)
File "/home/projects/server/env/lib/python3.5/site-packages/django/core/management/commands/loaddata.py", line 72, in handle
self.loaddata(fixture_labels)
File "/home/projects/server/env/lib/python3.5/site-packages/django/core/management/commands/loaddata.py", line 113, in loaddata
self.load_label(fixture_label)
File "/home/projects/server/env/lib/python3.5/site-packages/django/core/management/commands/loaddata.py", line 168, in load_label
for obj in objects:
File "/home/projects/server/env/lib/python3.5/site-packages/django/core/serializers/json.py", line 73, in Deserializer
raise DeserializationError() from exc
django.core.serializers.base.DeserializationError: Problem installing fixture '/home/projects/server/mysite/db.json':
db.json
[
  {
    "model": "memberships.benefit",
    "pk": 1,
    "fields": {
      "name": "SSL Certificate"
    }
  },
  {
    "model": "memberships.benefit",
    "pk": 2,
    "fields": {
      "name": "No cancellation fee"
    }
  },
]
You need to use proper JSON format: remove the trailing comma from your JSON. Unlike Python, JSON does not allow a comma after the last element of an array.
[
  {
    "model": "memberships.benefit",
    "pk": 1,
    "fields": {
      "name": "SSL Certificate"
    }
  },
  {
    "model": "memberships.benefit",
    "pk": 2,
    "fields": {
      "name": "No cancellation fee"
    }
  }
]
Hope it helps.
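A quick way to catch this before running loaddata is to parse the fixture with Python's own json module, which rejects trailing commas and reports the offending line and column, just like the "Expecting value: line 19 column 1" in the traceback above:

import json

# Raises json.decoder.JSONDecodeError, pointing at the exact line and
# column, if db.json is not valid JSON (e.g. has a trailing comma).
with open("db.json") as f:
    json.load(f)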

ADL LRS saves statement but responds with HTTP 500

I've installed ADL LRS following the README.txt at https://github.com/adlnet/ADL_LRS.
I'm testing it with TinCanJS (https://github.com/RusticiSoftware/TinCanJS) using the code below; the statement gets saved, but the response code is 500.
lrs = new TinCan.LRS(
    {
        ////// This works well:
        // endpoint: "https://cloud.scorm.com/tc/public/",
        // username: "<Test User>",
        // password: "<Test Password>",
        // allowFail: false

        ////// This returns Internal Error (500):
        endpoint: "my.server:8000/xapi/",
        username: "<My User>",
        password: "<My Password>",
        allowFail: false
    }
);
var statement = new TinCan.Statement(
    {
        actor: {
            "name": "Ac Tor",
            "account": {
                "homePage": "http://act.or",
                "name": "actor"
            }
        },
        verb: {
            id: "http://adlnet.gov/expapi/verbs/experienced"
        },
        target: {
            id: "http://rusticisoftware.github.com/TinCanJS"
        }
    }
);
lrs.saveStatement(
    statement,
    {
        callback: function (err, xhr) {
            //...
        }
    }
);
Log at /ADL_LRS/../logs/lrs.log:
2017-03-15 10:33:31,248 [ERROR] lrs.views: 500 === /xapi/statements
Traceback (most recent call last):
File "/home/kursat/ADL_LRS/lrs/views.py", line 214, in handle_request
return processors[path][req_dict['method']](req_dict)
File "/home/kursat/ADL_LRS/lrs/utils/req_process.py", line 164, in statements_put
check_activity_metadata.delay(stmt_ids)
File "/home/kursat/env/local/lib/python2.7/site-packages/celery/app/task.py", line 453, in delay
return self.apply_async(args, kwargs)
File "/home/kursat/env/local/lib/python2.7/site-packages/celery/app/task.py", line 560, in apply_async
**dict(self._get_exec_options(), **options)
File "/home/kursat/env/local/lib/python2.7/site-packages/celery/app/base.py", line 354, in send_task
reply_to=reply_to or self.oid, **options
File "/home/kursat/env/local/lib/python2.7/site-packages/celery/app/amqp.py", line 305, in publish_task
**kwargs
File "/home/kursat/env/local/lib/python2.7/site-packages/kombu/messaging.py", line 172, in publish
routing_key, mandatory, immediate, exchange, declare)
File "/home/kursat/env/local/lib/python2.7/site-packages/kombu/connection.py", line 470, in _ensured
interval_max)
File "/home/kursat/env/local/lib/python2.7/site-packages/kombu/connection.py", line 382, in ensure_connection
interval_start, interval_step, interval_max, callback)
File "/home/kursat/env/local/lib/python2.7/site-packages/kombu/utils/__init__.py", line 246, in retry_over_time
return fun(*args, **kwargs)
File "/home/kursat/env/local/lib/python2.7/site-packages/kombu/connection.py", line 250, in connect
return self.connection
File "/home/kursat/env/local/lib/python2.7/site-packages/kombu/connection.py", line 756, in connection
self._connection = self._establish_connection()
File "/home/kursat/env/local/lib/python2.7/site-packages/kombu/connection.py", line 711, in _establish_connection
conn = self.transport.establish_connection()
File "/home/kursat/env/local/lib/python2.7/site-packages/kombu/transport/pyamqp.py", line 116, in establish_connection
conn = self.Connection(**opts)
File "/home/kursat/env/local/lib/python2.7/site-packages/amqp/connection.py", line 183, in __init__
return self._x_open(virtual_host)
File "/home/kursat/env/local/lib/python2.7/site-packages/amqp/connection.py", line 628, in _x_open
(10, 41), # Connection.open_ok
File "/home/kursat/env/local/lib/python2.7/site-packages/amqp/abstract_channel.py", line 67, in wait
self.channel_id, allowed_methods, timeout)
File "/home/kursat/env/local/lib/python2.7/site-packages/amqp/connection.py", line 241, in _wait_method
channel, method_sig, args, content = read_timeout(timeout)
File "/home/kursat/env/local/lib/python2.7/site-packages/amqp/connection.py", line 330, in read_timeout
return self.method_reader.read_method()
File "/home/kursat/env/local/lib/python2.7/site-packages/amqp/method_framing.py", line 189, in read_method
raise m
error: [Errno 104] Connection reset by peer
Thanks in advance.
There are log files in the log/ folder created during installation of the LRS. Inside there is an lrs.log file that will tell you what is causing the 500 error. I was able to send statements to a local LRS instance with no 500 error. Please respond back with the error so I can help. Thanks
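Judging by the posted lrs.log, the statement itself is saved, but the post-save Celery task (check_activity_metadata.delay) cannot reach its message broker: the kombu/amqp frames end in [Errno 104] Connection reset by peer, which usually means RabbitMQ is down or misconfigured. A minimal connectivity check, as a sketch (the broker URL here is an assumption; substitute the BROKER_URL from your ADL_LRS settings):

from kombu import Connection

# Hypothetical broker URL; use the BROKER_URL from your LRS settings.
with Connection("amqp://guest:guest@localhost:5672//") as conn:
    conn.ensure_connection(max_retries=1)  # raises if the broker is unreachable
    print("broker reachable")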
Unless this is just a quirk of TinCanJS, I would think target should be object.