Images not labeled after import (Vertex AI image object detection)

I'm using this example to import my images and annotation data (a JSONL file) into my Vertex AI dataset:
https://cloud.google.com/vertex-ai/docs/samples/aiplatform-import-data-image-object-detection-sample
from google.cloud import aiplatform
import os

# https://cloud.google.com/vertex-ai/docs/samples/aiplatform-import-data-image-object-detection-sample
def import_data_image_object_detection_sample(
    project: str = proj,
    dataset_id: str = datasetID,
    gcs_source_uri: str = cloudURIToInputFile,
    location: str = "us-central1",
    api_endpoint: str = "us-central1-aiplatform.googleapis.com",
    timeout: int = 1800,
):
    # The AI Platform services require regional API endpoints.
    client_options = {"api_endpoint": api_endpoint}
    # Initialize the client that will be used to create and send requests.
    # This client only needs to be created once, and can be reused for multiple requests.
    client = aiplatform.gapic.DatasetServiceClient(client_options=client_options)
    import_configs = [
        {
            "gcs_source": {"uris": [gcs_source_uri]},
            "import_schema_uri": "gs://google-cloud-aiplatform/schema/dataset/ioformat/image_bounding_box_io_format_1.0.0.yaml",
        }
    ]
    name = client.dataset_path(project=project, location=location, dataset=dataset_id)
    response = client.import_data(name=name, import_configs=import_configs)
    print("Long-running operation:", response.operation.name)
    import_data_response = response.result(timeout=timeout)
    print("import_data_response:", import_data_response)

import_data_image_object_detection_sample()
After I run this, all of the images are uploaded successfully, but none of the labels show up on the images. I was able to import them all with the labels the other day, so I am not sure what is happening now.
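For reference, each line of my JSONL is meant to follow the bounding-box import schema referenced above. To my understanding (field names from the docs; the bucket path and label are made up), a valid line looks like this, with coordinates normalized to [0, 1]:

{"imageGcsUri": "gs://my-bucket/images/img001.jpg", "boundingBoxAnnotations": [{"displayName": "cat", "xMin": 0.1, "yMin": 0.2, "xMax": 0.6, "yMax": 0.9}]}

If the field names or coordinates in my file drift from this schema, the images would still import but the labels could be dropped silently. Thanks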

Webhook Payload Result is null in Dialogflow but not in Postman or Locally

I am failing to send my personal banking data from the Nordigen API to Dialogflow via a Flask fulfillment webhook, as only null is received within the Dialogflow payload:
{
    "fulfillmentText": "Your text response",
    "fulfillmentMessages": [
        {
            "payload": [
                null
            ]
        }
    ]
}
The webhook error message is: Webhook call failed. Error: Failed to parse webhook JSON response: Expect a map object but found: [null].
When I just send the data as a fulfillmentText, I receive "fulfillmentText": null.
I have tested my webhook with Postman, and there (as well as locally and in other webhook-style tests) everything is fine: I receive my most recent banking data.
The overall flow is twofold and simple:
The user gets the correct bank- and user-specific login link to a specified bank and copies & pastes it to perform the banking login via query_text = 'login'.
After a successful banking login, the user can fetch different kinds of banking data (like the balance) via query_text = 'balance'.
I went overboard engineering the Flask webhook and tried many different things, like asynchronous functions, deploying my Flask app to Heroku, and CORS. I even implemented an OAuth2 process where the user sends query_text = 'google auth' and initiates the OAuth2 flow in a step 0), using OAuth2 credentials and the Flask-Dance Python package. (I have hardcoded the OAuth2 redirect link, but this shouldn't be an issue at the moment.) I also tried to work around Dialogflow by creating a small SQLite3 database within my webhook to at least upload the data there and then use it, but without success.
So my question is: what am I missing here? Why do I receive my banking data everywhere else but not in Dialogflow? My intuition tells me Google is blocking this data for whatever reason.
Honestly, I just don't know how to continue, and I would appreciate any helpful comments!
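For reference, this is the response shape I believe Dialogflow expects: every entry in fulfillmentMessages must be a JSON object (a map), including any custom payload. A minimal sketch of how I could build it in the webhook (the key name "balance" is my own choice):

def balance_response(balance):
    # Each fulfillmentMessages entry must be a map; the custom payload
    # must itself be a map, not a list or a bare value.
    return {
        "fulfillmentText": str(balance),
        "fulfillmentMessages": [
            {"payload": {"balance": str(balance)}}
        ]
    }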
This is my Flask webhook:
from dialogflow_fulfillment import QuickReplies, WebhookClient, Payload
from flask import Flask, request, jsonify, make_response, session, render_template, redirect, url_for
from flask_cors import CORS, cross_origin
import json
from json import JSONEncoder
import os
import asyncio
import requests
import sqlite3
from app.src.handler_login import handler_login
from app.src.handler_balance import handler_balance
from app.banking_data.init_db import create_connection
from flask_dance.contrib.google import make_google_blueprint, google
from oauthlib.oauth2.rfc6749.errors import InvalidGrantError, TokenExpiredError, OAuth2Error
from google.cloud import dialogflow_v2beta1 as dialogflow
from google.oauth2 import service_account
from uuid import uuid4
from nordigen import NordigenClient

# NORDIGEN
# Credentials
secret_id = "XXX"
secret_key = "XXX"
# Configuration
institution_id = "XXX"

app = Flask(__name__)
# Set the Flask secret key
app.secret_key = os.environ.get("FLASK_SECRET_KEY", "supersekrit")

# GOOGLE API & AUTHENTICATION
app.config["GOOGLE_OAUTH_CLIENT_ID"] = "XXX"
app.config["GOOGLE_OAUTH_CLIENT_SECRET"] = "XXX"
os.environ['OAUTHLIB_INSECURE_TRANSPORT'] = "1"
os.environ['OAUTHLIB_RELAX_TOKEN_SCOPE'] = "1"
google_bp = make_google_blueprint(scope=["profile", "email"])
app.register_blueprint(google_bp, url_prefix="/login")

app.config['CORS_HEADERS'] = 'Content-Type'
cors = CORS(app, supports_credentials=True, resources={r"/webhook": {"origins": "*"}})

client = NordigenClient(
    secret_id=secret_id,
    secret_key=secret_key
)
client.generate_token()

# Subclass JSONEncoder so sets can be serialized
class setEncoder(JSONEncoder):
    def default(self, obj):
        return list(obj)

@app.route("/")
def index():
    if not google.authorized:
        return redirect(url_for("google.login"))
    try:
        resp = google.get("/oauth2/v1/userinfo")
        assert resp.ok, resp.text
        return "You are {email} on Google".format(email=resp.json()["email"])
    except (InvalidGrantError, TokenExpiredError) as e:  # or maybe any OAuth2Error
        return redirect(url_for("google.login"))

@app.route('/webhook', methods=['GET', 'POST', 'OPTIONS'])
async def webhook():
    """Handles webhook requests from Dialogflow."""
    req = request.get_json(force=True)
    query_text = req.get('queryResult').get('queryText')
    if query_text:
        if query_text == 'google auth':
            if not google.authorized:
                auth_link = 'MY HARD CODED GOOGLE AUTHENTICATION LINK HERE'
                auth_link = {
                    "fulfillmentText": auth_link,
                    "source": 'webhook'
                }
                return auth_link
            try:
                resp = google.get("/oauth2/v1/userinfo")
                assert resp.ok, resp.text
                return "You are {email} on Google".format(email=resp.json()["email"])
            except (InvalidGrantError, TokenExpiredError) as e:  # or maybe any OAuth2Error
                auth_link = 'MY HARD CODED GOOGLE AUTHENTICATION LINK HERE'
                auth_link = {
                    "fulfillmentText": auth_link,
                    "source": 'webhook'
                }
                return auth_link
        if query_text == 'login':
            link = await handler_login(client, institution_id, session)
            link = {
                "fulfillmentText": link,
                "source": 'webhook'
            }
            link = make_response(jsonify(link))
            link.headers.add('Access-Control-Allow-Origin', '*')
            return link
        if query_text == 'balance':
            balance = await handler_balance(client, session)
            balance = {
                "fulfillmentText": "Your text response",
                "fulfillmentMessages": [
                    {
                        "text": {
                            "text": [
                                "Your text response"
                            ]
                        }
                    },
                    {
                        "payload": {
                            balance
                        }
                    }
                ]
            }
            balance = json.dumps(balance, indent=4, cls=setEncoder)
            balance = make_response(balance)
            return balance

if __name__ == "__main__":
    app.run(debug=True)
Here are two helper functions I have created that perform the creation of the login link and the fetching of my banking data via Nordigen:
from uuid import uuid4

async def handler_login(client, institution_id, session):
    """Handles the webhook request."""
    # Initialize bank session
    init = client.initialize_session(
        # institution id
        institution_id=institution_id,
        # redirect url after successful authentication
        redirect_uri="https://nordigen.com",
        # additional layer of unique ID defined by you
        reference_id=str(uuid4())
    )
    link = init.link
    session["req_id"] = init.requisition_id
    return link

async def handler_balance(client, session):
    if "req_id" in session:
        # Get the account id after you have completed authorization with a bank;
        # requisition_id can be gathered from the initialize_session response
        #requisition_id = init.requisition_id
        accounts = client.requisition.get_requisition_by_id(
            requisition_id=session["req_id"]
        )
        # Get the account id from the list.
        account_id = accounts["accounts"][0]
        #account_id = accounts["id"]
        # Create an account instance and provide your account id from the previous step
        account = client.account_api(id=account_id)
        # Fetch account metadata
        #meta_data = account.get_metadata()
        # Fetch details
        #details = account.get_details()
        # Fetch balances
        balance = account.get_balances()
        balance = balance["balances"][0]
        balance = balance["balanceAmount"]["amount"]
        #balance = json.loads(balance)
        # Fetch transactions
        #transactions = account.get_transactions()
        #agent.add(Payload({'balance': balance}))
        return balance
Feel free to comment if you need any more input!

HTTP API requests take more than 20 seconds to execute

I have a Django application that can search for YouTube videos via the YouTube Data API v3 and translate their descriptions using the Yandex Translate API. Usually everything works fine and both services respond normally. But about once every 10 calls, the request drags on for about 20 seconds. This happens with both the translation and YouTube.
When I look in the browser developer console, the column "Waiting (Time to receive the first byte)" shows exactly those 20 seconds. I do not understand why: it is unlikely that the problem is on the services' side, because it occurs with both of them. But I can't figure out what my problem is then...
I tried setting DEBUG to False, as was advised somewhere, but the problem has not gone away.
Code of functions for receiving data from YouTube:
from googleapiclient.discovery import build
from google_auth_oauthlib.flow import InstalledAppFlow
from google.auth.transport.requests import Request
import os
import pickle

SCOPES = ["https://www.googleapis.com/auth/youtube.force-ssl"]

def auth():
    os.environ["OAUTHLIB_INSECURE_TRANSPORT"] = "1"
    api_service_name = "youtube"
    api_version = "v3"
    credentials_filename = "credentials.json"
    credentials = None
    if os.path.exists("token.pickle"):
        with open("token.pickle", "rb") as token:
            credentials = pickle.load(token)
    if not credentials or not credentials.valid:
        if credentials and credentials.expired and credentials.refresh_token:
            credentials.refresh(Request())
        else:
            flow = InstalledAppFlow.from_client_secrets_file(credentials_filename, SCOPES)
            credentials = flow.run_local_server(port=8000)
        with open("token.pickle", "wb") as token:
            pickle.dump(credentials, token)
    return build(api_service_name, api_version, credentials=credentials)

def search(youtube, **kwargs):
    return youtube.search().list(part="snippet", **kwargs).execute()
Their call code:
yt = auth()
response = search(yt, q=request.POST['search'], maxResults=20, type='video')['items']
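To narrow down which of the two calls is actually slow, I time them separately; a quick diagnostic sketch (the print labels are mine):

import time

start = time.perf_counter()
yt = auth()
print("auth took %.2fs" % (time.perf_counter() - start))

start = time.perf_counter()
response = search(yt, q=request.POST['search'], maxResults=20, type='video')['items']
print("search took %.2fs" % (time.perf_counter() - start))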
Function code for Yandex API:
import requests

# (This fragment runs inside a translator class method: texts, language,
# self.folder_id and self.token come from the instance.)
endpoint_url = 'https://translate.api.cloud.yandex.net/translate/v2/translate'
list_texts = [str(texts[key]) for key in texts]
body = {
    "targetLanguageCode": language,
    "texts": list_texts,
    "folderId": self.folder_id,
}
headers = {
    "Content-Type": "application/json",
    "Authorization": "Bearer {0}".format(self.token)
}
response = requests.post(endpoint_url, json=body, headers=headers)
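Since the 20 seconds show up as time-to-first-byte, I am considering reusing a single session with an explicit timeout and retries, to rule out per-request connection setup. A sketch, assuming the standard retry machinery that ships with requests/urllib3:

import requests
from requests.adapters import HTTPAdapter
from urllib3.util.retry import Retry

session = requests.Session()
retries = Retry(total=3, backoff_factor=0.5, status_forcelist=[500, 502, 503, 504])
session.mount("https://", HTTPAdapter(max_retries=retries))

# 3.05s to connect, 10s to the first byte; this raises instead of hanging for 20s
response = session.post(endpoint_url, json=body, headers=headers, timeout=(3.05, 10))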

Cannot fix the start_manual_transfer_runs method of bigquery_datatransfer_v1

I am trying to run a scheduled query only when a table updates in BigQuery. For this I am trying to make this Python code work in Cloud Functions, but it is giving me an error. I would highly appreciate any help.
I am running this Python code:
import time
from google.protobuf.timestamp_pb2 import Timestamp
from google.cloud import bigquery_datatransfer_v1

def runQuery(parent, requested_run_time):
    client = bigquery_datatransfer_v1.DataTransferServiceClient()
    projectid = '629586xxxx'  # Enter your projectID here
    transferid = '60cc15f8-xxxx-xxxx-8ba2-xxxxx41bc'  # Enter your transferId here
    parent = client.transfer_config_path(projectid, transferid)
    start_time = Timestamp(seconds=int(time.time() + 10))
    response = client.start_manual_transfer_runs(parent, requested_run_time=start_time)
    print(response)
I get this error:
start_manual_transfer_runs() got an unexpected keyword argument 'requested_run_time'
I did this using a Cloud Function and it worked for me.
import time
from google.protobuf.timestamp_pb2 import Timestamp
from google.cloud import bigquery_datatransfer_v1

def runQuery(parent, requested_run_time):
    client = bigquery_datatransfer_v1.DataTransferServiceClient()
    PROJECT_ID = 'graphical-reach-285218'
    TRANSFER_CONFIG_ID = '61adfc39-0000-206b-a7b0-089e08324288'
    parent = client.project_transfer_config_path(PROJECT_ID, TRANSFER_CONFIG_ID)
    start_time = bigquery_datatransfer_v1.types.Timestamp(seconds=int(time.time() + 10))
    response = client.start_manual_transfer_runs(parent, requested_run_time=start_time)
    print(parent)
    print(response)
Replace these with your project id and transfer config id. The above code goes in main.py, and in requirements.txt please keep:
google-cloud-bigquery-datatransfer==1
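The unexpected keyword argument error usually means a newer client library (2.x+), where methods take a single request object instead of positional arguments. If pinning to version 1 is not an option, a hedged sketch of what I believe the equivalent call looks like on the 2.x surface (project and transfer config ids are hypothetical):

import time
from google.protobuf.timestamp_pb2 import Timestamp
from google.cloud import bigquery_datatransfer_v1

client = bigquery_datatransfer_v1.DataTransferServiceClient()
parent = client.transfer_config_path("my-project", "my-transfer-config-id")
# The 2.x client wraps all arguments in a request object
request = bigquery_datatransfer_v1.types.StartManualTransferRunsRequest(
    parent=parent,
    requested_run_time=Timestamp(seconds=int(time.time() + 10)),
)
response = client.start_manual_transfer_runs(request=request)
print(response)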

AWS CloudWatch Logs to Azure Log Analytics

I am aware of the HTTP Data Collector API that can be used to push data into Azure Log Analytics; my question here is about getting AWS CloudWatch data into Azure. We have an Azure-hosted application and external AWS-hosted serverless Lambda functions, and we want to import the logs of those 13 serverless functions into Azure. I know from the documentation that there is a Python function that can be used as an AWS Lambda function, and the Python example is in the Microsoft documentation. But what I am failing to understand is what JSON format the AWS log collector needs to create so it can send the data to Azure Log Analytics. Any examples of this? Any help on how this can be done? I have come across this blog as well, but it is Splunk-specific: https://www.splunk.com/blog/2017/02/03/how-to-easily-stream-aws-cloudwatch-logs-to-splunk.html
Never mind, I was able to dig a little deeper and found that in AWS I can stream the logs from one Lambda to another Lambda function through a subscription. Once that was set up, all I did was consume that stream, create the JSON on the fly, and send it to Azure Logs.
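The subscription wiring itself can be done with boto3; a hedged sketch (the log group name and destination ARN are hypothetical, and the CloudWatch Logs service also needs lambda:InvokeFunction permission on the destination function):

import boto3

logs = boto3.client("logs")
logs.put_subscription_filter(
    logGroupName="/aws/lambda/my-source-function",  # hypothetical source log group
    filterName="stream-to-azure-forwarder",
    filterPattern="",  # an empty pattern forwards every log event
    destinationArn="arn:aws:lambda:us-east-1:123456789012:function:azure-forwarder",  # hypothetical
)

In case you or anyone else is interested, the following is the forwarding code: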
import json
import hashlib
import hmac
import base64
import boto3
import gzip
from botocore.vendored import requests
from datetime import datetime

# Update the customer ID to your Log Analytics workspace ID
customer_id = "XXXXXXXYYYYYYYYYYYYZZZZZZZZZZ"

# For the shared key, use either the primary or the secondary Connected Sources client authentication key
shared_key = "XXXXXXXXXXXXXXXXXXXXXXXXXX"

# The log type is the name of the event that is being submitted
log_type = 'AWSLambdafuncLogReal'

json_data = [{
    "slot_ID": 12345,
    "ID": "5cdad72f-c848-4df0-8aaa-ffe033e75d57",
    "availability_Value": 100,
    "performance_Value": 6.954,
    "measurement_Name": "last_one_hour",
    "duration": 3600,
    "warning_Threshold": 0,
    "critical_Threshold": 0,
    "IsActive": "true"
},
{
    "slot_ID": 67890,
    "ID": "b6bee458-fb65-492e-996d-61c4d7fbb942",
    "availability_Value": 100,
    "performance_Value": 3.379,
    "measurement_Name": "last_one_hour",
    "duration": 3600,
    "warning_Threshold": 0,
    "critical_Threshold": 0,
    "IsActive": "false"
}]
#body = json.dumps(json_data)

#####################
######Functions######
#####################

# Build the API signature
def build_signature(customer_id, shared_key, date, content_length, method, content_type, resource):
    x_headers = 'x-ms-date:' + date
    string_to_hash = method + "\n" + str(content_length) + "\n" + content_type + "\n" + x_headers + "\n" + resource
    bytes_to_hash = bytes(string_to_hash, encoding="utf-8")
    decoded_key = base64.b64decode(shared_key)
    encoded_hash = base64.b64encode(
        hmac.new(decoded_key, bytes_to_hash, digestmod=hashlib.sha256).digest()).decode()
    authorization = "SharedKey {}:{}".format(customer_id, encoded_hash)
    return authorization

# Build and send a request to the POST API
def post_data(customer_id, shared_key, body, log_type):
    method = 'POST'
    content_type = 'application/json'
    resource = '/api/logs'
    rfc1123date = datetime.utcnow().strftime('%a, %d %b %Y %H:%M:%S GMT')
    print(rfc1123date)
    content_length = len(body)
    signature = build_signature(customer_id, shared_key, rfc1123date, content_length, method, content_type, resource)
    uri = 'https://' + customer_id + '.ods.opinsights.azure.com' + resource + '?api-version=2016-04-01'
    headers = {
        'content-type': content_type,
        'Authorization': signature,
        'Log-Type': log_type,
        'x-ms-date': rfc1123date
    }
    response = requests.post(uri, data=body, headers=headers)
    if (response.status_code >= 200 and response.status_code <= 299):
        print("Accepted")
    else:
        print("Response code: {}".format(response.status_code))
        print(response.text)

def lambda_handler(event, context):
    cloudwatch_event = event["awslogs"]["data"]
    decode_base64 = base64.b64decode(cloudwatch_event)
    decompress_data = gzip.decompress(decode_base64)
    log_data = json.loads(decompress_data)
    print(log_data)
    awslogdata = json.dumps(log_data)
    post_data(customer_id, shared_key, awslogdata, log_type)
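For reference, the decompressed subscription payload that lambda_handler forwards has roughly this shape, to my understanding (all values made up):

{
    "messageType": "DATA_MESSAGE",
    "owner": "123456789012",
    "logGroup": "/aws/lambda/my-source-function",
    "logStream": "2020/01/01/[$LATEST]abcdef123456",
    "subscriptionFilters": ["stream-to-azure-forwarder"],
    "logEvents": [
        {"id": "36057940...", "timestamp": 1577836800000, "message": "START RequestId: ..."}
    ]
}

Azure Log Analytics just needs valid JSON in the request body; the Log-Type header determines the custom log table the records land in.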

Deleting a video from YouTube with the YouTube Data API v3 and Python

I'm developing an application using Django and AngularJS.
One of the major things the worker server (coded in Python, Flask) does is download videos from S3 (which are uploaded by users) and upload those videos to YouTube.
Is there a way to delete a YouTube video in Python?
There is no such code example written in Python.
Does anyone know how to do this simply, like the code example below?
This is the sample code for uploading a video. I referred to this code and implemented the uploading feature.
def get_authenticated_service(args):
    flow = flow_from_clientsecrets(CLIENT_SECRETS_FILE,
        scope=YOUTUBE_UPLOAD_SCOPE,
        message=MISSING_CLIENT_SECRETS_MESSAGE)
    storage = Storage("%s-oauth2.json" % sys.argv[0])
    credentials = storage.get()
    if credentials is None or credentials.invalid:
        credentials = run_flow(flow, storage, args)
    return build(YOUTUBE_API_SERVICE_NAME, YOUTUBE_API_VERSION,
        http=credentials.authorize(httplib2.Http()))

def initialize_upload(youtube, options):
    tags = None
    if options.keywords:
        tags = options.keywords.split(",")
    body = dict(
        snippet=dict(
            title=options.title,
            description=options.description,
            tags=tags,
            categoryId=options.category
        ),
        status=dict(
            privacyStatus=options.privacyStatus
        )
    )
    # Call the API's videos.insert method to create and upload the video.
    insert_request = youtube.videos().insert(
        part=",".join(body.keys()),
        body=body,
        media_body=MediaFileUpload(options.file, chunksize=-1, resumable=True)
    )
    resumable_upload(insert_request)
Make a file called: delete_video.py
Usage: python delete_video.py --id=MY_VID_ID
#!/usr/bin/python

import httplib
import httplib2
import os
import random
import sys
import time

from apiclient.discovery import build
from apiclient.errors import HttpError
from apiclient.http import MediaFileUpload
from oauth2client.client import flow_from_clientsecrets
from oauth2client.file import Storage
from oauth2client.tools import argparser, run_flow

# Explicitly tell the underlying HTTP transport library not to retry, since
# we are handling retry logic ourselves.
httplib2.RETRIES = 1

# Maximum number of times to retry before giving up.
MAX_RETRIES = 10

# Always retry when these exceptions are raised.
RETRIABLE_EXCEPTIONS = (httplib2.HttpLib2Error, IOError, httplib.NotConnected,
    httplib.IncompleteRead, httplib.ImproperConnectionState,
    httplib.CannotSendRequest, httplib.CannotSendHeader,
    httplib.ResponseNotReady, httplib.BadStatusLine)

# Always retry when an apiclient.errors.HttpError with one of these status
# codes is raised.
RETRIABLE_STATUS_CODES = [500, 502, 503, 504]

# The CLIENT_SECRETS_FILE variable specifies the name of a file that contains
# the OAuth 2.0 information for this application, including its client_id and
# client_secret. You can acquire an OAuth 2.0 client ID and client secret from
# the Google Developers Console at
# https://console.developers.google.com/.
# Please ensure that you have enabled the YouTube Data API for your project.
# For more information about using OAuth2 to access the YouTube Data API, see:
# https://developers.google.com/youtube/v3/guides/authentication
# For more information about the client_secrets.json file format, see:
# https://developers.google.com/api-client-library/python/guide/aaa_client_secrets
CLIENT_SECRETS_FILE = "client_secrets.json"

# This OAuth 2.0 access scope allows full read/write access to the
# authenticated user's YouTube account, which is required to delete videos.
YOUTUBE_DELETE_SCOPE = "https://www.googleapis.com/auth/youtube"
YOUTUBE_API_SERVICE_NAME = "youtube"
YOUTUBE_API_VERSION = "v3"

# This variable defines a message to display if the CLIENT_SECRETS_FILE is
# missing.
MISSING_CLIENT_SECRETS_MESSAGE = """
WARNING: Please configure OAuth 2.0

To make this sample run you will need to populate the client_secrets.json file
found at:

   %s

with information from the Developers Console
https://console.developers.google.com/

For more information about the client_secrets.json file format, please visit:
https://developers.google.com/api-client-library/python/guide/aaa_client_secrets
""" % os.path.abspath(os.path.join(os.path.dirname(__file__),
    CLIENT_SECRETS_FILE))

VALID_PRIVACY_STATUSES = ("public", "private", "unlisted")

def get_authenticated_service(args):
    flow = flow_from_clientsecrets(CLIENT_SECRETS_FILE,
        scope=YOUTUBE_DELETE_SCOPE,
        message=MISSING_CLIENT_SECRETS_MESSAGE)
    storage = Storage("%s-oauth2.json" % sys.argv[0])
    credentials = storage.get()
    if credentials is None or credentials.invalid:
        credentials = run_flow(flow, storage, args)
    return build(YOUTUBE_API_SERVICE_NAME, YOUTUBE_API_VERSION,
        http=credentials.authorize(httplib2.Http()))

if __name__ == '__main__':
    argparser.add_argument("--id", required=True, help="Video youtube ID")
    args = argparser.parse_args()
    if not args.id:
        exit("Please specify a youtube ID using the --id= parameter.")
    youtube = get_authenticated_service(args)
    try:
        resp = youtube.videos().delete(id=args.id, onBehalfOfContentOwner=None).execute()
    except HttpError, e:
        print "An HTTP error %d occurred:\n%s" % (e.resp.status, e.content)
Assuming that you are using the Python client library, I found this in the documentation:

delete(id=*, onBehalfOfContentOwner=None)
  Deletes a YouTube video.

  Args:
    id: string, The id parameter specifies the YouTube video ID for the resource that is being deleted. In a video resource, the id property specifies the video's ID. (required)
    onBehalfOfContentOwner: string, Note: This parameter is intended exclusively for YouTube content partners.