I am posting data to the OpenSearch Service via a Lambda function, but when I open the OpenSearch Service endpoint URL to check the index, I get the error below.
{
"error" : "no handler found for uri [/lambda-s3-index/lambda-type/_search] and method [GET]"
}
I tried printing the response while posting and I get a 400. Below is the code:
import boto3
import requests
from requests_aws4auth import AWS4Auth
import os
import json
import datetime
region = 'us-east-1'
service = 'es'
credentials = boto3.Session().get_credentials()
awsauth = AWS4Auth(credentials.access_key, credentials.secret_key, region, service, session_token=credentials.token)
index = 'lambda-s3-index'
type = 'lambda-type'
host = os.environ['ES_DOMAIN_URL']
url = host + '/' + index + '/' + type
headers = { "Content-Type": "application/json" }
s3 = boto3.client('s3')
bucket = os.environ['S3_BUCKET']
# Lambda execution starts here
def handler(event, context):
    sensorID = event['sensorID']
    timestamp = datetime.datetime.now().strftime("%Y%m%d%H%M%S")
    temperature = event['temperature']
    document = { "sensorID": sensorID, "timestamp": timestamp, "temperature": temperature }
    print(document)
    # post to S3 for storage
    s3.put_object(Body=json.dumps(document).encode(), Bucket=bucket, Key=sensorID+"-"+timestamp+".json")
    # post to amazon elastic search for indexing and kibana use
    r = requests.post(url, auth=awsauth, json=document, headers=headers)
    print(r)
    response = "Data Uploaded"
    return {
        "Response" : response,
        "sensorID" : sensorID,
        "temperature": temperature
    }
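For reference, custom mapping types such as lambda-type were removed in Elasticsearch 7.x and are not accepted by OpenSearch, which would be consistent with both the 400 on the POST and the "no handler found" error when browsing /lambda-s3-index/lambda-type/_search. A minimal sketch of the same request against the built-in _doc endpoint (assuming an OpenSearch / ES 7+ domain; the sample document values are made up):
import os
import boto3
import requests
from requests_aws4auth import AWS4Auth

credentials = boto3.Session().get_credentials()
awsauth = AWS4Auth(credentials.access_key, credentials.secret_key, 'us-east-1', 'es',
                   session_token=credentials.token)
host = os.environ['ES_DOMAIN_URL']                      # same env var as above
url = host + '/lambda-s3-index/_doc'                    # _doc instead of a custom type
document = {"sensorID": "sensor-1", "timestamp": "20240101120000", "temperature": 21.5}
r = requests.post(url, auth=awsauth, json=document, headers={"Content-Type": "application/json"})
print(r.status_code, r.text)                            # print the body so the reason for a 400 is visible
# Searches then go to GET <host>/lambda-s3-index/_search (no type segment in the path).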
Related
I'm working on forwarding email received via AWS SES to a Slack webhook.
The workflow I tried is:
my personal email -> SES -> S3 -> Lambda -> POST Request
I've been stuck on this Lambda function since it's not sending the POST request to the webhook URL.
from ast import parse
import boto3
import ConfigParser
import urllib3
import json
from email.parser import FeedParser
from email.header import decode_header

http = urllib3.PoolManager()

def lambda_handler(event, context):
    try:
        record = event["Records"][0]
        bucket_region = record["awsRegion"]
        bucket_name = record["s3"]["bucket"]["name"]
        mail_object_key = record["s3"]["object"]["key"]
        s3 = boto3.client('s3', region_name=bucket_region)
        mail_object = s3.get_object(Bucket = bucket_name, Key = mail_object_key)
        mail_body = ''
        try:
            mail_body = mail_object["Body"].read().decode('utf-8')
        except:
            mail_body = mail_object["Body"].read()
        parser = FeedParser()
        parser.feed(mail_body)
        parsed_mail = parser.close()
        (d_sub, sub_charset) = decode_header(parsed_mail['Subject'])[0]
        subject = d_sub.decode(sub_charset)
        payload = parsed_mail.get_payload(decode=parsed_mail['Content-Transfer-Encoding'])
        body_charset = parsed_mail.get_content_charset()
        body = payload.decode(body_charset)
        url = "MY_SLACK_WEBHOOK_URL"
        msg = {
            "Content": parsed_mail
        }
        encoded_msg = json.dumps(msg).encode('utf-8')
        resp = http.request('POST', url, body=encoded_msg)
        print({
            "message": parsed_mail,
            "status_code": resp.status,
            "response": resp.data
        })
    except:
        print('Mail received, but I got some error.')
Could anyone please take a look at my code?
This is the CloudWatch log from when the Lambda event was triggered:
START RequestId: 00f3e7db-807e-48e9-a775-6f0117431b83 Version: $LATEST
Mail received, but I got some error.
END RequestId: 00f3e7db-807e-48e9-a775-6f0117431b83
REPORT RequestId: 00f3e7db-807e-48e9-a775-6f0117431b83 Duration: 1933.59 ms Billed Duration: 1934 ms Memory Size: 128 MB Max Memory Used: 74 MB Init Duration: 317.15 ms
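Two things stand out, offered as guesses since the bare except hides the real exception: Slack incoming webhooks expect a JSON payload with a "text" field, and parsed_mail is an email.message.Message object, which json.dumps cannot serialize, so the request most likely never fires. A minimal sketch (post_to_slack is a hypothetical helper that reuses the decoded subject and body from the code above and logs the traceback instead of swallowing it):
import json
import traceback
import urllib3

http = urllib3.PoolManager()
SLACK_WEBHOOK_URL = "MY_SLACK_WEBHOOK_URL"   # placeholder, as in the question

def post_to_slack(subject, body):
    """Send a plain-text message to a Slack incoming webhook."""
    try:
        # Incoming webhooks expect a JSON object with a "text" field; passing the raw
        # email.message.Message object to json.dumps raises TypeError.
        payload = {"text": "Subject: %s\n%s" % (subject, body)}
        resp = http.request("POST", SLACK_WEBHOOK_URL,
                            body=json.dumps(payload).encode("utf-8"),
                            headers={"Content-Type": "application/json"})
        return resp.status
    except Exception:
        # Log the full traceback so CloudWatch shows the real error instead of a generic message.
        print(traceback.format_exc())
        raise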
I'm getting this error in my CloudWatch logs:
botocore.exceptions.ClientError: An error occurred (UnrecognizedClientException) when calling the UpdateItem operation: The security token included in the request is invalid.
I'm authenticating the boto3 client for my DynamoDB table with an IAM user that has full admin access, and it's still giving me that error. Here's my application code for context:
from flask import Flask, Blueprint, render_template, request
import requests
import boto3
import os

AWS_ACCESS_KEY_ID = os.environ['AWS_ACCESS_KEY_ID']
AWS_SECRET_ACCESS_KEY = os.environ['AWS_SECRET_ACCESS_KEY']

pages = Blueprint('pages', __name__)
app = Flask(__name__)

table_name = "counterdb"
dynamodb = boto3.resource("dynamodb", region_name="us-west-2", aws_access_key_id=AWS_ACCESS_KEY_ID, aws_secret_access_key=AWS_SECRET_ACCESS_KEY)
db_client = boto3.client("dynamodb", region_name="us-west-2", aws_access_key_id=AWS_ACCESS_KEY_ID, aws_secret_access_key=AWS_SECRET_ACCESS_KEY)
table = dynamodb.Table(table_name)

def increment_visitor():
    response = db_client.update_item(
        TableName=table_name,
        Key = {
            'Primary Key': {
                'N': "0"
            }
        },
        ExpressionAttributeValues = { ":inc": {"N": "1"}},
        UpdateExpression = "ADD visitor :inc"
    )

def retrieve_visitor_count():
    item = table.get_item(
        Key = {
            "Primary Key": 0
        }
    )
    visitcount = (item["Item"])["visitor"]
    return visitcount

def handler():
    increment_visitor()
    return retrieve_visitor_count()

# @app.before_first_request
# def before_first_request():
#     dbcounter = handler()
#     print(dbcounter)

@app.route('/')
def home():
    return render_template("index.html")

if __name__ == "__main__":
    handler()
    app.run(host='0.0.0.0', port=80)
I originally didn't have the region specified and was told that could be the issue, so I added a region as well. I got the same error with and without specifying the region in my boto3 client. I triple-checked and it's the correct IAM user that I'm using, with full admin access. Do I need more permissions, or is a different issue causing the error?
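UnrecognizedClientException usually means the access key / secret key pair that boto3 ended up with is not valid at all (wrong value, deactivated key, stray whitespace, or temporary credentials missing their session token), rather than a missing permission. A quick hedged check, using the same environment variables as the Flask app, to confirm which identity those keys actually resolve to:
import os
import boto3

# Build an STS client from the same environment variables the Flask app uses.
sts = boto3.client(
    "sts",
    region_name="us-west-2",
    aws_access_key_id=os.environ["AWS_ACCESS_KEY_ID"],
    aws_secret_access_key=os.environ["AWS_SECRET_ACCESS_KEY"],
)

# If this call fails with the same "security token is invalid" error,
# the key pair itself is the problem, not the DynamoDB permissions.
print(sts.get_caller_identity())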
I have a Lambda function that generates a list of untagged buckets in an AWS environment. Currently I send the output directly to a Slack channel. Instead, I would like to have my Lambda dump the output to a CSV file and send it as a report. Here is the code; let me know if you need any other details.
import boto3
from botocore.exceptions import ClientError
import urllib3
import json

http = urllib3.PoolManager()

def lambda_handler(event, context):
    # Printing the S3 buckets with no tags
    s3 = boto3.client('s3')
    s3_re = boto3.resource('s3')
    buckets = []
    print('Printing buckets with no tags..')
    for bucket in s3_re.buckets.all():
        s3_bucket = bucket
        s3_bucket_name = s3_bucket.name
        try:
            response = s3.get_bucket_tagging(Bucket=s3_bucket_name)
        except ClientError:
            buckets.append(bucket)
            print(bucket)
    for bucket in buckets:
        data = {"text": "%s bucket has no tags" % (bucket)}
        r = http.request("POST", "https://hooks.slack.com/services/~/~/~",
                         body = json.dumps(data),
                         headers = {"Content-Type": "application/json"})
I am aware of the HTTP Data Collector API that can be used to push data into Azure Log Analytics; my question here is about getting AWS CloudWatch data into Azure. We have an Azure-hosted application and external AWS-hosted serverless Lambda functions, and we want to import the logs of those 13 serverless functions into Azure. I know from the documentation that there is a Python function that can be used as an AWS Lambda function, and the Python example is in the MSFT documentation. But what I am failing to understand is what JSON format the AWS collector needs to create so it can send the data to Azure Log Analytics. Are there any examples of this, or any guidance on how it can be done? I have come across this blog as well, but it is Splunk-specific: https://www.splunk.com/blog/2017/02/03/how-to-easily-stream-aws-cloudwatch-logs-to-splunk.html
Never mind, I was able to dig a little deeper and found that in AWS I can stream the logs from one Lambda to another Lambda function through a subscription filter. Once that was set up, all I did was consume the stream, build the JSON on the fly, and send it to Azure Log Analytics. In case you or anyone else is interested, the code follows:
import json
import datetime
import hashlib
import hmac
import base64
import boto3
import gzip
from botocore.vendored import requests
from datetime import datetime

# Update the customer ID to your Log Analytics workspace ID
customer_id = "XXXXXXXYYYYYYYYYYYYZZZZZZZZZZ"

# For the shared key, use either the primary or the secondary Connected Sources client authentication key
shared_key = "XXXXXXXXXXXXXXXXXXXXXXXXXX"

# The log type is the name of the event that is being submitted
log_type = 'AWSLambdafuncLogReal'

json_data = [{
    "slot_ID": 12345,
    "ID": "5cdad72f-c848-4df0-8aaa-ffe033e75d57",
    "availability_Value": 100,
    "performance_Value": 6.954,
    "measurement_Name": "last_one_hour",
    "duration": 3600,
    "warning_Threshold": 0,
    "critical_Threshold": 0,
    "IsActive": "true"
},
{
    "slot_ID": 67890,
    "ID": "b6bee458-fb65-492e-996d-61c4d7fbb942",
    "availability_Value": 100,
    "performance_Value": 3.379,
    "measurement_Name": "last_one_hour",
    "duration": 3600,
    "warning_Threshold": 0,
    "critical_Threshold": 0,
    "IsActive": "false"
}]
#body = json.dumps(json_data)

#####################
######Functions######
#####################

# Build the API signature
def build_signature(customer_id, shared_key, date, content_length, method, content_type, resource):
    x_headers = 'x-ms-date:' + date
    string_to_hash = method + "\n" + str(content_length) + "\n" + content_type + "\n" + x_headers + "\n" + resource
    bytes_to_hash = bytes(string_to_hash, encoding="utf-8")
    decoded_key = base64.b64decode(shared_key)
    encoded_hash = base64.b64encode(
        hmac.new(decoded_key, bytes_to_hash, digestmod=hashlib.sha256).digest()).decode()
    authorization = "SharedKey {}:{}".format(customer_id, encoded_hash)
    return authorization

# Build and send a request to the POST API
def post_data(customer_id, shared_key, body, log_type):
    method = 'POST'
    content_type = 'application/json'
    resource = '/api/logs'
    rfc1123date = datetime.utcnow().strftime('%a, %d %b %Y %H:%M:%S GMT')
    print(rfc1123date)
    content_length = len(body)
    signature = build_signature(customer_id, shared_key, rfc1123date, content_length, method, content_type, resource)
    uri = 'https://' + customer_id + '.ods.opinsights.azure.com' + resource + '?api-version=2016-04-01'
    headers = {
        'content-type': content_type,
        'Authorization': signature,
        'Log-Type': log_type,
        'x-ms-date': rfc1123date
    }
    response = requests.post(uri, data=body, headers=headers)
    if (response.status_code >= 200 and response.status_code <= 299):
        print("Accepted")
    else:
        print("Response code: {}".format(response.status_code))
        print(response.text)

def lambda_handler(event, context):
    cloudwatch_event = event["awslogs"]["data"]
    decode_base64 = base64.b64decode(cloudwatch_event)
    decompress_data = gzip.decompress(decode_base64)
    log_data = json.loads(decompress_data)
    print(log_data)
    awslogdata = json.dumps(log_data)
    post_data(customer_id, shared_key, awslogdata, log_type)
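For completeness, the subscription that streams a function's logs into the forwarder Lambda can also be created programmatically; a hedged sketch with boto3 (the log group name and forwarder ARN are placeholders, and the permission statement is what lets CloudWatch Logs invoke the forwarder):
import boto3

logs = boto3.client("logs")
lam = boto3.client("lambda")

FORWARDER_ARN = "arn:aws:lambda:us-east-1:123456789012:function:forward-to-azure"  # placeholder
LOG_GROUP = "/aws/lambda/one-of-the-13-functions"                                  # placeholder

# Allow CloudWatch Logs to invoke the forwarder function.
lam.add_permission(
    FunctionName=FORWARDER_ARN,
    StatementId="cloudwatch-logs-invoke",
    Action="lambda:InvokeFunction",
    Principal="logs.amazonaws.com",
)

# Subscribe the log group to the forwarder; an empty filter pattern forwards every log event.
logs.put_subscription_filter(
    logGroupName=LOG_GROUP,
    filterName="forward-to-azure",
    filterPattern="",
    destinationArn=FORWARDER_ARN,
)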
I have created a Lambda function that sends an email whenever a file is uploaded to an S3 bucket, but now I want to include all the information related to that file: the name, size, date and time of upload, and, if possible, where it comes from.
I have all this information in the AWS console, but I want to have it in the email body.
I am using the Serverless Framework, v1.22.0.
Here is my code:
import json
import boto3
import botocore
import logging
import sys
import os
import traceback
from botocore.exceptions import ClientError
from pprint import pprint
from time import strftime, gmtime

email_from = '********@*****.com'
email_to = '********@*****.com'
email_subject = 'new event on s3 '
email_body = 'a new file is uploaded'

# set up simple logging for INFO
logger = logging.getLogger()
logger.setLevel(logging.INFO)

def sthree(event, context):
    """Send email whenever a file is uploaded to S3"""
    body = {}
    status_code = 200
    email_body = str(context)
    try:
        s3 = boto3.client('s3')
        ses = boto3.client('ses')
        ses.send_email(Source = email_from,
            Destination = {'ToAddresses': [email_to,],},
            Message = {'Subject': {'Data': email_subject}, 'Body': {'Text': {'Data': email_body}}}
        )
    except Exception as e:
        print(traceback.format_exc())
        status_code = 500
        body["message"] = str(e)   # exceptions are not JSON serializable, so store the message text
    response = {
        "statusCode": status_code,
        "body": json.dumps(body)
    }
    return response
Here is the event JSON structure sent by S3 upon object creation:
http://docs.aws.amazon.com/AmazonS3/latest/dev/notification-content-structure.html
You can get the file name, size, and source IP like this:
for record in event['Records']:
    filename = record['s3']['object']['key']
    filesize = record['s3']['object']['size']
    source = record['requestParameters']['sourceIPAddress']
    eventTime = record['eventTime']
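A full handler along those lines, building the email body from the event fields and sending it through SES: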
def lambda_handler(event, context):
    s3 = boto3.client('s3')
    email_from = 'XXXXXXXXX@XXX.com'
    email_to = 'XXXXXXXXX@XXX.com'
    email_subject = 'new event on s3'
    email_body = ("File Name :" + event[u'Records'][0][u's3'][u'object'][u'key'] + "\n"
                  + "File Size :" + str(event[u'Records'][0][u's3'][u'object'][u'size']) + "\n"
                  + "Upload Time :" + event[u'Records'][0][u'eventTime'] + "\n"
                  + "User Details :" + event[u'Records'][0][u'userIdentity'][u'principalId'])
    ses = boto3.client('ses')
    ses.send_email(Source = email_from,
        Destination = {'ToAddresses': [email_to,],},
        Message = {'Subject': {'Data': email_subject}, 'Body': {'Text': {'Data': email_body}}}
    )
    print("Function execution Completed !!!")