I'm trying to forward email received by AWS SES to a Slack webhook.
The workflow I tried is:
my personal email -> SES -> S3 -> Lambda -> POST Request
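For context, the S3 "object created" notification that triggers the Lambda looks roughly like this (abridged, placeholder values; not taken from my account), and the handler below reads the region, bucket name and object key out of it:

# Abridged shape of the S3 event the handler receives (placeholder values)
example_event = {
    "Records": [
        {
            "awsRegion": "us-east-1",
            "s3": {
                "bucket": {"name": "my-ses-inbound-bucket"},
                "object": {"key": "inbound/abcdef0123456789"},
            },
        }
    ]
}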
I've been stuck on this Lambda function since it's not sending the POST request to the webhook URL:
import json
import boto3
import urllib3
from email.parser import FeedParser
from email.header import decode_header
# note: the original "from ast import parse" and "import ConfigParser" were unused
# (ConfigParser is the Python 2 module name and does not exist on Python 3)

http = urllib3.PoolManager()

def lambda_handler(event, context):
    try:
        record = event["Records"][0]
        bucket_region = record["awsRegion"]
        bucket_name = record["s3"]["bucket"]["name"]
        mail_object_key = record["s3"]["object"]["key"]
        s3 = boto3.client('s3', region_name=bucket_region)
        mail_object = s3.get_object(Bucket=bucket_name, Key=mail_object_key)
        try:
            mail_body = mail_object["Body"].read().decode('utf-8')
        except UnicodeDecodeError:
            mail_body = mail_object["Body"].read()
        parser = FeedParser()
        parser.feed(mail_body)
        parsed_mail = parser.close()
        (d_sub, sub_charset) = decode_header(parsed_mail['Subject'])[0]
        subject = d_sub.decode(sub_charset) if isinstance(d_sub, bytes) else d_sub
        # get_payload(decode=...) expects a boolean, not the Content-Transfer-Encoding header value
        payload = parsed_mail.get_payload(decode=True)
        body_charset = parsed_mail.get_content_charset() or 'utf-8'
        body = payload.decode(body_charset)
        url = "MY_SLACK_WEBHOOK_URL"
        # parsed_mail is an email.message.Message, which json.dumps() cannot
        # serialize; send the decoded text instead
        msg = {
            "Content": subject + "\n" + body
        }
        encoded_msg = json.dumps(msg).encode('utf-8')
        resp = http.request('POST', url, body=encoded_msg)
        print({
            "message": subject,
            "status_code": resp.status,
            "response": resp.data
        })
    except Exception as e:
        # the bare except was hiding the real error; log it for CloudWatch
        print('Mail received, but I got some error.')
        print(e)
Could anyone please look over my code?
This is the CloudWatch log from when the Lambda was triggered:
START RequestId: 00f3e7db-807e-48e9-a775-6f0117431b83 Version: $LATEST
Mail received, but I got some error.
END RequestId: 00f3e7db-807e-48e9-a775-6f0117431b83
REPORT RequestId: 00f3e7db-807e-48e9-a775-6f0117431b83 Duration: 1933.59 ms Billed Duration: 1934 ms Memory Size: 128 MB Max Memory Used: 74 MB Init Duration: 317.15 ms
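For reference, Slack incoming webhooks expect a JSON body with a `text` field; a minimal sketch of just the POST step under that assumption (the webhook URL and formatting are placeholders):

import json
import urllib3

http = urllib3.PoolManager()

def post_to_slack(webhook_url, subject, body):
    # Slack incoming webhooks require a "text" field in the JSON payload
    payload = {"text": "*{}*\n{}".format(subject, body)}
    resp = http.request(
        "POST",
        webhook_url,
        body=json.dumps(payload).encode("utf-8"),
        headers={"Content-Type": "application/json"},
    )
    # Slack answers 200 with the body "ok" on success
    print(resp.status, resp.data)
    return resp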
I am posting data to the OpenSearch Service via Lambda, but when I go to the OpenSearch Service endpoint URL to check, I get the error below.
{
"error" : "no handler found for uri [/lambda-s3-index/lambda-type/_search] and method [GET]"
}
I tried printing the response while posting and I'm getting a 400. Below is the code:
import boto3
import requests
from requests_aws4auth import AWS4Auth
import os
import json
import datetime

region = 'us-east-1'
service = 'es'
credentials = boto3.Session().get_credentials()
awsauth = AWS4Auth(credentials.access_key, credentials.secret_key, region, service, session_token=credentials.token)

index = 'lambda-s3-index'
doc_type = 'lambda-type'  # renamed from "type" to avoid shadowing the built-in
host = os.environ['ES_DOMAIN_URL']
url = host + '/' + index + '/' + doc_type
headers = {"Content-Type": "application/json"}

s3 = boto3.client('s3')
bucket = os.environ['S3_BUCKET']

# Lambda execution starts here
def handler(event, context):
    sensorID = event['sensorID']
    timestamp = datetime.datetime.now().strftime("%Y%m%d%H%M%S")
    temperature = event['temperature']
    document = {"sensorID": sensorID, "timestamp": timestamp, "temperature": temperature}
    print(document)
    # post to S3 for storage
    s3.put_object(Body=json.dumps(document).encode(), Bucket=bucket, Key=sensorID + "-" + timestamp + ".json")
    # post to Amazon OpenSearch / Elasticsearch for indexing and Kibana use
    r = requests.post(url, auth=awsauth, json=document, headers=headers)
    print(r)
    response = "Data Uploaded"
    return {
        "Response": response,
        "sensorID": sensorID,
        "temperature": temperature
    }
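For what it's worth, Elasticsearch 7+ and OpenSearch no longer accept custom mapping types in the path, which would be consistent with the "no handler found for uri" message; a sketch of building the URL with the built-in `_doc` type instead (this assumes the domain runs such a version):

import os

index = 'lambda-s3-index'
host = os.environ['ES_DOMAIN_URL']
# On Elasticsearch 7+/OpenSearch, documents are indexed under /<index>/_doc
url = host + '/' + index + '/_doc'

# The search endpoint would then be /<index>/_search, e.g.
# GET https://<domain-endpoint>/lambda-s3-index/_search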
We have an AWS CodePipeline for an Angular application build and deployment.
I have added an approval stage where a user needs to give approval using the link provided in a mail.
Following is the pipeline workflow:
When the pipeline reaches the approval stage, it sends out a mail to an SNS topic; the mail body contains links for Approve and Reject. These are API Gateway URLs that pass the pipeline details and the user's approval to the integrated Lambda.
Example of the API URL: https://xxxxx.execute-api.us-east-1.amazonaws.com/v0/pipeline-approval?action=Approved&pipeline=pipeline-name-here=release-approval&pipelineexecutionid=xxxxxxx
And the mail I'm receiving contains a Symantec link, as it's coming from AWS: https://clicktime.symantec.com/34RBC48WLEQRUG7Vc?u=https%3A%2F%xxxxx.execute-api.us-east-1.amazonaws.com%2Fv0%2Fpipeline-approval%3Faction%3DApproved%26pipeline%3Dpiplinexxx%3Drelease-approval%26pipelineexecutionid%3xxxxxx
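For illustration only, here is a hypothetical helper showing how such Approve/Reject links might be assembled before the SNS notification is published (names and values are placeholders modeled on the example URL above, not code from the actual pipeline):

from urllib.parse import urlencode

API_BASE = "https://xxxxx.execute-api.us-east-1.amazonaws.com/v0/pipeline-approval"

def build_approval_link(action, pipeline, stage, execution_id):
    # action is "Approved" or "Rejected"; the Lambda below reads these
    # names back out of queryStringParameters
    return API_BASE + "?" + urlencode({
        "action": action,
        "pipeline": pipeline,
        "stage": stage,
        "pipelineexecutionid": execution_id,
    })

approve_url = build_approval_link("Approved", "my-pipeline", "release-approval", "xxxxxxx")
reject_url = build_approval_link("Rejected", "my-pipeline", "release-approval", "xxxxxxx")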
Below is the Lambda function code:
import json
import logging
import re
import time
import boto3

logger = logging.getLogger(__name__)
logger.setLevel(logging.INFO)

MAX_WAIT_FOR_RESPONSE = 10
WAIT_INCREMENT = 1

def handler(event, context):
    logger.info('REQUEST RECEIVED:\n %s', event)
    logger.info('REQUEST RECEIVED:\n %s', context)
    pipeline = event["queryStringParameters"]['pipeline']
    stage = event["queryStringParameters"]['stage']
    action = event["queryStringParameters"]['action']
    approval_action = 'transition'
    pipelineexecutionid = event["queryStringParameters"]['pipelineexecutionid']
    client = boto3.client('codepipeline')
    r = client.get_pipeline_state(name=pipeline)['stageStates']
    print(r)
    # find the stage state for the execution that sent the mail
    s = next((x for x in r if x['stageName'] == stage and x['latestExecution']['pipelineExecutionId'] == pipelineexecutionid), None)
    print(s)
    s1 = s['actionStates']
    print(s1)
    s2 = next((y for y in s1 if y['actionName'] == approval_action), None)
    print(s2)
    t = s2['latestExecution']['token']
    print(t)
    result = client.put_approval_result(
        pipelineName=pipeline,
        stageName=stage,
        actionName=approval_action,
        result={
            'summary': 'Automatically approved by Lambda.',
            'status': action
        },
        token=t
    )
    # log the API response rather than the unbound client method
    logger.info("Status message: %s", result)
    if action == 'Approved':
        return {"statusCode": 200, "body": json.dumps('Thank you for approving the release!!')}
    elif action == 'Rejected':
        return {"statusCode": 200, "body": json.dumps('rejected.')}
The issue is that after reaching the approval stage it sends out the mail, but it doesn't wait for user input; it automatically gets Approved or Rejected by itself within 2 to 5 seconds.
Please help me understand what is going wrong here: why is the Lambda not waiting for a response from the API, and why is it approving or rejecting automatically?
I am aware of the HTTP Data Collector API that can be used to send data into Azure Log Analytics; my question here is about getting AWS CloudWatch data into Azure. We have an Azure-hosted application and external AWS-hosted serverless Lambda functions, and we want to import the logs of those 13 serverless functions into Azure. I know from the documentation that there is a Python function that can be used as an AWS Lambda function, and the Python example is in the MSFT documentation. But what I am failing to understand is what JSON format the AWS CloudWatch collector needs to create so it can send the logs to Azure Log Analytics. Any examples of this? Any help on how this can be done? I have also come across this blog, but it is Splunk-specific: https://www.splunk.com/blog/2017/02/03/how-to-easily-stream-aws-cloudwatch-logs-to-splunk.html
Hey, never mind. I was able to dig a little deeper and found that in AWS I can stream the logs from one Lambda to another Lambda function through a subscription filter. Once that was set, all I did was consume that stream, create the JSON on the fly, and send it to Azure Logs. In case you or anyone else is interested, following is the code:
import json
import hashlib
import hmac
import base64
import boto3
import gzip
from datetime import datetime
# botocore.vendored.requests has been removed from newer botocore versions;
# package the requests library with the function if this import fails
from botocore.vendored import requests

# Update the customer ID to your Log Analytics workspace ID
customer_id = "XXXXXXXYYYYYYYYYYYYZZZZZZZZZZ"

# For the shared key, use either the primary or the secondary Connected Sources client authentication key
shared_key = "XXXXXXXXXXXXXXXXXXXXXXXXXX"

# The log type is the name of the event that is being submitted
log_type = 'AWSLambdafuncLogReal'

# Sample payload (unused here; kept from the Data Collector API example)
json_data = [{
    "slot_ID": 12345,
    "ID": "5cdad72f-c848-4df0-8aaa-ffe033e75d57",
    "availability_Value": 100,
    "performance_Value": 6.954,
    "measurement_Name": "last_one_hour",
    "duration": 3600,
    "warning_Threshold": 0,
    "critical_Threshold": 0,
    "IsActive": "true"
},
{
    "slot_ID": 67890,
    "ID": "b6bee458-fb65-492e-996d-61c4d7fbb942",
    "availability_Value": 100,
    "performance_Value": 3.379,
    "measurement_Name": "last_one_hour",
    "duration": 3600,
    "warning_Threshold": 0,
    "critical_Threshold": 0,
    "IsActive": "false"
}]
#body = json.dumps(json_data)

#####################
######Functions######
#####################

# Build the API signature
def build_signature(customer_id, shared_key, date, content_length, method, content_type, resource):
    x_headers = 'x-ms-date:' + date
    string_to_hash = method + "\n" + str(content_length) + "\n" + content_type + "\n" + x_headers + "\n" + resource
    bytes_to_hash = bytes(string_to_hash, encoding="utf-8")
    decoded_key = base64.b64decode(shared_key)
    encoded_hash = base64.b64encode(
        hmac.new(decoded_key, bytes_to_hash, digestmod=hashlib.sha256).digest()).decode()
    authorization = "SharedKey {}:{}".format(customer_id, encoded_hash)
    return authorization

# Build and send a request to the POST API
def post_data(customer_id, shared_key, body, log_type):
    method = 'POST'
    content_type = 'application/json'
    resource = '/api/logs'
    rfc1123date = datetime.utcnow().strftime('%a, %d %b %Y %H:%M:%S GMT')
    print(rfc1123date)
    content_length = len(body)
    signature = build_signature(customer_id, shared_key, rfc1123date, content_length, method, content_type, resource)
    uri = 'https://' + customer_id + '.ods.opinsights.azure.com' + resource + '?api-version=2016-04-01'
    headers = {
        'content-type': content_type,
        'Authorization': signature,
        'Log-Type': log_type,
        'x-ms-date': rfc1123date
    }
    response = requests.post(uri, data=body, headers=headers)
    if (response.status_code >= 200 and response.status_code <= 299):
        print("Accepted")
    else:
        print("Response code: {}".format(response.status_code))
        print(response.text)

def lambda_handler(event, context):
    # CloudWatch Logs delivers the payload base64-encoded and gzip-compressed
    cloudwatch_event = event["awslogs"]["data"]
    decode_base64 = base64.b64decode(cloudwatch_event)
    decompress_data = gzip.decompress(decode_base64)
    log_data = json.loads(decompress_data)
    print(log_data)
    awslogdata = json.dumps(log_data)
    post_data(customer_id, shared_key, awslogdata, log_type)
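As a follow-up, the decoded CloudWatch Logs payload carries a `logEvents` array; if you would rather send Azure one flat record per log event instead of the raw payload, a sketch along these lines could be used in the handler (the field names here are my own choice, not anything Azure requires):

def to_azure_records(log_data):
    # One flat JSON object per CloudWatch log event; the Data Collector API
    # accepts a JSON array of such objects
    return [
        {
            "logGroup": log_data.get("logGroup"),
            "logStream": log_data.get("logStream"),
            "timestamp": log_event["timestamp"],
            "message": log_event["message"],
        }
        for log_event in log_data.get("logEvents", [])
    ]

# e.g. in lambda_handler:
# post_data(customer_id, shared_key, json.dumps(to_azure_records(log_data)), log_type)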
I have a simple AWS CodePipeline with the standard "Source" -> "Build" -> "Deploy" stages that work fine, and I am trying to add my own custom final pipeline stage that is a single AWS Lambda function. The problem is that my last, custom Lambda function runs multiple times and, after a very long time, errors with the following message:
Please see the attached screenshot for the whole pipeline:
When the pipeline reaches this final step, it spins for a very long time with the "Blue (In-Progress)" status before showing an error, as shown here:
Here is my Lambda Function code:
from __future__ import print_function
import hashlib
import time
import os
import boto3
import json
from botocore.exceptions import ClientError

def lambda_handler(event, context):
    # Test (placeholder credentials; prefer the Lambda execution role instead)
    AWS_ACCESS_KEY = 'ASDF1234'
    AWS_SECRET_KEY = 'ASDF1234'
    SQS_TESTING_OUTPUT_STATUS_QUEUE_NAME = 'TestingOutputQueue'
    # Get the CodePipeline client
    code_pipeline = boto3.client('codepipeline')
    # Get the job_id
    for key, value in event.items():
        print(key, value)
    job_id = event['CodePipeline.job']['id']
    DATA = json.dumps(event)
    # Create a connection to the SQS notification service
    sqs_resource_connection = boto3.resource(
        'sqs',
        aws_access_key_id=AWS_ACCESS_KEY,
        aws_secret_access_key=AWS_SECRET_KEY,
        region_name='us-west-2'
    )
    # Get the queue handle
    print("Waiting for notification from AWS ...")
    queue = sqs_resource_connection.get_queue_by_name(QueueName=SQS_TESTING_OUTPUT_STATUS_QUEUE_NAME)
    messageContent = ""
    cnt = 1
    # Replace sender@example.com with your "From" address.
    # This address must be verified with Amazon SES.
    SENDER = 'sender@example.com'
    # Replace recipient@example.com with a "To" address. If your account
    # is still in the sandbox, this address must be verified.
    RECIPIENTS = ['recipient@example.com']
    # If necessary, replace us-east-1 with the AWS Region you're using for Amazon SES.
    AWS_REGION = "us-east-1"
    # The subject line for the email.
    SUBJECT = "Test Case Results"
    # The email body for recipients with non-HTML email clients.
    BODY_TEXT = ("Test Case Results Were ...")
    # The HTML body of the email.
    BODY_HTML = """<html>
    <head></head>
    <body>
      <h1>Amazon SES Test (SDK for Python)</h1>
      <p>%s</p>
    </body>
    </html>
    """ % (DATA)
    # The character encoding for the email.
    CHARSET = "UTF-8"
    # Create a new SES client and specify a region.
    client = boto3.client('ses', region_name=AWS_REGION)
    # Try to send the email.
    try:
        # Provide the contents of the email.
        response = client.send_email(
            Destination={
                'ToAddresses': RECIPIENTS,
            },
            Message={
                'Body': {
                    'Html': {
                        'Charset': CHARSET,
                        'Data': BODY_HTML,
                    },
                    'Text': {
                        'Charset': CHARSET,
                        'Data': BODY_TEXT,
                    },
                },
                'Subject': {
                    'Charset': CHARSET,
                    'Data': SUBJECT,
                },
            },
            Source=SENDER,
            # If you are not using a configuration set, comment or delete the
            # following line
            #ConfigurationSetName=CONFIGURATION_SET,
        )
    # Report a failure to CodePipeline if something goes wrong.
    except ClientError as e:
        message = e.response['Error']['Message']
        # For a custom Lambda action only put_job_failure_result is needed;
        # the third-party variant applies to partner actions
        code_pipeline.put_job_failure_result(jobId=job_id, failureDetails={'message': message, 'type': 'JobFailed'})
        print(message)
    else:
        code_pipeline.put_job_success_result(jobId=job_id)
        print("Email sent! Message ID:")
        print(response['MessageId'])
    print('Function complete.')
    return "Complete."
How can I get the Lambda to fire once and return so the pipeline can complete properly?
You are missing an important integration between your Lambda Function and the CodePipeline service.
You MUST notify CodePipeline about the result of your custom step, whether it succeeded or not - see my examples below.
Reporting success:
function reportSuccess(job_id) {
    var codepipeline = new AWS.CodePipeline();
    var params = {
        jobId: job_id,
    };
    return codepipeline.putJobSuccessResult(params).promise();
}
Reporting failure:
function reportFailure(job_id, invoke_id, message) {
    var codepipeline = new AWS.CodePipeline();
    var params = {
        failureDetails: {
            message: message,
            type: 'JobFailed',
            externalExecutionId: invoke_id,
        },
        jobId: job_id,
    };
    return codepipeline.putJobFailureResult(params).promise();
}
The integration was designed this way because one may want to integrate with an external job worker, where the Lambda starts that worker (for example, an approval process) and the worker then takes control and decides whether the whole step succeeded or failed.
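Since the function in the question is written in Python, the equivalent calls with boto3 would look roughly like this (a sketch, not the original poster's code):

import boto3

codepipeline = boto3.client('codepipeline')

def report_success(job_id):
    # Tell CodePipeline the custom action finished successfully
    codepipeline.put_job_success_result(jobId=job_id)

def report_failure(job_id, invoke_id, message):
    # Tell CodePipeline the custom action failed, with a short reason
    codepipeline.put_job_failure_result(
        jobId=job_id,
        failureDetails={
            'message': message,
            'type': 'JobFailed',
            'externalExecutionId': invoke_id,
        },
    )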
I have created a Lambda function that sends emails whenever a file is uploaded to an S3 bucket, but now I want to have all the information related to that file, such as the name, size, date and time of upload, and if possible where it comes from.
I have all this information in the AWS console, but I want to have it in the email body.
I am using the Serverless Framework, v1.22.0.
Here is my code:
import json
import boto3
import logging
import traceback
from botocore.exceptions import ClientError
from pprint import pprint
from time import strftime, gmtime

email_from = '********@*****.com'
email_to = '********@*****.com'
email_subject = 'new event on s3'
email_body = 'a new file is uploaded'

# setup simple logging for INFO
logger = logging.getLogger()
logger.setLevel(logging.INFO)

def sthree(event, context):
    """Send email whenever a file is uploaded to S3"""
    body = {}
    status_code = 200
    email_body = str(context)
    try:
        s3 = boto3.client('s3')
        ses = boto3.client('ses')
        ses.send_email(Source=email_from,
                       Destination={'ToAddresses': [email_to]},
                       Message={'Subject': {'Data': email_subject}, 'Body': {'Text': {'Data': email_body}}}
                       )
    except Exception as e:
        print(traceback.format_exc())
        status_code = 500
        # exceptions are not JSON serializable; store the message text
        body["message"] = str(e)
    response = {
        "statusCode": status_code,
        "body": json.dumps(body)
    }
    return response
Here is the event json structure sent by S3 upon object creation:
http://docs.aws.amazon.com/AmazonS3/latest/dev/notification-content-structure.html
You can get the file names, sizes and source IP like this:
for record in event['Records']:
    filename = record['s3']['object']['key']
    filesize = record['s3']['object']['size']
    source = record['requestParameters']['sourceIPAddress']
    eventTime = record['eventTime']
And here is a full handler that puts those fields into the email body:
def lambda_handler(event, context):
    s3 = boto3.client('s3')
    email_from = 'XXXXXXXXX@XXX.com'
    email_to = 'XXXXXXXXX@XXX.com'
    email_subject = 'new event on s3'
    email_body = ("File Name :" + event['Records'][0]['s3']['object']['key'] + "\n"
                  + "File Size :" + str(event['Records'][0]['s3']['object']['size']) + "\n"
                  + "Upload Time :" + event['Records'][0]['eventTime'] + "\n"
                  + "User Details :" + event['Records'][0]['userIdentity']['principalId'])
    ses = boto3.client('ses')
    ses.send_email(Source=email_from,
                   Destination={'ToAddresses': [email_to]},
                   Message={'Subject': {'Data': email_subject}, 'Body': {'Text': {'Data': email_body}}}
                   )
    print("Function execution Completed !!!")