Why am I getting "datetime.datetime" error while using lambda? - amazon-web-services

I am currently using AWS Lambda to write this script:
# Question's original handler (reformatted).  It returns the raw
# s3.get_object() response, which contains datetime values (LastModified
# etc.) and a StreamingBody -- neither is JSON serializable, which is
# exactly what triggers the reported error.
import boto3
import json

s3 = boto3.client("s3")


def lambda_handler(event, context):
    # Hard-coded test bucket/key.
    bucket = "www.onlinecodecompiler.com"
    key = "aws_cli_bucket.txt"
    try:
        data = s3.get_object(Bucket=bucket, Key=key)
        # NOTE(review): json_data is read but never used in the return value.
        json_data = data["Body"].read()
        return{
            "response_code ": 200,
            "data": data  # whole response dict, incl. datetimes / StreamingBody
        }
    except Exception as e:
        print(e)
        raise(e)
The txt file "aws_cli_bucket.txt" has the following sentence in it, "list all object in a bucket aws s3 ls s3:// bucket name". It's just random text I had because I was testing to see if Lambda could read a text file from my S3 bucket, but why am I getting an error stating
datetime.datetime(2021, 3, 22, 19, 50, 48, tzinfo=tzutc()) is not JSON serializable
I searched online and most people were getting this error because their code had something to do with date and time but mine does not.
Any help would be greatly appreciated.

The error stems from the response you are getting from s3.get_object, which contains 3 datetime objects:
{
'Body': StreamingBody(),
'DeleteMarker': True|False,
'AcceptRanges': 'string',
'Expiration': 'string',
'Restore': 'string',
'LastModified': datetime(2015, 1, 1),
...
'Expires': datetime(2015, 1, 1),
...
'ObjectLockRetainUntilDate': datetime(2015, 1, 1),
}
s3.get_object
Before returning the response, you also have to remove the Body key, which contains StreamingBody(); otherwise it will error on that too.
"errorMessage": "Unable to marshal response: <botocore.response.StreamingBody object at 0x7fa2370625d0> is not JSON serializable",
Depending on your needs, you can simply delete those fields and then do as you want.
import boto3

s3 = boto3.client('s3')


def lambda_handler(event, context):
    """Read an S3 object and return a JSON-serializable response.

    The raw get_object() response contains values Lambda's JSON
    marshaller cannot serialize: the Body StreamingBody and the datetime
    fields (LastModified, and -- when present -- Expires and
    ObjectLockRetainUntilDate).  Those keys are stripped before
    returning.  Re-raises any boto3 error after logging it.
    """
    try:
        data = s3.get_object(Bucket='enrichedobjects1', Key='x.py')
        json_data = data["Body"].read()
        del data['Body']  # StreamingBody is not JSON serializable
        # fixed: the original removed only LastModified; the other two
        # datetime fields appear in some responses and would fail the same way.
        for dt_key in ('LastModified', 'Expires', 'ObjectLockRetainUntilDate'):
            data.pop(dt_key, None)
        return{
            "response_code ": 200,  # NOTE: key keeps its trailing space to match the output below
            "data": data
        }
    except Exception as e:
        print(e)
        # fixed: bare raise preserves the original traceback (raise(e) re-raises
        # from here and loses the original frame info in the report).
        raise
The above code produces the following response
{
"response_code ": 200,
"data": {
"ResponseMetadata": {
"RequestId": "980JCEDBG0AWZH",
"HostId": "Oscwxx/5str0NFoWbnP/f5SAS4euhiJPQUXx7uOs205fowd7ICNtU=",
"HTTPStatusCode": 200,
"HTTPHeaders": {
"x-amz-id-2": "Oscwxx/5Xk3uv7LuFUDx7uOs205fowd7ICNtU=",
"x-amz-request-id": "980JC0KEDBWZH",
"date": "Mon, 22 Mar 2021 20:43:56 GMT",
"last-modified": "Sat, 20 Mar 2021 17:35:25 GMT",
"etag": "\"d41d8cd98f00b20498ecf8427e\"",
"accept-ranges": "bytes",
"content-type": "text/x-python",
"content-length": "0",
"server": "AmazonS3"
},
"RetryAttempts": 1
},
"AcceptRanges": "bytes",
"ContentLength": 0,
"ETag": "\"d41d8cd98f00b00998ecf8427e\"",
"ContentType": "text/x-python",
"Metadata": {}
}
}

Related

How to get aws cloudwatch metrics statistics using Boto3 AWS Lambda in Python?

I'm trying to get Metric DataPoints or Statistics from an AWS/Lambda function. Here is the code I am running:
# Question's original snippet (reformatted).  Visible problems:
#  - `client` is never defined (boto3.client('cloudwatch') is missing)
#  - only timedelta is imported, but datetime(...) is used below
#  - the dimension name 'lambda' does not match the AWS/Lambda metric
#    dimensions, so CloudWatch returns no datapoints.
import boto3
from datetime import timedelta

response = client.get_metric_statistics(
    Namespace='AWS/Lambda',
    MetricName='Invocations',
    Dimensions=[
        {
            'Name': 'lambda',
            'Value': 'my_lambda_name'
        },
    ],
    StartTime=datetime(2022, 10, 6),
    EndTime=datetime(2022, 10, 8),
    Period=300,
    Statistics=[
        'Sum'
    ],
    #Unit='Count/Second'
    #Unit='Count'
)
response
Currently I am getting an empty response, as follows:
{'Label': 'Invocations',
'Datapoints': [],
'ResponseMetadata': {'RequestId': 'd18c70ee-dedd-410f-afc1-4c32f726175c',
'HTTPStatusCode': 200,
'HTTPHeaders': {'x-amzn-requestid': 'd18c70ee-dedd-410f-afc1-4c32f726175c',
'content-type': 'text/xml',
'content-length': '334',
'date': 'Fri, 07 Oct 2022 21:29:50 GMT'},
'RetryAttempts': 0}}
However, I am quite sure that the lambda is currently being invoked, as the following graph shows:
I just found a solution: assigning the Dimensions Name and Value as follows:
import boto3
# fixed: the original imported only timedelta but calls datetime(...) below,
# which would raise NameError before the request is ever sent.
from datetime import datetime

# fixed: the original never created the CloudWatch client.
client = boto3.client('cloudwatch')

# Sum of Lambda invocations in 5-minute buckets over the two-day window.
response = client.get_metric_statistics(
    Namespace='AWS/Lambda',
    MetricName='Invocations',
    Dimensions=[
        {
            # For AWS/Lambda metrics the per-function dimension is named
            # 'FunctionName' -- using any other name matches no datapoints.
            'Name': 'FunctionName',
            'Value': 'my_function_name'
        },
    ],
    StartTime=datetime(2022, 10, 6),
    EndTime=datetime(2022, 10, 8),
    Period=300,
    Statistics=[
        'Sum'
    ],
)
response

How to download the current version of a file from an S3 versioned bucket

I have objects with multiple versions and I am trying to compare which versions I can delete. I basically want to delete any version that has the same size of the current version.
The problem that I am having is that I can't find out which of the returned versions is the latest/current.
If I use the aws cli, it returns a field called 'IsLatest' but apparently, the boto3 version doesn't.
The aws cli also always returns the StorageClass while boto3 doesn't in some scenarios apparently.
Return from boto3:
{'ResponseMetadata': {'RequestId': 'PHQFMDCF3AHQM6R1', 'HostId': 'b7PmgsVm6y30wfA9GExS+Rc659cu1DI4YFec3i7tvDBew8ob5tY0Mtz6q+yC9nTwdmAoykdV7Lo=', 'HTTPStatusCode': 200, 'HTTPHeaders': {'x-amz-id-2': 'b7PmgsVm6y30wfA9GExS+Rc659cu1DI4YFeR3i7tVDBeu8ab5tY0Mtz6X+yC9nTwdmAoykdV7Lo=', 'x-amz-request-id': 'PHQFMDTB32HQM6R1', 'date': 'Sat, 19 Feb 2022 22:42:14 GMT', 'last-modified': 'Thu, 17 Feb 2022 17:02:54 GMT', 'etag': '"55f146382684970d4970ae31b3d4b310"', 'x-amz-server-side-encryption': 'AES256', 'x-amz-version-id': 'gHm2D2uuosJQS6GpmuySU9uNSXN84cq9', 'accept-ranges': 'bytes', 'content-type': 'text/plain', 'server': 'AmazonS3', 'content-length': '969'}, 'RetryAttempts': 0}, 'AcceptRanges': 'bytes', 'LastModified': datetime.datetime(2022, 2, 17, 17, 2, 54, tzinfo=tzutc()), 'ContentLength': 969, 'ETag': '"55f141382684970d4970ae31b3d4b310"', 'VersionId': 'gHa2D2uuosJQS6GpmuySU9uNSXN84cR9', 'ContentType': 'text/plain', 'ServerSideEncryption': 'AES256', 'Metadata': {}, 'Body': <botocore.response.StreamingBody object at 0x10f29e1c0>}
Versioning_Test/file1.txt
Response from aws cli:
{
"ETag": "\"55f141382684970d4970ae31b3d4b310\"",
"Size": 969,
"StorageClass": "STANDARD",
"Key": "Versioning_Test/file1.txt",
"VersionId": "gHa2D2uuosJQS6GpmuySU9uNSXN84cR9",
"IsLatest": true,
"LastModified": "2022-02-17T17:02:54+00:00",
"Owner": {
"ID": "1e5bc34834bec07ae1bc55a5d07adab10d7d58da04ae761769339a914d1ab472"
}
},
Here is my python script:
# Question's original snippet (reformatted).  version.get() issues a
# GetObject call per version; that response does not include IsLatest
# (and not always StorageClass) -- those fields come back from the
# ListObjectVersions API, not from GetObject.
bucket_name = 'bucket-name'
profile_name = 'aws-profile-name'
key = ''  # NOTE(review): assigned but never used below

session = boto3.session.Session(profile_name=profile_name)
s3 = session.resource('s3')

versions = s3.Bucket(bucket_name).object_versions.filter()
for version in versions:
    print(version.object_key)
    obj = version.get()  # GetObject response: no IsLatest field here
    print(obj)
    #print("\t" + obj.get('VersionId'), obj.get('ContentLength'), obj.get('LastModified'), obj.get('IsLatest'), obj.get('StorageClass'))
Am I missing something?
You can list your object versions from a bucket using list_object_versions API:
import boto3

bucket_name = 'bucket-name'
profile_name = 'aws-profile-name'

if __name__ == "__main__":
    session = boto3.Session(profile_name=profile_name)
    client = session.client('s3')
    # ListObjectVersions returns every version of every object, and each
    # entry carries the IsLatest flag the question was looking for.
    response = client.list_object_versions(Bucket=bucket_name)
    for version in response['Versions']:
        print(f'Key: {version["Key"]}, Size: {version["Size"]} bytes, Latest: {version["IsLatest"]}'
              # fixed: the original printed IsLatest twice; this field is
              # the version's LastModified timestamp.
              f' LastModified: {version["LastModified"]}, StorageClass: {version["StorageClass"]}')
You can notice that the response from AWS contains an IsLatest property as well.

EC2 Metric executes with no errors but doesn't display as it should

Currently I have a function that records the average CPUUtilization of a running Instance.
But the problem is that this function is not displaying the Average value percentage for some reason, even though it matches what is shown on https://boto3.amazonaws.com/v1/documentation/api/latest/reference/services/cloudwatch.html under client.get_metric_statistics. So I am not sure if one of the values is wrong, but the AMI set is correct.
#!/usr/bin/env python3
# NOTE(review): this listing was pasted from a terminal and several lines
# are cut off where they wrapped (they end in "$" below), so the script
# as shown cannot run.  Comments flag the visible problems.
import sys
import boto3
import time
ec2 = boto3.resource('ec2', region_name = 'eu-west-1')
s3 = boto3.resource('s3')
keyname = 'key1.pem'
s3_resource = boto3.resource('s3')
# Cloud-init user data: install and start Apache on first boot.
user_data = '''#!/bin/bash
yum update -y
yum install httpd -y
systemctl enable httpd
systemctl start httpd'''
try:
# NOTE(review): truncated line -- create_bucket() arguments cut off after "C$".
resp = s3.create_bucket(ACL='private',Bucket='buket2',C$
print (resp)
except Exception as error:
print (error)
try:
# NOTE(review): truncated line -- upload_file() arguments cut off; this
# try block also has no matching except in the paste.
s3_resource.Bucket('buket2').upload_file('image.jpg', 'image$
try:
# NOTE(review): truncated line -- the Description string is cut off.
gg = ec2.create_security_group(GroupName='Server', Description = '$
print (gg)
except Exception as error:
print (error)
# NOTE(review): the security group was bound to `gg` above, but `sg` is
# used here and below -- `sg` is never defined in this listing.
response = sg.authorize_ingress(
IpPermissions=[
{
"FromPort": 22,
"ToPort": 22,
"IpProtocol": "tcp",
"IpRanges": [
{"CidrIp": "0.0.0.0/0", "Description": "Server"},
],
},
{
"FromPort": 80,
"ToPort": 80,
"IpProtocol": "tcp",
"IpRanges": [
{"CidrIp": "0.0.0.0/0", "Description": "Server1"},
],
},
],
)
instance = ec2.create_instances(
ImageId='ami-03odd1b743b23e5d2',
MinCount=1,
MaxCount=1,
InstanceType='t2.nano',
# NOTE(review): KeyName is normally the key-pair name ('key1'), not the
# .pem filename -- verify against the account's key pairs.
KeyName = 'key1.pem',
UserData = user_data,
SecurityGroupIds=[sg.group_id]
)
from datetime import datetime, timedelta
# Wait past the first 5-minute metric period before querying CloudWatch.
time.sleep(390)
client = boto3.client('cloudwatch')
response = client.get_metric_statistics(
Namespace='AWS/EC2',
MetricName='CPUUtilization',
Dimensions=[
{
# NOTE(review): the per-image EC2 dimension appears to be 'ImageId';
# 'AMIID' likely matches no datapoints -- confirm against the
# CloudWatch EC2 metrics documentation.
'Name': 'AMIID',
'Value': 'ami-03odd1b743b23e5d2'
},
],
StartTime=datetime.now() - timedelta(seconds=300),
EndTime=datetime.now(),
Period=300,
Statistics=[
'Average',
],
Unit='Percent'
)
print(response)
for cpu in response['Datapoints']:
print(cpu)
s3.Bucket(name='buket2')
ec2.SecurityGroup(id='sg-06b84927ae5rd3ad1')
{'Label': 'CPUUtilization', 'Datapoints': [], 'ResponseMetadata': {'RequestId': 'ba4352d5-67ee-4d51-b03f-d1c532dbfe7', 'HTTPStatusCode': 200, 'HTTPHeaders': {'x-amzn-requestid': 'ba421b45-63dd-4d51-b03f-d14212e2fe7', 'content-type': 'text/xml', 'content-length': '337', 'date': 'Sun, 18 Jul 2021 00:26:57 GMT'}, 'RetryAttempts': 0}}
sg-06b84927ae5rd3ad1
Your StartTime=datetime(2021, 7, 17) and EndTime are in the past. Any newly created instance will not have any past data. You can try with current timestamp:
StartTime=datetime.now() - timedelta(seconds=300),
EndTime=datetime.now()

Youtube Video Insert returns "default" video resource

I'm trying to upload a video from an S3 bucket to YouTube, and getting back strange output that implies a successful post, but doesn't give anything expected back. As well, I set attributes like title and description in my code, but as you can see from the output, this isn't actually being set.
Example Output:
{
"id": "-pfZ_BNH9kg",
"snippet": {
"channelId": "UCZ5AUe-rp3rXKeFS0yx4ZBA",
"title": "unknown",
"channelTitle": "Patrick Hanford",
"publishedAt": "2020-04-30T19:22:15.000Z",
"thumbnails": {
"high": {
"url": "https://i.ytimg.com/vi/-pfZ_BNH9kg/hqdefault.jpg",
"height": 360,
"width": 480
},
"default": {
"url": "https://i.ytimg.com/vi/-pfZ_BNH9kg/default.jpg",
"height": 90,
"width": 120
},
"medium": {
"url": "https://i.ytimg.com/vi/-pfZ_BNH9kg/mqdefault.jpg",
"height": 180,
"width": 320
}
},
"localized": {
"title": "unknown",
"description": ""
},
"liveBroadcastContent": "none",
"categoryId": "20",
"description": ""
},
"etag": "Dn5xIderbhAnUk5TAW0qkFFir0M/3T1YGvGo1YyaTKtTpl8JrJqWS4M",
"status": {
"embeddable": true,
"privacyStatus": "public",
"uploadStatus": "uploaded",
"publicStatsViewable": true,
"license": "youtube"
},
"kind": "youtube#video"
}
Upload Code:
def post(self, attempts=None):
    """Attempt a YouTube video insert through an OAuth2 session.

    Sends metadata-only JSON (the media bytes themselves are never
    attached), logs the raw response body, and returns True.
    """
    TEST_VIDEO = "http://streamon-perm.s3.amazonaws.com/WPHM-48k-pl-33366.mp4"

    # The video resource we ask YouTube to create: snippet, status and
    # fileDetails sections.
    request_json = {
        "snippet": {
            "title": "Test Video Upload",
            "description": "This is a test of uploading videos.",
            "categoryId": "22",
        },
        "status": {
            "privacyStatus": "public"
        },
        "fileDetails": {
            "fileName": TEST_VIDEO,
            "fileType": "video"
        }
    }

    # Query-string credentials plus the resource parts to echo back.
    query_params = {
        "access_token": self.google_token.get("access_token", None),
        "id": self.google_token.get("id_token", None),
        "part": "snippet, status"
    }

    # Client credentials used by the session's automatic token refresh.
    refresh_kwargs = {
        "client_id": self.client_id,
        "client_secret": self.client_secret
    }
    oauth_session = OAuth2Session(
        self.client_id,
        token=self.google_token,
        auto_refresh_url=self.token_url,
        auto_refresh_kwargs=refresh_kwargs,
        token_updater=self._save_token
    )

    response = oauth_session.post(
        self.video_post_url,
        headers={"Content-Type": "video/mp4"},
        json=request_json,
        params=query_params
    )
    logger.info("Response from VIDEO UPLOAD: %s", repr(response.content))
    return True
I have also tried downloading the file from S3 and uploading with the file directly, and I get the same result. Without proper error messages or anything to go off of, I'm really not sure what to try next. Any help is greatly appreciated.
I have also tried using requests by itself rather than using oauthlib with exactly the same result.
def post(self, attempts=None):
    """Upload test-video metadata to YouTube with a plain requests POST.

    Mirrors the OAuth2Session variant but sends the request directly.
    Returns the disabled-message string when posting is neutered,
    otherwise True after logging the raw response content.
    """
    if attempts is None:
        attempts = 0
    if self.neutered:
        msg = "Youtube post() disabled by ENVIRONMENT variables."
        logger.info(msg)
        return msg
    logger.info("Youtube post() entered with attempt # %s", self.post_attempts)
    if self.google_token is None:
        self.google_token = self._set_google_token()
        attempts += 1
        # fixed: return the retried call's result -- the original fell
        # through and kept executing after the recursive post() returned.
        return self.post(attempts=attempts)
    # fixed: TEST_VIDEO was never defined in this version (NameError at
    # the fileDetails line); reuse the same S3 URL as the first variant.
    TEST_VIDEO = "http://streamon-perm.s3.amazonaws.com/WPHM-48k-pl-33366.mp4"
    headers = {
        "Content-Type": "video/mp4",
        "client_id": self.client_id,
        "client_secret": self.client_secret,
        "Authorization": "Bearer " + self.google_token["access_token"]
    }
    params = {
        "access_token": self.google_token.get("access_token", None),
        "id": self.google_token.get("id_token", None),
        "part": "snippet, status"
    }
    upload_request_body = {
        "snippet": {
            "title": "Test Video Upload",
            "description": "This is a test of uploading videos from POST.",
            "categoryId": "22",
        },
        "status": {
            "privacyStatus": "public"
        },
        "fileDetails": {
            "fileName": TEST_VIDEO,
            "fileType": "video"
        }
    }
    upload_response = requests.post(
        self.video_post_url,
        params=params,
        headers=headers,
        json=upload_request_body
    )
    logger.info("Response from VIDEO UPLOAD: %s", repr(upload_response.content))
    return True
I have also tried downloading the file from S3 and uploading with the file directly, and I get the same result.
You probably have this issue because you are not actually sending the file. upload_request_body.fileDetails.fileName is not the place for the link/file. It's just a description attribute.
Have you tried an auto-generated code from https://developers.google.com/youtube/v3/code_samples/code_snippets ?
This is what you can get there:
# -*- coding: utf-8 -*-
# Sample Python code for youtube.videos.insert
# NOTES:
# 1. This sample code uploads a file and can't be executed via this interface.
# To test this code, you must run it locally using your own API credentials.
# See: https://developers.google.com/explorer-help/guides/code_samples#python
# 2. This example makes a simple upload request. We recommend that you consider
# using resumable uploads instead, particularly if you are transferring large
# files or there's a high likelihood of a network interruption or other
# transmission failure. To learn more about resumable uploads, see:
# https://developers.google.com/api-client-library/python/guide/media_upload
import os
import googleapiclient.discovery
from googleapiclient.http import MediaFileUpload
def main():
    """Insert a video via the YouTube Data API v3 (simple upload sample)."""
    # Disable OAuthlib's HTTPS verification when running locally.
    # *DO NOT* leave this option enabled in production.
    os.environ["OAUTHLIB_INSECURE_TRANSPORT"] = "1"

    DEVELOPER_KEY = "YOUR_API_KEY"

    # Build the API client for the youtube/v3 service.
    service = googleapiclient.discovery.build(
        "youtube", "v3", developerKey=DEVELOPER_KEY)

    # Metadata of the video resource to create.
    video_resource = {
        "fileDetails": {
            "fileName": "qwer",
            "fileType": "video"
        },
        "snippet": {
            "categoryId": "22",
            "description": "This is a test of uploading videos.",
            "title": "Test Video Upload"
        },
        "status": {
            "privacyStatus": "public"
        }
    }

    insert_request = service.videos().insert(
        part="snippet,status",
        body=video_resource,
        # TODO: For this request to work, you must replace "YOUR_FILE"
        # with a pointer to the actual file you are uploading.
        media_body=MediaFileUpload("YOUR_FILE")
    )
    print(insert_request.execute())


if __name__ == "__main__":
    main()
I believe it should work.
Or is there any reason not to use googleapiclient?
I'm trying to upload a video from an S3 bucket to YouTube
I doubt that you can upload files from other sites directly to Youtube. Probably you are stuck with the option of uploading files from your own server/drive. I've looked up on the Internet but all I've found is that you can't (although you could in the past). And one can imagine a lot of reasons why this is not allowed (mostly copyright but not exclusively).
Update:
Probably, that was not an exhaustive code snippet. Especially, considering that you need OAuth2.
But here is another one:
https://github.com/youtube/api-samples/blob/master/python/upload_video.py
And yet another:
https://developers.google.com/youtube/v3/guides/uploading_a_video
With OAuth2. There you can also find information on client_secrets.json.
{
"web": {
"client_id": "[[INSERT CLIENT ID HERE]]",
"client_secret": "[[INSERT CLIENT SECRET HERE]]",
"redirect_uris": [],
"auth_uri": "https://accounts.google.com/o/oauth2/auth",
"token_uri": "https://accounts.google.com/o/oauth2/token"
}
}
Also you can checkout some real life projects. For example this one: https://github.com/HA6Bots/Automatic-Youtube-Reddit-Text-To-Speech-Video-Generator-and-Uploader/tree/master/Youtube%20Bot%20Video%20Generator

Getting EC2 instance information created using cloud formation template

I am creating one EC2 instance using below Cloud Formation template.
Named this template as 'dinesh.json'.
{
"AWSTemplateFormatVersion" : "2010-09-09",
"Description" : "Dinesh template",
"Resources" : {
"MyEC2Instance" : {
"Type" : "AWS::EC2::Instance",
"Properties" : {
"ImageId" : "ami-775e4f16",
"InstanceType" : "t2.micro",
"KeyName" : "****"
}
}
}
}
Now, using boto3 library, I am launching the above template.
import boto3

cft = boto3.client('cloudformation')
# Launch a stack from the template stored in S3.
create_cft = cft.create_stack(StackName="Dinesh", TemplateURL=r'https://s3-us-west-2.amazonaws.com/dsp-bucket/dinesh.json')
# fixed: Python-2-only print statement -> print() function (valid in 2 and 3)
print(create_cft)
This is running successfully and getting output as below :
{u'StackId': 'arn:aws:cloudformation:us-west-2:089691119308:stack/Dinesh/5b573240-548a-11e6-90a0-50a68a0bca36', 'ResponseMetadata': {'HTTPStatusCode': 200, 'RequestId': '5b507be9-548a-11e6-8405-55192e2be20a', 'HTTPHeaders': {'x-amzn-requestid': '5b507be9-548a-11e6-8405-55192e2be20a', 'date': 'Thu, 28 Jul 2016 06:13:09 GMT', 'content-length': '376', 'content-type': 'text/xml'}}}
Now, I want to get information of above created EC2 instance like public IP, private IP and other info.
So, can anyone please suggest a way to retrieve the information of this specific EC2 instance?
Please let me know the various ways to doing the above thing apart from boto3.
Use the GetAtt function. For example if you have made an ec2 called bob, then adding this to your Output section will show the privateip
"Outputs": {
"AddressOfbob": {
"Description": "Domainame",
"Value": {
"Fn::GetAtt": [
"bob",
"PrivateIp"
]
}
}
}
Thanks @Vorsprung for pointing me in the right direction.
Along with this, I am adding a bit descriptive answer.
Method 1
Getting EC2 instance info when there is "Output" section present in cloud formation template
import boto3
import time

cft = boto3.client('cloudformation')
create_cft = cft.create_stack(StackName="Dinesh-1", TemplateURL=r'https://s3-us-west-2.amazonaws.com/bucket/dinesh.json')
# fixed: Python-2-only print statements -> print() function
print("Create Stack o/p - ", create_cft)
# Just adding sleep so that stack creation reaches CREATE_COMPLETE status.
# More robust: cft.get_waiter('stack_create_complete').wait(StackName="Dinesh-1")
time.sleep(120)
# The Outputs section of the template surfaces the instance's IPs here.
des_stack = cft.describe_stacks(StackName="Dinesh-1")
print("Describe Stack o/p - ", des_stack)
Output is
Create Stack o/p - {u'StackId': 'arn:aws:cloudformation:us-west-2:089691119308:stack/Dinesh-1/a92318a0-54a7-11e6-b050-50d0184f2', 'ResponseMetadata': {'HTTPStatusCode': 200, 'RequestId': 'a91c023-54a7-11e6-ba43-67cc9d6ed45b', 'HTTPHeaders': {'x-amzn-requestid': 'a91c023-54a7-11e6-ba43-67cc9d6ed45b', 'date': 'Thu, 28 Jul 2016 09:42:55 GMT', 'content-length': '378', 'content-type': 'text/xml'}}}
Describe Stack o/p - {u'Stacks': [{u'StackId': 'arn:aws:cloudformation:us-west-2:089691119308:stack/Dinesh-1/a92318a0-54a7-11e6-b050-50a0184f2', u'Description': 'Dinesh template', u'Tags': [], u'Outputs': [{u'Description': 'Private IP', u'OutputKey': 'PrivateIP', u'OutputValue': '172.3.28.221'}, {u'Description': 'Public IP', u'OutputKey': 'PublicIP', u'OutputValue': '52.5.203.173'}], u'CreationTime': datetime.datetime(2016, 7, 28, 9, 42, 55, 624000, tzinfo=tzutc()), u'StackName': 'Dinesh-1', u'NotificationARNs': [], u'StackStatus': 'CREATE_COMPLETE', u'DisableRollback': False}], 'ResponseMetadata': {'HTTPStatusCode': 200, 'RequestId': 'f19dc8ce-54a7-11e6-83e8-01451bce0ba', 'HTTPHeaders': {'x-amzn-requestid': 'f19dc8ce-54a7-11e6-83e8-01451b7ce0a', 'date': 'Thu, 28 Jul 2016 09:44:57 GMT', 'content-length': '1158', 'content-type': 'text/xml'}}}
In describe_stack output, you will get Public IP and Private IP of created EC2 instance.
Method 2
Getting EC2 instance info when there is no "Output" section present in cloud formation template
import boto3
import time

cft = boto3.client('cloudformation')
create_cft = cft.create_stack(StackName="Dinesh-2", TemplateURL=r'https://s3-us-west-2.amazonaws.com/dsp-bucket/dinesh.json')
# fixed: Python-2-only print statements -> print() function
print("Create Stack o/p - ", create_cft)
# Just adding sleep so that stack creation reaches CREATE_COMPLETE status.
# More robust: cft.get_waiter('stack_create_complete').wait(StackName="Dinesh-2")
time.sleep(120)
# list_stack_resources exposes each resource's PhysicalResourceId
# (the EC2 instance id) even without an Outputs section.
list_stack_resp = cft.list_stack_resources(StackName="Dinesh-2")
print(list_stack_resp)
Output is
Create Stack o/p - {u'StackId': 'arn:aws:cloudformation:us-west-2:089691119308:stack/Dinesh-2/7238154a8-11e6-9694-50a686be73f2', 'ResponseMetadata': {'HTTPStatusCode': 200, 'RequestId': '7234f160-54a8-11e6-bda6-ef311cece04b', 'HTTPHeaders': {'x-amzn-requestid': '7234f160-54a8-11e6-bda6-ef311cece04b', 'date': 'Thu, 28 Jul 2016 09:48:32 GMT', 'content-length': '378', 'content-type': 'text/xml'}}}
{'ResponseMetadata': {'HTTPStatusCode': 200, 'RequestId': 'baabaa79-54a8-11e6-90e7-9ba061bfa4c', 'HTTPHeaders': {'x-amzn-requestid': 'baabaa79-54a8-11e6-90e7-9bad061bf4c', 'date': 'Thu, 28 Jul 2016 09:50:33 GMT', 'content-length': '687', 'content-type': 'text/xml'}}, u'StackResourceSummaries': [{u'ResourceType': 'AWS::EC2::Instance', u'PhysicalResourceId': 'i-059f15aa', u'LastUpdatedTimestamp': datetime.datetime(2016, 7, 28, 9, 49, 23, 481000, tzinfo=tzutc()), u'ResourceStatus': 'CREATE_COMPLETE', u'LogicalResourceId': 'MyEC2Instance'}]}
From the output of list_stack_resource, get the 'PhysicalResourceId' which is 'i-059f15aa' in this case.
Then get the output of describe_instance of ec2 to get full info of EC2 instance created above.
import boto3

ec2 = boto3.client('ec2')
# Look up the instance by the PhysicalResourceId obtained above.
ec2_resp = ec2.describe_instances(InstanceIds=['i-059f15aa'])
# fixed: Python-2-only print statement -> print() function
print(ec2_resp)
Output is
{u'Reservations': [{u'OwnerId': '089691119308', u'ReservationId': 'r-7245ddb6', u'Groups': [], u'Instances': [{u'Monitoring': {u'State': 'disabled'}, u'PublicDnsName': 'ec2-52-42-17-44.us-west-2.compute.amazonaws.com', u'State': {u'Code': 16, u'Name': 'running'}, u'EbsOptimized': False, u'LaunchTime': datetime.datetime(2016, 7, 28, 9, 48, 37, tzinfo=tzutc()), u'PublicIpAddress': '52.42.10.44', u'PrivateIpAddress': '172.3.29.25', u'ProductCodes': [], u'VpcId': 'vpc-c60a2aa3', u'StateTransitionReason': '', u'InstanceId': 'i-059f15aa', u'ImageId': 'ami-775e4f16', u'PrivateDnsName': 'ip-172-31-29-25.us-west-2.compute.internal', u'KeyName': 'dsp', u'SecurityGroups': [{u'GroupName': 'default', u'GroupId': 'sg-53fdaa37'}], u'ClientToken': 'Dines-MyEC2-DJ1D05Q7A088', u'SubnetId': 'subnet-8d0136e8', u'InstanceType': 't2.micro', u'NetworkInterfaces': [{u'Status': 'in-use', u'MacAddress': '02:9f:ab:4a:3c:0b', u'SourceDestCheck': True, u'VpcId': 'vpc-c60a2aa3', u'Description': '', u'Association': {u'PublicIp': '52.42.170.44', u'PublicDnsName': 'ec2-52-42-170-44.us-west-2.compute.amazonaws.com', u'IpOwnerId': 'amazon'}, u'NetworkInterfaceId': 'eni-d5272ca8', u'PrivateIpAddresses': [{u'PrivateDnsName': 'ip-172-31-29-25.us-west-2.compute.internal', u'Association': {u'PublicIp': '52.42.170.44', u'PublicDnsName': 'ec2-52-42-170-44.us-west-2.compute.amazonaws.com', u'IpOwnerId': 'amazon'}, u'Primary': True, u'PrivateIpAddress': '172.31.29.25'}], u'PrivateDnsName': 'ip-172-31-29-25.us-west-2.compute.internal', u'Attachment': {u'Status': 'attached', u'DeviceIndex': 0, u'DeleteOnTermination': True, u'AttachmentId': 'eni-attach-f33c375f', u'AttachTime': datetime.datetime(2016, 7, 28, 9, 48, 37, tzinfo=tzutc())}, u'Groups': [{u'GroupName': 'default', u'GroupId': 'sg-53fdaa37'}], u'SubnetId': 'subnet-8d0136e8', u'OwnerId': '089691119308', u'PrivateIpAddress': '172.31.29.25'}], u'SourceDestCheck': True, u'Placement': {u'Tenancy': 'default', u'GroupName': '', u'AvailabilityZone': 
'us-west-2b'}, u'Hypervisor': 'xen', u'BlockDeviceMappings': [{u'DeviceName': '/dev/sda1', u'Ebs': {u'Status': 'attached', u'DeleteOnTermination': True, u'VolumeId': 'vol-21bddea8', u'AttachTime': datetime.datetime(2016, 7, 28, 9, 48, 37, tzinfo=tzutc())}}], u'Architecture': 'x86_64', u'RootDeviceType': 'ebs', u'RootDeviceName': '/dev/sda1', u'VirtualizationType': 'hvm', u'Tags': [{u'Value': 'arn:aws:cloudformation:us-west-2:089691119308:stack/Dinesh-2/723ba810-54a8-11e6-9694-50a686be73f2', u'Key': 'aws:cloudformation:stack-id'}, {u'Value': 'MyEC2Instance', u'Key': 'aws:cloudformation:logical-id'}, {u'Value': 'Dinesh-2', u'Key': 'aws:cloudformation:stack-name'}], u'AmiLaunchIndex': 0}]}], 'ResponseMetadata': {'HTTPStatusCode': 200, 'RequestId': '8adf8956-0d5a-4d1f-a821-67fec4b5bbf9', 'HTTPHeaders': {'transfer-encoding': 'chunked', 'vary': 'Accept-Encoding', 'server': 'AmazonEC2', 'content-type': 'text/xml;charset=UTF-8', 'date': 'Thu, 28 Jul 2016 09:55:45 GMT'}}}