I have a Python function that invokes an AWS Lambda function.
# lambda.py
import json
import os

import boto3

client = boto3.client('lambda')
MY_LAMBDA = os.environ['MY_LAMBDA']


def invoke_function(input):
    response = client.invoke(
        FunctionName=MY_LAMBDA,
        InvocationType='RequestResponse',
        Payload=json.dumps(input)
    )
    # return the response so callers (and tests) can inspect it
    return response
How can I create a Unit Test for this function? I have been using Moto for other AWS services, but haven't been able to make it work for Lambda.
My attempt at using moto:
# test_lambda.py
import io
import os
import unittest
import zipfile
from unittest.mock import ANY, MagicMock, patch

import boto3
from moto import mock_lambda

import lambda


class LambdaTest(unittest.TestCase):
    def get_test_zip_file(self):
        pfunc = '''
def lambda_handler(event, context):
    return event
'''
        zip_output = io.BytesIO()
        zip_file = zipfile.ZipFile(zip_output, 'w', zipfile.ZIP_DEFLATED)
        zip_file.writestr('lambda_function.py', pfunc)
        zip_file.close()
        zip_output.seek(0)
        return zip_output.read()

    @mock_lambda
    def test_invoke_requestresponse_function(self):
        conn = boto3.client('lambda', 'us-east-1')
        conn.create_function(
            FunctionName='test-func',
            Runtime='python3.8',
            Role='test-iam-role',
            Handler='lambda_function.lambda_handler',
            Code={
                'ZipFile': self.get_test_zip_file(),
            },
            Description='test lambda function',
            Timeout=3,
            MemorySize=128,
            Publish=True
        )
        sample_input = {'msg': 'Test Input'}
        result = lambda.invoke_function(sample_input)
This errors out with:
botocore.exceptions.ClientError: An error occurred (404) when calling the Invoke operation:
The boto3 client in lambda.py is initialized before any of the mocking takes place. As that client doesn't know it's being mocked, it probably tries to talk to AWS itself.

For your particular test case, there are a few solutions:

1. Place import lambda in the test itself, so that the boto3 client is created after the decorators have initialized.
2. Override the client with the mocked version: lambda.client = conn
3. Pass the mocked client as an argument: lambda.invoke_function(conn, sample_input)
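For illustration, here is a minimal sketch of option 2. The module name lambda_module.py is an assumption for this sketch (lambda is a reserved keyword, so import lambda will not actually parse as written), and actually executing the function through moto's Invoke typically requires Docker to be available:

# test_lambda.py - a sketch of option 2, overriding the module-level client.
import io
import unittest
import zipfile

import boto3
from moto import mock_lambda

import lambda_module  # the renamed lambda.py (assumption for this sketch)


def get_test_zip_file():
    # Package a trivial echo handler as the deployment artifact
    pfunc = 'def lambda_handler(event, context):\n    return event\n'
    zip_output = io.BytesIO()
    with zipfile.ZipFile(zip_output, 'w', zipfile.ZIP_DEFLATED) as zip_file:
        zip_file.writestr('lambda_function.py', pfunc)
    zip_output.seek(0)
    return zip_output.read()


class LambdaTest(unittest.TestCase):
    @mock_lambda
    def test_invoke_requestresponse_function(self):
        conn = boto3.client('lambda', region_name='us-east-1')
        conn.create_function(
            FunctionName='test-func',
            Runtime='python3.8',
            Role='arn:aws:iam::123456789012:role/test-iam-role',  # dummy ARN
            Handler='lambda_function.lambda_handler',
            Code={'ZipFile': get_test_zip_file()},
        )
        # Option 2: point the module-level client (and function name) at
        # the mocked environment before calling the code under test.
        lambda_module.client = conn
        lambda_module.MY_LAMBDA = 'test-func'
        result = lambda_module.invoke_function({'msg': 'Test Input'})
        self.assertEqual(result['StatusCode'], 200)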
Related
I'm currently developing a Lambda that invokes another Lambda with Boto3. I need to retrieve a group of results from one query and send them through the invoke payload to the other Lambda, but I can't figure out how to pass the results of a function as parameters when invoking the second Lambda.
I have implemented this method:
import pandas as pd
from MysqlConnection import MysqlConnection
from sqlalchemy import text


class Test:
    def make_dataframe(self):
        conn = MysqlConnection()
        query = text("""select * from queue WHERE estatus = 'PENDING' limit 4;""")
        df = pd.read_sql_query(query, conn.get_engine())
        return df.to_json()
This is the Lambda handler:
import json

import boto3
from MysqlConnection import MysqlConnection
from Test import Test

client = boto3.client('lambda')


def lambda_handler(event, context):
    mydb = MysqlConnection()
    print(mydb.get_engine)
    df = Test()
    object = json.loads(df.make_dataframe())
    response = client.invoke(
        FunctionName='arn:aws:lambda:',
        InvocationType='RequestResponse',  # or 'Event'
        Payload=json.dumps(object)
    )
    responseJson = json.load(response['Payload'])
    print('\n')
    print(responseJson)
    print('\n')
What you're doing is correct in terms of how the call is structured. I assume the problem is with your payload structure and whether it's stringified.

I would try invoking your Lambda with an empty payload and see what happens. If it works with an empty payload, the problem is your payload serialisation; if it doesn't work with an empty payload, it's something else.

What do the CloudWatch logs of both your "runner" Lambda and your "target" Lambda say?

It might also be a permissions issue: the runner Lambda's role needs permission to invoke the target function.
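A minimal sketch of that empty-payload check (the ARN below is a placeholder, since the real one is elided in the question):

import json

import boto3

client = boto3.client('lambda')

# Placeholder ARN - substitute the target Lambda's real name or ARN.
TARGET = 'arn:aws:lambda:eu-west-1:123456789012:function:target-function'


def check_invoke():
    # Invoke with an empty JSON object to rule out payload serialisation
    response = client.invoke(
        FunctionName=TARGET,
        InvocationType='RequestResponse',
        Payload=json.dumps({}),
    )
    print(response['StatusCode'])          # 200 means the call itself worked
    print(json.load(response['Payload']))  # whatever the target returned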
After days of refactoring and research I am sharing the answer. The trick is to serialise the result with json.dumps inside the method, so the handler can pass the already-packed response straight to invoke as the Payload.

This is the method in the Test class:
class Test:
    def make_dataframe(self):
        conn = MysqlConnection()
        query = text("""select * from TEST WHERE status = 'PEN' limit 4;""")
        df = pd.read_sql_query(query, conn.get_engine())
        lst = df.values.tolist()
        # CustomJSONEncoder is a user-defined encoder for non-serialisable column types
        obj = json.dumps(lst, cls=CustomJSONEncoder)
        return obj
def lambda_handler(event, context):
    mydb = MysqlConnection()
    df = Test()
    response = client.invoke(
        FunctionName='arn:aws:lambda:',
        InvocationType='RequestResponse',
        Payload=df.make_dataframe()
    )
    responseJson = json.load(response['Payload'])
    print('\n')
    print(responseJson)
    print('\n')
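For completeness, a sketch of what the target Lambda sees under this approach: the Payload string is JSON-decoded by the Lambda runtime, so the invoked handler receives the list serialised by make_dataframe as its event (the handler body here is illustrative):

# Target Lambda: event is already the decoded list of rows.
def lambda_handler(event, context):
    print(event)                          # e.g. [[1, 'PEN', ...], ...]
    return {'received_rows': len(event)}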
I am trying to create a Python 3.7 Lambda which correctly uses asyncio for concurrent work.

I have tried many different code variations, but here is the latest block. I am using AWS X-Ray to look at the timing, and it is easy to verify that the async is not working correctly: all these tasks and calls are being executed sequentially.
import json
import asyncio

import boto3
from botocore.exceptions import ClientError
from aws_xray_sdk.core import xray_recorder
from aws_xray_sdk.core import patch_all

# xray
patch_all()


def lambda_handler(event, context):
    tasks = []
    dict_to_populate = {}
    for item in list:  # "list" stands in for the collection of work items
        tasks.append(asyncio.ensure_future(do_work(item, dict_to_populate)))

    loop = asyncio.get_event_loop()
    loop.run_until_complete(asyncio.gather(*tasks))
    loop.close()


async def do_work(item, dict_to_populate):
    # assume regions are obtained
    for region in regions:
        response_vpcs = describe_vpcs(obj['Id'], session_assumed, region)
        if 'Vpcs' in response_vpcs:
            for vpc in response_vpcs['Vpcs']:
                pass  # process
I expect to see the do_work functions start at essentially the same time (asynchronously), but according to X-Ray they all run one after the other. The code processes synchronously and populates dict_to_populate as expected.
This is how I have done it in my AWS Lambda. I wanted to make four POST requests and then collect all the responses. Hope this helps.
loop = asyncio.get_event_loop()
if loop.is_closed():
    loop = asyncio.new_event_loop()

# perform_traces is where I do all the POST requests
task = loop.create_task(perform_traces(payloads, message, contact_centre))
unique_match, error = loop.run_until_complete(task)
loop.close()
In the perform_traces method, this is how I have used asyncio.wait with the session:
future_dds_responses = []
async with aiohttp.ClientSession() as session:
    for payload in payloads:
        future_dds_responses.append(dds_async_trace(session, payload, contact_centre))

    dds_responses, pending = await asyncio.wait(future_dds_responses)
In dds_async_trace, this is how I have done the POST using the aiohttp.ClientSession session:
async with session.post(pds_url,
                        data=populated_template_payload,
                        headers=PDS_HEADERS,
                        ssl=ssl_context) as response:
    status_code = response.status
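Putting the same pattern together in one self-contained sketch (the URL and payload shape are placeholders, and asyncio.gather is used instead of asyncio.wait so the responses come back in input order):

import asyncio

import aiohttp

URL = 'https://example.com/trace'  # placeholder endpoint


async def post_one(session, payload):
    # One POST request; the coroutine suspends while waiting on the network
    async with session.post(URL, json=payload) as response:
        return await response.json()


async def post_all(payloads):
    # Share one session across all requests and run them concurrently
    async with aiohttp.ClientSession() as session:
        return await asyncio.gather(*(post_one(session, p) for p in payloads))


def lambda_handler(event, context):
    payloads = event.get('payloads', [])
    loop = asyncio.get_event_loop()
    if loop.is_closed():
        loop = asyncio.new_event_loop()
        asyncio.set_event_loop(loop)
    return loop.run_until_complete(post_all(payloads))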
I need to make two Lambda functions; one calls the other with a parameter, and the called function prints the parameter out. I am having trouble making it work.
The first function:
from __future__ import print_function

import json

import boto3

lambda_client = boto3.client('lambda')


def lambda_handler(event, context):
    invoke_response = lambda_client.invoke(FunctionName="called-function",
                                           InvocationType='Event',
                                           Payload=json.dumps('hello Jenny'))
    print(invoke_response)
Please advise what code I should put in called-function in order to receive the parameter 'hello Jenny'.
Thank you
The Payload supplied in the params will be available as the event of the Lambda being invoked.
def add(event, context):
    # event is 'hello Jenny'
    return event
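One caveat worth adding: with InvocationType='Event' the invoke call is asynchronous, so the response contains only a 202 status code and no return payload; anything the called function prints shows up in its CloudWatch logs instead. To read the returned value in the caller, a RequestResponse sketch would look like this:

import json

import boto3

lambda_client = boto3.client('lambda')


def lambda_handler(event, context):
    invoke_response = lambda_client.invoke(
        FunctionName='called-function',
        InvocationType='RequestResponse',  # synchronous: waits for the result
        Payload=json.dumps('hello Jenny'),
    )
    # The Payload stream holds the JSON-encoded return value of the callee
    print(json.load(invoke_response['Payload']))  # -> 'hello Jenny'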
I'm trying to create an RDS MySQL DB snapshot using an AWS Lambda function. I want the snapshot name to carry a timestamp (not the snapshot creation time).

Example: for the code below I expect 'mydb-2017-08-24-06-12' as the DB snapshot name.
import boto3
import datetime


def lambda_handler(event, context):
    client = boto3.client('rds')
    i = datetime.datetime.now()
    response = client.create_db_snapshot(
        DBSnapshotIdentifier="mydb" % (i),
        DBInstanceIdentifier='mydb'
    )
but it is throwing the error below:
DBSnapshotIdentifier="mydb" % (i),
TypeError: not all arguments converted during string formatting
Please suggest a solution.
I've fixed my code like this:
import boto3
import datetime


def lambda_handler(event, context):
    client = boto3.client('rds')
    x = datetime.datetime.now().strftime("mydb-%Y-%m-%d-%H-%M-%S")
    response = client.create_db_snapshot(
        DBSnapshotIdentifier=x,
        DBInstanceIdentifier='mydb'
    )
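For reference, the original error happened because "mydb" contains no % conversion specifier, so the % operator has nothing to consume the datetime argument. An equivalent fix with an f-string (Python 3.6+), matching the 'mydb-2017-08-24-06-12' shape from the question:

import datetime


def make_snapshot_id(prefix='mydb'):
    # e.g. 'mydb-2017-08-24-06-12' - year, month, day, hour, minute
    timestamp = datetime.datetime.now().strftime('%Y-%m-%d-%H-%M')
    return f"{prefix}-{timestamp}"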
I am trying to implement a service which checks whether the logged-in user is in a datastore: if yes it returns True, if not it returns False.
Here is the code I am using:
import endpoints
from google.appengine.ext import ndb
from protorpc import remote
from protorpc import messages
from endpoints_proto_datastore.ndb import EndpointsModel
from google.appengine.api import users


class AuthRes(messages.Message):
    message = messages.StringField(1)


class UserModel(EndpointsModel):
    user = ndb.UserProperty()


@endpoints.api(name='myapi', version='v1', description='My Little API')
class MyApi(remote.Service):

    @UserModel.method(path='myuser', http_method='GET', name='myuser.check')
    def UserCheck(self, cls):
        user = users.get_current_user()
        if user:
            myuser = cls.query().filter(cls.user.user_id() == user.user_id()).get()
            if not myuser:
                return AuthRes(message="False")
            else:
                return AuthRes(message="True")
        else:
            return AuthRes(message="False")


application = endpoints.api_server([MyApi], restricted=False)
I always get 'AuthRes' object has no attribute 'ToMessage'
I believe instead of this:

@UserModel.method(path='myuser', http_method='GET', name='myuser.check')

you want this:

from protorpc import message_types  # add at the top

@endpoints.method(message_types.VoidMessage, AuthRes, path='myuser', http_method='GET', name='myuser.check')
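For illustration, a minimal sketch of how that corrected decorator slots into the service from the question. The ndb query is also rewritten as a standard property filter, since calling user_id() on the model property (as in the question) is not valid ndb; treat that rewrite as an assumption about the intended lookup:

import endpoints
from google.appengine.api import users
from protorpc import message_types, remote


@endpoints.api(name='myapi', version='v1', description='My Little API')
class MyApi(remote.Service):

    # AuthRes is a plain protorpc message, so the method is declared with an
    # explicit (request, response) pair rather than UserModel.method.
    @endpoints.method(message_types.VoidMessage, AuthRes,
                      path='myuser', http_method='GET', name='myuser.check')
    def UserCheck(self, request):
        user = users.get_current_user()
        if user:
            # Filter on the UserProperty directly (assumed intent)
            myuser = UserModel.query(UserModel.user == user).get()
            return AuthRes(message="True" if myuser else "False")
        return AuthRes(message="False")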