I'm working on a project with AWS, and I've created an endpoint that I've connected to an SQS queue, which then triggers a Lambda function. The Lambda should write data to a database, but for some reason it doesn't. When I try to trigger it manually with a request like this
{
"Records": [
{
"messageId": "11d6ee51-4cc7-4302-9e22-7cd8afdaadf5",
"receiptHandle": "AQEBBX8nesZEXmkhsmZeyIE8iQAMig7qw...",
"body": {
"duration": "1230",
"player_1": "UR-da336be50ba9b8e53b8",
"player_2": "UR-a67322a021284404128",
"status_1": 1,
"status_2": 0
},
"attributes": {
"ApproximateReceiveCount": "1",
"SentTimestamp": "1573251510774",
"SequenceNumber": "18849496460467696128",
"MessageGroupId": "1",
"SenderId": "AIDAIO23YVJENQZJOL4VO",
"MessageDeduplicationId": "1",
"ApproximateFirstReceiveTimestamp": "1573251510774"
},
"messageAttributes": {},
"md5OfBody": "e4e68fb7bd0e697a0ae8f1bb342846b3",
"eventSource": "aws:sqs",
"eventSourceARN": "arn:aws:sqs:us-east-2:123456789012:fifo.fifo",
"awsRegion": "us-east-2"
},
{
"messageId": "11d6ee51-4cc7-4302-9e22-7cd8afdaadf5",
"receiptHandle": "AQEBBX8nesZEXmkhsmZeyIE8iQAMig7qw...",
"body": {
"duration": "5510",
"player_1": "UR-da336be50ba9b8e53b8",
"player_2": "UR-a67322a021284404128",
"status_1": 1,
"status_2": 0
},
"attributes": {
"ApproximateReceiveCount": "1",
"SentTimestamp": "1573251510774",
"SequenceNumber": "18849496460467696128",
"MessageGroupId": "1",
"SenderId": "AIDAIO23YVJENQZJOL4VO",
"MessageDeduplicationId": "1",
"ApproximateFirstReceiveTimestamp": "1573251510774"
},
"messageAttributes": {},
"md5OfBody": "e4e68fb7bd0e697a0ae8f1bb342846b3",
"eventSource": "aws:sqs",
"eventSourceARN": "arn:aws:sqs:us-east-2:123456789012:fifo.fifo",
"awsRegion": "us-east-2"
}
]
}
It works fine, but when it gets called from SQS nothing happens, and the message is deleted from SQS.
The code for my Lambda:
const { Client } = require("pg");
// Single shared Postgres client, created once per Lambda container so that
// warm invocations reuse the same connection. All settings come from the
// function's environment variables.
const client = new Client({
user: process.env.POSTGRES_USER,
host: process.env.POSTGRES_HOST,
database: process.env.POSTGRES_DATABASE,
password: process.env.POSTGRES_PASSWORD,
port: parseInt(process.env.POSTGRES_PORT),
});
// NOTE(review): connect() returns a promise; a connection failure here
// becomes an unhandled rejection — consider awaiting/catching it.
client.connect();
/**
 * Awaits `callback(element, index, array)` for each element of `array`,
 * strictly one at a time and in order (unlike Array.prototype.forEach,
 * which ignores the promises returned by an async callback).
 */
async function asyncForEach(array, callback) {
  for (const [position, element] of array.entries()) {
    await callback(element, position, array);
  }
}
// SQS-triggered handler: inserts one game_data row per record, then two
// player_game_data rows referencing the generated game_id.
// BUG under discussion: real SQS events deliver `record.body` as a JSON
// *string*, so `record.body.duration` etc. are undefined there. This code
// only works with the hand-crafted test event above, where body is an
// object (the answer below fixes it with JSON.parse).
exports.handler = async (event) => {
try {
// Records are processed strictly one at a time (sequential awaits).
await asyncForEach(event.Records, async (record) => {
//Writes the game info to the DB
// NOTE(review): message values are interpolated directly into the SQL
// text — a SQL-injection risk; parameterized queries ($1, $2, ...)
// would be safer.
const result = await client.query(
`INSERT INTO game_data (duration) VALUES (
${record.body.duration}
) RETURNING game_id`
);
// Insert both players' rows concurrently, reusing the generated game_id.
const res = await Promise.all([
client.query(
`INSERT INTO player_game_data (user_id,game_id,player_game_status) VALUES (
'${record.body.player_1}',
'${result.rows[0].game_id}',
${record.body.status_1}
)`
),
client.query(
`INSERT INTO player_game_data (user_id,game_id,player_game_status) VALUES (
'${record.body.player_2}',
'${result.rows[0].game_id}',
${record.body.status_2}
)`
),
]);
}
);
// NOTE(review): for an SQS event source the return value is ignored;
// returning 400 in the catch below does NOT make SQS retry the message.
return{
statusCode: 200}
} catch (error) {
return {
statusCode: 400,
error: error.message,
};
}
};
I've tested the queue and it works fine so the problem is probably somewhere here..
The problem is that the JSON you send to SQS gets delivered as a string, so you need to convert it back to an object in your Lambda using JSON.parse().
The request that comes through SQS looks something like this:
{
"Records": [
{
"messageId": "bc62a976-06ef-4727-8bb5-8d7b0a474f7d",
"receiptHandle": "AQEBd4oxuMTytPn8AWORy992aGqGO5By+pM1x2dtZpyn0n8cxTJEd9/BXemUnAAbU+tx1jRlsRWCYhPnrrBvCj91nUpw5gT10WGkuQcv6fCH+ePqqON6sIHy9+8csqhzCwphDqdA23SLfidEGMwuW8mvNN+Lh541vfgHoYSQhMv51qLjHADbiSUzfsIYVnvmqU+C3D55OX/OhDOJoWY87XIEjpSEqRKx4s8wTF6edpYyun0IBYUA68W5CFkg+RBuWPeKsGLNENCvCpawcknYOCKrxeMrWRTh73qHZzH6QnNTO5S4fzQONKH2MWjFsIy7T01w1feNSD3qt/m3vakWhQnhi8VDn9KUJCIdKbhxpdqZB3QSPAKvfjRtEkwXQu2pGUpezMtWbNmsQfaEw84+7BV/CQ==",
"body": "{\r\n \"duration\": \"69\",\r\n \"player_1\": \"UR-da336be50ba9b8e53b8\",\r\n \"player_2\": \"UR-a67322a021284404128\",\r\n \"status_1\": 0,\r\n \"status_2\": 1\r\n}",
"attributes": {
"ApproximateReceiveCount": "1",
"AWSTraceHeader": "Root=1-5f454d30-8772a1ac004584ac5e0cbf48",
"SentTimestamp": "1598377264745",
"SenderId": "AROAYLJPJR5FJH6OQCRDF:BackplaneAssumeRoleSession",
"ApproximateFirstReceiveTimestamp": "1598377264750"
},
"messageAttributes": {},
"md5OfBody": "0be85f29f6c6dd29e328b58a01e3db2a",
"eventSource": "aws:sqs",
"eventSourceARN": "arn:aws:sqs:us-east-1:574014197578:dev-Post-GameResults",
"awsRegion": "us-east-1"
}
]
}
After I parsed the body everything worked as intended..
Code after edit:
const { Client } = require("pg");
// One Postgres client per Lambda container (reused across warm invocations),
// configured entirely from environment variables.
const client = new Client({
user: process.env.POSTGRES_USER,
host: process.env.POSTGRES_HOST,
database: process.env.POSTGRES_DATABASE,
password: process.env.POSTGRES_PASSWORD,
port: parseInt(process.env.POSTGRES_PORT),
});
// NOTE(review): connect() is fire-and-forget here; a failed connection
// surfaces as an unhandled rejection rather than a handled error.
client.connect();
/**
 * Runs an async `callback(element, index, array)` for every element,
 * awaiting each call before starting the next (sequential processing).
 */
async function asyncForEach(array, callback) {
  let i = 0;
  while (i < array.length) {
    await callback(array[i], i, array);
    i += 1;
  }
}
exports.handler = async (event) => {
try {
await asyncForEach(event.Records, async (record) => {
var body = JSON.parse(record.body);
//Writes the game info to the DB
const result = await client.query(
`INSERT INTO game_data (duration) VALUES (${body.duration}) RETURNING game_id`
);
const res = await Promise.all([
client.query(
`INSERT INTO player_game_data (user_id,game_id,player_game_status) VALUES (
'${body.player_1}',
'${result.rows[0].game_id}',
${body.status_1}
)`
),
client.query(
`INSERT INTO player_game_data (user_id,game_id,player_game_status) VALUES (
'${body.player_2}',
'${result.rows[0].game_id}',
${body.status_2}
)`
),
]);
}
);
return{
statusCode: 200
}
} catch (error) {
return {
statusCode: 400,
error: error.message,
};
}
};
Related
I'm using AWS CDK Step Functions constructs to create a simple workflow. I can invoke the first Lambda fine, and that subsequently invokes the next Lambda. However, on the second Lambda my input is fine as expected, but the output from the Lambda task always returns Payload: null as the response. I do not intend this behaviour and want to return data inside the Lambda via the output key, to be passed to the next task.
// Step Functions task handler that should report a summary status.
// BUG under discussion: the handler is synchronous and takes no callback,
// so the Node.js Lambda runtime ignores the returned object — which is why
// the LambdaInvoke task sees Payload: null (fixed below by making it async).
export const bulkSummaryHandler = (event) => {
try {
console.log('LAMBA SUMMARY!', event);
return { output: { status: 'finished' } };
} catch (error) {
return handleError(error);
}
};
My CDK code
// Step 1: invoke the CSV-fetching Lambda. The raw LambdaInvoke result is
// stored under $.taskResult, and outputPath narrows the task's output to
// just the Lambda's returned payload ($.taskResult.Payload).
const getUserCsvFileTask = new tasks.LambdaInvoke(ctx.stack, 'getUserCsvFileTask', {
lambdaFunction: getUserCsvFileFn,
comment: 'fetch user uploaded csv from csv-integration-service',
inputPath: '$',
resultPath: '$.taskResult',
outputPath: '$.taskResult.Payload'
});
// Step 2: invoke the summary Lambda with the full state as input.
// No outputPath is set here, so the state keeps the whole LambdaInvoke
// result (including the Payload field that shows up as null in the question).
const bulkSummaryTask = new tasks.LambdaInvoke(ctx.stack, 'bulkProcessingSummaryTask', {
lambdaFunction: bulkSummaryFn,
comment: 'summarise bulk processing',
inputPath: '$'
});
// Chain the tasks into the state-machine definition.
const definition = stepfunctions.Chain.start(getUserCsvFileTask).next(bulkSummaryTask).next(nextLambdaTask);
The response I get from second Lambda 'bulk summary task' invoked in Payload Key is always null. It's not clear to me why I am getting null and I'm out of ideas as to why. Any ideas would be great help.
{
"ExecutedVersion": "$LATEST",
"Payload": null,
"SdkHttpMetadata": {
"AllHttpHeaders": {
"X-Amz-Executed-Version": [
"$LATEST"
],
"x-amzn-Remapped-Content-Length": [
"0"
],
"Connection": [
"keep-alive"
],
"x-amzn-RequestId": [
"fed8b1bd-d188-4425-ade7-ce2723aef4c8"
],
"Content-Length": [
"4"
],
"Date": [
"Wed, 21 Sep 2022 22:54:00 GMT"
],
"X-Amzn-Trace-Id": [
"root=1-632b9607-0e451e4c5dd4c21c7a3eaa8b;sampled=1"
],
"Content-Type": [
"application/json"
]
},
"HttpHeaders": {
"Connection": "keep-alive",
"Content-Length": "4",
"Content-Type": "application/json",
"Date": "Wed, 21 Sep 2022 22:54:00 GMT",
"X-Amz-Executed-Version": "$LATEST",
"x-amzn-Remapped-Content-Length": "0",
"x-amzn-RequestId": "fed8b1bd-d188-4425-ade7-ce2723aef4c8",
"X-Amzn-Trace-Id": "root=1-632b9607-0e451e4c5dd4c21c7a3eaa8b;sampled=1"
},
"HttpStatusCode": 200
},
"SdkResponseMetadata": {
"RequestId": "fed8b1bd-d188-4425-ade7-ce2723aef4c8"
},
"StatusCode": 200
}
Ah, I've been incredibly stupid. The handler needs to be async — since there's no callback parameter, a synchronous handler won't return anything.
/**
 * Async Step Functions task handler. Because the function is async, the
 * Lambda runtime resolves the returned object as the invocation payload,
 * so the LambdaInvoke task receives it under Payload.
 */
export const bulkSummaryHandler = async (event) => {
  try {
    console.log('LAMBA SUMMARY!', event);
    const status = 'finished';
    return { output: { status } };
  } catch (error) {
    return handleError(error);
  }
};
I'm trying to update object from DynamoDB for hours and I can't get it to work. I'm using DocumentClient library and its method update(). When I tested it with API Gateway, I got this error:
{
"errorType": "TypeError",
"errorMessage": "Cannot read property '_id' of undefined",
"trace": [
"TypeError: Cannot read property '_id' of undefined",
" at Runtime.exports.handler (/var/task/index.js:20:44)",
" at Runtime.handleOnce (/var/runtime/Runtime.js:66:25)"
]
}
Here is my code:
// Updates the Name and Shop attributes of one item in the mk-lists table,
// keyed by ListId. `dynamodb` is presumably a DocumentClient instance
// created elsewhere in the file — TODO confirm.
exports.handler = (event, context, callback) => {
console.log(event);
const id = event.listId;
const params = {
Key: {
"ListId": id
},
// #name/#shop aliases are needed because Name is a DynamoDB reserved word.
ExpressionAttributeNames: {
"#name": "Name",
"#shop": "Shop"
},
// NOTE(review): the TypeError ("Cannot read property '_id' of undefined")
// means event.listShop is undefined when invoked via API Gateway — the
// request body is likely arriving as a JSON string under event.body
// (Lambda proxy integration) rather than as top-level fields; verify the
// integration/mapping template.
ExpressionAttributeValues: {
":name": event.listName,
":shop": {
"_id": event.listShop._id,
"name": event.listShop.name,
"address": event.listShop.address,
"city": event.listShop.city
}
},
TableName: "mk-lists",
UpdateExpression: "SET #name = :name, #shop = :shop"
};
dynamodb.update(params, (err, data) => {
if(err) {
console.log(err);
callback(err);
} else {
console.log(data);
callback(null, data);
}
});
};
I have Shop field in Lists table which is an object. Also, it is working when I test it in Lambda function. Can someone help me with this? Thanks in advance.
Here is my request body:
{
"listName": "Lista 13131",
"listShop": {
"_id": "933c836c-6868-4f56-a769-d59f5cbb231e",
"name": "DIS",
"address": "Podrinska 12",
"city": "Uzice"
}
}
Is long polling available on GCP PubSub JS SDK?
I want to be able to process multiple PubSub messages at once for example:
There are 1000 messages being sent to the topic every in a span of 1 minute.
Given that, for the next 10 seconds, there will be 50 messages sent to the topic. I want my subscription to long-poll for 10 seconds so that, instead of processing each message individually, it will wait for 10 seconds and potentially receive all 50 messages.
AWS JS SDK has this feature, I was hoping I can do it on GCP as well.
https://docs.aws.amazon.com/sdk-for-javascript/v3/developer-guide/sqs-examples-enable-long-polling.html
This is an example of how it will work on AWS:
The SQS queue have more than 5 messages.
The listener receiveMessage will get 5 messages at once in a single receive. event
// Example: SQS long polling with aws-sdk v2. WaitTimeSeconds: 20 makes
// receiveMessage hold the connection open until messages arrive (or the
// wait times out), and MaxNumberOfMessages: 5 returns up to 5 at once.
// Load the AWS SDK for Node.js
var AWS = require("aws-sdk");
// Set the region
AWS.config.update({ region: "REGION" });
// Set the AWS Region
const REGION = "us-east-1"; //e.g. "us-east-1"
// Set the parameters
const queueURL =
"https://sqs.us-east-1.amazonaws.com/763335115465/long-polling-per-message"; // SQS_QUEUE_URL
const params = {
AttributeNames: ["SentTimestamp"],
MaxNumberOfMessages: 5,
MessageAttributeNames: ["All"],
QueueUrl: queueURL,
// Long-poll for up to 20 seconds per receive call.
WaitTimeSeconds: 20,
};
// Create SQS service object
// SECURITY: never hard-code credentials in source; use the default
// credential provider chain (env vars, shared config, or an IAM role).
const sqs = new AWS.SQS({
region: REGION,
credentials: {
accessKeyId: "xx",
secretAccessKey: "xxx",
},
});
sqs.receiveMessage(params, function (err, data) {
console.log({ err, data: JSON.stringify(data) });
if (err) {
console.log("Receive Error", err);
} else if (data.Messages) {
// NOTE(review): only the FIRST of the (up to 5) received messages is
// deleted here; the rest become visible again after the visibility timeout.
var deleteParams = {
QueueUrl: queueURL,
ReceiptHandle: data.Messages[0].ReceiptHandle,
};
sqs.deleteMessage(deleteParams, function (err, data) {
if (err) {
console.log("Delete Error", err);
} else {
console.log("Message Deleted", data);
}
});
}
});
{
"ResponseMetadata": { "RequestId": "25295507-c4ae-5106-a499-0d7808c163b8" },
"Messages": [
{
"MessageId": "5dbd863e-2c50-49c8-9c4b-9f70e8db8d17",
"ReceiptHandle": "asdf",
"MD5OfBody": "78ef53e38c997c445f2fe1cc63c13139",
"Body": "Test5",
"Attributes": { "SentTimestamp": "1610991641728" }
},
{
"MessageId": "09baf624-f2ee-4173-83ed-e74c0516a7e6",
"ReceiptHandle": "asdf",
"MD5OfBody": "c454552d52d55d3ef56408742887362b",
"Body": "Test2",
"Attributes": { "SentTimestamp": "1610991983369" }
},
{
"MessageId": "1cac914f-d946-434a-87a0-974b14cc2eba",
"ReceiptHandle": "asdf",
"MD5OfBody": "b3f66ec1535de7702c38e94408fa4a17",
"Body": "Test3",
"Attributes": { "SentTimestamp": "1610991986299" }
},
{
"MessageId": "95c2c8ad-fc7a-451a-b967-8ce1736a4cab",
"ReceiptHandle": "asdf",
"MD5OfBody": "f178860b5109214d9f3debe19a7800d3",
"Body": "Test7",
"Attributes": { "SentTimestamp": "1610991998129" }
},
{
"MessageId": "3711fa29-9bbc-418d-a35f-7adbd7daa952",
"ReceiptHandle": "asd",
"MD5OfBody": "b6e30158b9d7d2dc8bb4f4123fe93c9b",
"Body": "Test10",
"Attributes": { "SentTimestamp": "1610992008975" }
}
]
}
The Cloud Pub/Sub JS library provides a streaming, per-message API for receiving messages from subscriptions. There isn't a way to tell the library to give you a batch of N messages, so you'll have to implement it yourself.
Yes, this is available, and it's documented here. Set the timeout value after which you want to cancel the subscription. So, if you want to wait 10 more seconds, add 10,000 millis!
I am trying to get data from my DynamoDB table called dashboard so I am testing out the Lambda function with a sample from the table.
But all I am getting back from the test is :
Response:
{
"statusCode": 200,
"body": "\"Hello from Lambda!\""
}
It should just return the data from the table that matches it based on the ID as that is what I use to partition the table.
Dashboard example data which is also the test I made
{
"batteryLevel": 35,
"deviceId": "yxftd9pnitd-156xhref9g69a",
"eventId": "c07e3f9f-f6bb-4792-be6f-a9be95cdff38",
"id": 12345,
"location": {
"accuracy": 35.369,
"latitude": 55.8256671,
"longitude": 37.5962931
},
"tags": [
{
"accelX": 0.012,
"accelY": -0.004,
"accelZ": 1.008,
"createDate": "2020-08-11T18:51:58+0300",
"dataFormat": 5,
"defaultBackground": 2,
"favorite": true,
"humidity": 32.8425,
"id": "E5:F1:98:34:C0:0F",
"measurementSequenceNumber": 62865,
"movementCounter": 21,
"name": "Kitchen",
"pressure": 98702,
"rssi": -43,
"temperature": 25.58,
"txPower": 4,
"updateAt": "2020-08-18T19:57:48+0300",
"voltage": 3.013
}
],
"time": "2020-08-18T19:57:48+0300"
}
Lambda Function
"use strict";
// aws-sdk v2 is bundled with the Node.js Lambda runtimes (up to Node 16),
// so it does not need to be packaged with the function.
const AWS = require("aws-sdk");
// Default region for all SDK clients created below.
AWS.config.update({ region: "ap-southeast-1" });
exports.handler = async (event, context) => {
const ddb = new AWS.DynamoDB({ apiVersion: "2012-10-08" });
const documentClient = new AWS.DynamoDB.DocumentClient({ region: "ap-southeast-1"});
const params = {
TableName: "dashboard",
Key: {
id: 12345
}
};
try {
const data = await documentClient.get(params);
console.log(data);
} catch (err) {
console.log(err);
}
};
Based on the comments.
The issue was caused by not deploying the function after adding new code. Subsequently, the previously deployed version (i.e. "Hello from Lambda") was being executed.
The solution was to deploy the new function.
I have got lambda backed Custom Stack in CloudFormation , So I need the fetch function output and put it to the AWS Console, how I can handle this problem?
My Stack is shown as below ;
"CreateExistingVPC": {
"Type": "Custom::CreateExistingVPC",
"Properties": {
"ServiceToken": { "Fn::If": ["LambdaAvailable",{ "Fn::GetAtt": [ "CustomLogic", "Outputs.LambdaAttachHostedZoneArn" ] }, { "Ref": "AWS::NoValue" } ] },
"Region": { "Ref": "AWS::Region" },
"HostedZoneId": { "Ref": "InternalHostedZone" },
"VpcId": { "Fn::GetAtt": [ "VPC", "Outputs.VPC" ] }
}
}
},
"Outputs": {
"Route53VPC": {
"Description": "ExistingRoute53VPCStatus",
"Value": { "Fn::GetAtt": [ "CreateExistingVPC", "{ ??????? }" ] }
}
}
Actually, I have found some answers, but the 'response key' did not work in my case — how can I find the response key?
AWS Cloudformation, Output value from Custom Resource
You need to use the variable you are using to return your response. e.g. (nodeJs)
/**
 * CloudFormation custom-resource handler.
 * On Create: appends the UserPool ARN to the comma-separated OpsPoolArnList
 * and reports it back under the `list` key (retrievable via !GetAtt).
 * On Delete: reports success with no data.
 * Always completes the Lambda invocation via `callback(null, "")`.
 */
module.exports.createPoolList = (event, context, callback) => {
  const { RequestType, ResourceProperties } = event;
  if (RequestType == 'Create') {
    const pools = [...ResourceProperties.OpsPoolArnList.split(","), ResourceProperties.UserPool];
    const response = { list: pools.join() };
    sendresponse(event, "SUCCESS", response, "");
  }
  if (RequestType == 'Delete') {
    sendresponse(event, "SUCCESS", null, "");
  }
  callback(null, "");
};
Here list is the variable which contains my output & returning in my response. The built payload looks like
// CloudFormation custom-resource response body. Keys under `Data` become
// the attributes readable with Fn::GetAtt on the custom resource.
let payload = {
'StackId': event.StackId,
'Status' : responsestatus,
'Reason' : reason,
'RequestId': event.RequestId,
'LogicalResourceId': event.LogicalResourceId,
// NOTE(review): PhysicalResourceId should be stable across updates —
// returning a new value signals CloudFormation to replace the resource.
'PhysicalResourceId': event.LogicalResourceId + 'qwerty',
'Data': response
};
And I refer to this in my script as
!GetAtt <ResourceName>.list
Hope it helps.