I'm new to AWS and I have a strange problem getting the body of the event inside my Lambda handler function.
exports.handler = async (event) => {
const response = {
statusCode: 200,
body: event.body
};
return response;
};
When I run a test I get
Response:
{
"statusCode": 200
}
However, when I return only the event
exports.handler = async (event) => {
const response = {
statusCode: 200,
body: event <=====
};
return response;
};
I get
Response:
{
"statusCode": 200,
"body": {
"key1": "value1",
"key2": "value2",
"key3": "value3"
}
}
I'm using Node 8.10. Does anybody know what I'm doing wrong here?
The test event in the Lambda console is exactly what you get as the event parameter in your Lambda handler. When you put {"a":1}, you get {"a":1}.
You can simulate the event types of different AWS services (SNS, S3, API Gateway) by selecting a template from the combo box.
As you are returning an HTTP response, you probably want to simulate an API Gateway event, which could look like this:
{
"body": "{\"a\":1}",
"pathParameters": {
"id": "XXX"
},
"resource": "/myres",
"path": "/myres",
"httpMethod": "GET",
"isBase64Encoded": false,
"requestContext": {
"authorizer": {
"tenantId": "TEST"
},
"accountId": "123456789012",
"resourceId": "123456",
"stage": "test",
"requestId": "test-request-id",
"requestTime": "09/Apr/2015:12:34:56 +0000",
"requestTimeEpoch": 1428582896000,
"path": "/myres",
"resourcePath": "/myres",
"httpMethod": "GET",
"apiId": "1234567890",
"protocol": "HTTP/1.1"
}
}
Then you will get the body in event.body as a JSON string, and you can convert it into an object with JSON.parse(event.body).
When returning, you have to serialize the response body with JSON.stringify:
return {
statusCode: 200,
body: JSON.stringify({your:'object'})
};
Change
exports.handler = async (event) => {
const response = {
statusCode: 200,
body: event.body
};
return response;
};
to
exports.handler = async (event) => {
const response = {
statusCode: 200,
body: JSON.stringify(event.body)
};
return response;
};
The body you return to API Gateway must be stringified; otherwise API Gateway doesn't know how to deal with the response.
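Putting the two halves together, a minimal round-trip handler could look like this (a sketch, assuming a proxy-style API Gateway event whose body is a JSON string):
exports.handler = async (event) => {
  // The incoming body arrives as a JSON string, so parse it first.
  const payload = JSON.parse(event.body);

  // ...work with payload here...

  // The outgoing body must be a string again.
  return {
    statusCode: 200,
    body: JSON.stringify(payload)
  };
};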
Related
I have a Node API Gateway stack and a Node Lambda. I've been trying to get API Gateway to return content-type: application/xml OR application/json depending on the request (returnType=xml or returnType=json).
I have tried adding response models, and that didn't work. BinaryTypes didn't work either. I have gotten it to do either application/json OR application/xml, but I can't get it to do both. Is what I'm trying to do even possible, or should I create two separate endpoints?
This example always returns application/json.
Here is my lambda:
const https = require("https");

exports.handler = async function (event, context, callback) {
  var format = event.format;
  if (!format) {
    // Throwing from an async handler surfaces as errorMessage,
    // which the "[BadRequest]" selectionPattern below matches.
    throw new Error("[BadRequest] missing parameters");
  }
const promise = new Promise(function (resolve, reject) {
https
.get("https://exampleendpoint.com", (res) => {
let body = "";
res.on("data", (chunk) => {
body += chunk;
});
res.on("end", () => {
var results = JSON.parse(body);
if (format && format.toUpperCase() === "XML") {
var response = {
statusCode: 200,
headers: { "content-type": "application/xml" },
body:
'<?xml version="1.0" encoding="UTF-8"?><result>' +
OBJtoXML(results) +
"</result>",
};
resolve(response);
} else {
var response = {
statusCode: 200,
headers: { "content-type": "application/json" },
body: JSON.stringify(results),
};
resolve(response);
}
});
})
.on("error", (e) => {
var response = {
statusCode: 500,
body: "",
errorMessage: "Error from example.com",
};
resolve(response);
});
});
return promise;
};
Here is my API Gateway code:
const epqApi = new gateway.RestApi(this, "restApi", {
restApiName: "resultsApi",
cloudWatchRole: true,
description: "Calls the service for the app",
endpointTypes: [gateway.EndpointType.REGIONAL],
deployOptions: {
stageName: "prod",
loggingLevel: gateway.MethodLoggingLevel.OFF,
dataTraceEnabled: false,
},
});
const epqResource = epqApi.root.addResource("v1");
const epqIntegration: gateway.LambdaIntegration =
new gateway.LambdaIntegration(generatePqsResultFunction, {
proxy: false,
allowTestInvoke: true,
passthroughBehavior: gateway.PassthroughBehavior.NEVER,
contentHandling: gateway.ContentHandling.CONVERT_TO_TEXT,
requestTemplates: {
"application/json": `{
"format":"$input.params(\'format\')"
}`,
},
integrationResponses: [
{
statusCode: "200",
responseParameters: {
"method.response.header.Access-Control-Allow-Origin": "'*'",
},
responseTemplates: {
"application/json": "$input.path('$.body')",
"application/xml": "$input.path('$.body')",
},
},
{
statusCode: "400",
selectionPattern: "^\\[BadRequest\\].*",
responseParameters: {
"method.response.header.Access-Control-Allow-Origin": "'*'",
},
responseTemplates: {
"application/javascript":
"#set($inputRoot = $input.path('$')) {\"errorMessage\" : \"$input.path('$.errorMessage')\"}",
},
},
],
});
epqResource.addMethod("GET", epqIntegration, {
requestParameters: {
//all params need to be in here, even if they are not required
"method.request.querystring.x": false,
"method.request.querystring.y": false,
"method.request.querystring.units": false,
"method.request.querystring.format": false,
"method.request.querystring.wkid": false,
"method.request.querystring.includeDate": false,
},
methodResponses: [
// Successful response from the integration
{
statusCode: "200",
responseParameters: {
"method.response.header.Access-Control-Allow-Origin": true,
},
},
{
statusCode: "400",
responseParameters: {
"method.response.header.Access-Control-Allow-Origin": true,
},
},
],
});
}
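One way to serve both content types from a single endpoint is a proxy integration, which passes the Lambda's statusCode, headers, and body through unchanged, so the content-type header the handler already sets decides what the client receives. A sketch against the question's own names (a suggested approach, not a verified fix; proxy-specific caveats are in the comments):
const epqIntegration = new gateway.LambdaIntegration(generatePqsResultFunction, {
  // With proxy: true, no integrationResponses or responseTemplates are
  // needed; the handler's statusCode/headers/body pass through as-is.
  proxy: true,
  allowTestInvoke: true,
});
// Caveat: a proxy integration has no requestTemplates either, so the handler
// would read event.queryStringParameters.format instead of event.format.
epqResource.addMethod("GET", epqIntegration);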
Is long polling available on GCP PubSub JS SDK?
I want to be able to process multiple PubSub messages at once, for example:
There are 1,000 messages sent to the topic over a span of 1 minute.
Say that over the next 10 seconds, 50 messages will be sent to the topic. I want my subscription to long-poll for 10 seconds so that, instead of processing each message individually, it waits for 10 seconds and potentially gets all 50 messages.
The AWS JS SDK has this feature, and I was hoping I could do the same on GCP.
https://docs.aws.amazon.com/sdk-for-javascript/v3/developer-guide/sqs-examples-enable-long-polling.html
This is an example of how it works on AWS:
The SQS queue has more than 5 messages.
The listener receiveMessage will get 5 messages at once in a single receive event.
// Load the AWS SDK for Node.js
var AWS = require("aws-sdk");
// Set the AWS Region
const REGION = "us-east-1";
// Set the parameters
const queueURL =
"https://sqs.us-east-1.amazonaws.com/763335115465/long-polling-per-message"; // SQS_QUEUE_URL
const params = {
AttributeNames: ["SentTimestamp"],
MaxNumberOfMessages: 5,
MessageAttributeNames: ["All"],
QueueUrl: queueURL,
WaitTimeSeconds: 20,
};
// Create SQS service object
const sqs = new AWS.SQS({
region: REGION,
credentials: {
accessKeyId: "xx",
secretAccessKey: "xxx",
},
});
sqs.receiveMessage(params, function (err, data) {
console.log({ err, data: JSON.stringify(data) });
if (err) {
console.log("Receive Error", err);
} else if (data.Messages) {
var deleteParams = {
QueueUrl: queueURL,
ReceiptHandle: data.Messages[0].ReceiptHandle,
};
sqs.deleteMessage(deleteParams, function (err, data) {
if (err) {
console.log("Delete Error", err);
} else {
console.log("Message Deleted", data);
}
});
}
});
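With five or more messages waiting in the queue, a single receiveMessage call logs a response like this: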
{
"ResponseMetadata": { "RequestId": "25295507-c4ae-5106-a499-0d7808c163b8" },
"Messages": [
{
"MessageId": "5dbd863e-2c50-49c8-9c4b-9f70e8db8d17",
"ReceiptHandle": "asdf",
"MD5OfBody": "78ef53e38c997c445f2fe1cc63c13139",
"Body": "Test5",
"Attributes": { "SentTimestamp": "1610991641728" }
},
{
"MessageId": "09baf624-f2ee-4173-83ed-e74c0516a7e6",
"ReceiptHandle": "asdf",
"MD5OfBody": "c454552d52d55d3ef56408742887362b",
"Body": "Test2",
"Attributes": { "SentTimestamp": "1610991983369" }
},
{
"MessageId": "1cac914f-d946-434a-87a0-974b14cc2eba",
"ReceiptHandle": "asdf",
"MD5OfBody": "b3f66ec1535de7702c38e94408fa4a17",
"Body": "Test3",
"Attributes": { "SentTimestamp": "1610991986299" }
},
{
"MessageId": "95c2c8ad-fc7a-451a-b967-8ce1736a4cab",
"ReceiptHandle": "asdf",
"MD5OfBody": "f178860b5109214d9f3debe19a7800d3",
"Body": "Test7",
"Attributes": { "SentTimestamp": "1610991998129" }
},
{
"MessageId": "3711fa29-9bbc-418d-a35f-7adbd7daa952",
"ReceiptHandle": "asd",
"MD5OfBody": "b6e30158b9d7d2dc8bb4f4123fe93c9b",
"Body": "Test10",
"Attributes": { "SentTimestamp": "1610992008975" }
}
]
}
The Cloud Pub/Sub JS library provides a streaming, per-message API for receiving messages from subscriptions. There isn't a way to tell the library to give you a batch of N messages, so you'll have to implement it yourself.
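A minimal sketch of that do-it-yourself batching with @google-cloud/pubsub, assuming a hypothetical subscription named "my-subscription": buffer each delivered message and flush the buffer every 10 seconds.
const { PubSub } = require("@google-cloud/pubsub");

const subscription = new PubSub().subscription("my-subscription", {
  flowControl: { maxMessages: 50 }, // cap how many unacked messages we hold
});

let buffer = [];

subscription.on("message", (message) => {
  buffer.push(message); // hold it; ack only after the batch is processed
});
subscription.on("error", (err) => console.error(err));

setInterval(() => {
  const batch = buffer;
  buffer = [];
  if (batch.length === 0) return;
  console.log(`Processing ${batch.length} messages as one batch`);
  batch.forEach((message) => message.ack());
}, 10000);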
Yes, you have it, and it's documented here. Set the timeout value after which you want to cancel the subscription; if you want to wait 10 more seconds, add 10,000 millis.
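That documented pattern, sketched with the same hypothetical subscription name: listen, then detach the listener after the timeout and handle everything received in that window at once.
const { PubSub } = require("@google-cloud/pubsub");

const subscription = new PubSub().subscription("my-subscription");
const received = [];

const onMessage = (message) => {
  received.push(message.data.toString());
  message.ack();
};
subscription.on("message", onMessage);

// After 10 seconds, stop listening and process the whole window.
setTimeout(() => {
  subscription.removeListener("message", onMessage);
  console.log(`Got ${received.length} messages in 10s`);
}, 10000);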
I am creating an API to make GET and POST request to a table in DynamoDB.
I deployed it using serverless and received the endpoints for each API type.
But when testing it out with Postman I get the following error:
Bad request. We can't connect to the server for this app or website at this time. There might be too much traffic or a configuration error. Try again later, or contact the app or website owner.
If you provide content to customers through CloudFront, you can find steps to troubleshoot and help prevent this error by reviewing the CloudFront documentation.
Code for creating the data in the table:
const AWS = require("aws-sdk");
const { v4: uuidv4 } = require("uuid");
const db = new AWS.DynamoDB.DocumentClient();
const postsTable = process.env.POSTS_TABLE;
// Create a response
function response(statusCode, message) {
return {
statusCode: statusCode,
body: JSON.stringify(message)
};
}
// Create a post
module.exports.createPost = (event, context, callback) => {
const reqBody = JSON.parse(event.body);
if (
!reqBody.title ||
reqBody.title.trim() === "" ||
!reqBody.body ||
reqBody.body.trim() === ""
) {
return callback(
null,
response(400, {
error:
"Post must have a title and body and they must not be empty"
})
);
}
const post = {
id: uuidv4(),
createdAt: new Date().toISOString(),
userId: 1,
title: reqBody.title,
body: reqBody.body
};
return db
.put({
TableName: postsTable,
Item: post
})
.promise()
.then(() => {
callback(null, response(201, post));
})
.catch(err => callback(null, response(err.statusCode, err)));
};
I managed to do it, but did not use Serverless.
I set up Lambda functions to POST and GET the data from a URL.
I think the issue previously was to do with the policies. This time, when making the Lambda functions, I set them up as follows:
I clicked on "Create a new role from AWS policy templates" while creating an execution role for a new function, then selected "Simple microservice permissions" under Policy templates. This added the basic execution role policy and the DynamoDB permissions below to the role, for all the tables in the same region as the function:
"Action": [
"dynamodb:DeleteItem",
"dynamodb:GetItem",
"dynamodb:PutItem",
"dynamodb:Scan",
"dynamodb:UpdateItem"
]
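For reference, the full statement that template generates looks roughly like this (the exact Resource scoping is my assumption and may differ by account and region):
{
  "Version": "2012-10-17",
  "Statement": [
    {
      "Effect": "Allow",
      "Action": [
        "dynamodb:DeleteItem",
        "dynamodb:GetItem",
        "dynamodb:PutItem",
        "dynamodb:Scan",
        "dynamodb:UpdateItem"
      ],
      "Resource": "arn:aws:dynamodb:ap-southeast-1:123456789012:table/*"
    }
  ]
}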
Lambda function for POST request
const AWS = require("aws-sdk");

exports.handler = async (event, context) => {
  const documentClient = new AWS.DynamoDB.DocumentClient({
    region: "ap-southeast-1"
  });
let responseBody = "";
let statusCode = 0;
const {
deviceId,
batteryLevel,
eventId,
id,
location,
tags,
time
} = JSON.parse(event.body);
const params = {
TableName: "dashboard",
Item: {
  id: id, // the table's partition key; the put fails without it
batteryLevel: batteryLevel,
deviceId: deviceId,
eventId: eventId,
location: location,
tags: tags,
time: time
}
};
try {
const data = await documentClient.put(params).promise();
responseBody = JSON.stringify(data);
statusCode = 201;
} catch (err) {
responseBody = "Unable to POST data";
statusCode = 403;
}
const response = {
statusCode: statusCode,
headers: {
myHeader: "test"
},
body: responseBody
};
return response;
};
There were other issues as well with the method execution of the API: I needed to set a custom model for the request body to match my data:
{
"$schema": "http://json-schema.org/draft-04/schema#",
"title": "DashboardInputModel",
"type": "object",
"properties":
{
"batteryLevel": {"type": "string"},
"deviceId": {"type": "string"},
"eventId": {"type": "string"},
"id": {"type": "number"},
"location": {
"type": "object",
"properties":{
"accuracy": {"type": "number"},
"latitude": {"type": "number"},
"longitude": {"type": "number"}
}
},
"tags": {
"type": "array",
"items": {
"type": "object",
"properties": {
"accelX":{"type": "number"},
"accelY": {"type": "number"},
"accelZ": {"type": "number"},
"createDate": {"type": "string"},
"dataFormat":{"type": "number"},
"defaultBackground": {"type": "number"},
"favorite": {"type": "boolean"},
"humidity": {"type": "number"},
"id": {"type": "string"},
"measurementSequenceNumber": {"type": "number"},
"movementCounter": {"type": "number"},
"name": {"type": "string"},
"pressure": {"type": "number"},
"rssi": {"type": "number"},
"temperature": {"type": "number"},
"txPower":{"type": "number"},
"updateAt": {"type": "string"},
"voltage": {"type": "number"}
}
}
},
"time": {"type": "string"}
}
}
For each action I also enabled CORS and replaced the existing CORS headers.
These two videos explain the entire process much better than the documentation, and I hope they help.
Part 1
Part 2
By bad request, do you mean status code 400? It could simply be that you are not correctly calling your API.
If you are getting a 403, then you need to show that you are authorized to access the resource you are trying to get. You can see how to do this through the AWS docs.
This page includes a link to an example.
List of error codes.
I am trying to get data from my DynamoDB table called dashboard, so I am testing out the Lambda function with a sample from the table.
But all I am getting back from the test is:
Response:
{
"statusCode": 200,
"body": "\"Hello from Lambda!\""
}
It should just return the data from the table that matches based on the ID, as that is what I use to partition the table.
Dashboard example data, which is also the test event I made:
{
"batteryLevel": 35,
"deviceId": "yxftd9pnitd-156xhref9g69a",
"eventId": "c07e3f9f-f6bb-4792-be6f-a9be95cdff38",
"id": 12345,
"location": {
"accuracy": 35.369,
"latitude": 55.8256671,
"longitude": 37.5962931
},
"tags": [
{
"accelX": 0.012,
"accelY": -0.004,
"accelZ": 1.008,
"createDate": "2020-08-11T18:51:58+0300",
"dataFormat": 5,
"defaultBackground": 2,
"favorite": true,
"humidity": 32.8425,
"id": "E5:F1:98:34:C0:0F",
"measurementSequenceNumber": 62865,
"movementCounter": 21,
"name": "Kitchen",
"pressure": 98702,
"rssi": -43,
"temperature": 25.58,
"txPower": 4,
"updateAt": "2020-08-18T19:57:48+0300",
"voltage": 3.013
}
],
"time": "2020-08-18T19:57:48+0300"
}
Lambda Function
"use strict";
const AWS = require("aws-sdk");
AWS.config.update({ region: "ap-southeast-1" });
exports.handler = async (event, context) => {
const documentClient = new AWS.DynamoDB.DocumentClient({ region: "ap-southeast-1"});
const params = {
TableName: "dashboard",
Key: {
id: 12345
}
};
try {
const data = await documentClient.get(params).promise();
console.log(data);
} catch (err) {
console.log(err);
}
};
Based on the comments, the issue was caused by not deploying the function after adding new code, so the previously deployed version (i.e. "Hello from Lambda") was still being executed.
The solution was to deploy the new function. (In the current Lambda console, saving in the code editor is not enough; the change only goes live once you click Deploy.)
I'm working on a project with AWS, and I've created an endpoint connected to an SQS queue that then triggers a Lambda function. The Lambda should write data to a database, but for some reason it doesn't. When I try to trigger it manually with a request like this
{
"Records": [
{
"messageId": "11d6ee51-4cc7-4302-9e22-7cd8afdaadf5",
"receiptHandle": "AQEBBX8nesZEXmkhsmZeyIE8iQAMig7qw...",
"body": {
"duration": "1230",
"player_1": "UR-da336be50ba9b8e53b8",
"player_2": "UR-a67322a021284404128",
"status_1": 1,
"status_2": 0
},
"attributes": {
"ApproximateReceiveCount": "1",
"SentTimestamp": "1573251510774",
"SequenceNumber": "18849496460467696128",
"MessageGroupId": "1",
"SenderId": "AIDAIO23YVJENQZJOL4VO",
"MessageDeduplicationId": "1",
"ApproximateFirstReceiveTimestamp": "1573251510774"
},
"messageAttributes": {},
"md5OfBody": "e4e68fb7bd0e697a0ae8f1bb342846b3",
"eventSource": "aws:sqs",
"eventSourceARN": "arn:aws:sqs:us-east-2:123456789012:fifo.fifo",
"awsRegion": "us-east-2"
},
{
"messageId": "11d6ee51-4cc7-4302-9e22-7cd8afdaadf5",
"receiptHandle": "AQEBBX8nesZEXmkhsmZeyIE8iQAMig7qw...",
"body": {
"duration": "5510",
"player_1": "UR-da336be50ba9b8e53b8",
"player_2": "UR-a67322a021284404128",
"status_1": 1,
"status_2": 0
},
"attributes": {
"ApproximateReceiveCount": "1",
"SentTimestamp": "1573251510774",
"SequenceNumber": "18849496460467696128",
"MessageGroupId": "1",
"SenderId": "AIDAIO23YVJENQZJOL4VO",
"MessageDeduplicationId": "1",
"ApproximateFirstReceiveTimestamp": "1573251510774"
},
"messageAttributes": {},
"md5OfBody": "e4e68fb7bd0e697a0ae8f1bb342846b3",
"eventSource": "aws:sqs",
"eventSourceARN": "arn:aws:sqs:us-east-2:123456789012:fifo.fifo",
"awsRegion": "us-east-2"
}
]
}
It works fine, but when it gets called from SQS nothing happens, and the message is deleted from the queue.
The code for my Lambda:
const { Client } = require("pg");
const client = new Client({
user: process.env.POSTGRES_USER,
host: process.env.POSTGRES_HOST,
database: process.env.POSTGRES_DATABASE,
password: process.env.POSTGRES_PASSWORD,
port: parseInt(process.env.POSTGRES_PORT),
});
client.connect();
async function asyncForEach(array, callback) {
for (let index = 0; index < array.length; index++) {
await callback(array[index], index, array);
}
}
exports.handler = async (event) => {
try {
await asyncForEach(event.Records, async (record) => {
//Writes the game info to the DB
const result = await client.query(
`INSERT INTO game_data (duration) VALUES (
${record.body.duration}
) RETURNING game_id`
);
const res = await Promise.all([
client.query(
`INSERT INTO player_game_data (user_id,game_id,player_game_status) VALUES (
'${record.body.player_1}',
'${result.rows[0].game_id}',
${record.body.status_1}
)`
),
client.query(
`INSERT INTO player_game_data (user_id,game_id,player_game_status) VALUES (
'${record.body.player_2}',
'${result.rows[0].game_id}',
${record.body.status_2}
)`
),
]);
}
);
return {
  statusCode: 200
};
} catch (error) {
return {
statusCode: 400,
error: error.message,
};
}
};
I've tested the queue and it works fine, so the problem is probably somewhere here.
The problem is that the JSON you send to SQS gets converted to a string, so you need to convert it back to an object in your Lambda using JSON.parse().
The request that comes through SQS looks something like this:
{
"Records": [
{
"messageId": "bc62a976-06ef-4727-8bb5-8d7b0a474f7d",
"receiptHandle": "AQEBd4oxuMTytPn8AWORy992aGqGO5By+pM1x2dtZpyn0n8cxTJEd9/BXemUnAAbU+tx1jRlsRWCYhPnrrBvCj91nUpw5gT10WGkuQcv6fCH+ePqqON6sIHy9+8csqhzCwphDqdA23SLfidEGMwuW8mvNN+Lh541vfgHoYSQhMv51qLjHADbiSUzfsIYVnvmqU+C3D55OX/OhDOJoWY87XIEjpSEqRKx4s8wTF6edpYyun0IBYUA68W5CFkg+RBuWPeKsGLNENCvCpawcknYOCKrxeMrWRTh73qHZzH6QnNTO5S4fzQONKH2MWjFsIy7T01w1feNSD3qt/m3vakWhQnhi8VDn9KUJCIdKbhxpdqZB3QSPAKvfjRtEkwXQu2pGUpezMtWbNmsQfaEw84+7BV/CQ==",
"body": "{\r\n \"duration\": \"69\",\r\n \"player_1\": \"UR-da336be50ba9b8e53b8\",\r\n \"player_2\": \"UR-a67322a021284404128\",\r\n \"status_1\": 0,\r\n \"status_2\": 1\r\n}",
"attributes": {
"ApproximateReceiveCount": "1",
"AWSTraceHeader": "Root=1-5f454d30-8772a1ac004584ac5e0cbf48",
"SentTimestamp": "1598377264745",
"SenderId": "AROAYLJPJR5FJH6OQCRDF:BackplaneAssumeRoleSession",
"ApproximateFirstReceiveTimestamp": "1598377264750"
},
"messageAttributes": {},
"md5OfBody": "0be85f29f6c6dd29e328b58a01e3db2a",
"eventSource": "aws:sqs",
"eventSourceARN": "arn:aws:sqs:us-east-1:574014197578:dev-Post-GameResults",
"awsRegion": "us-east-1"
}
]
}
After I parsed the body, everything worked as intended.
Code after edit:
const { Client } = require("pg");
const client = new Client({
user: process.env.POSTGRES_USER,
host: process.env.POSTGRES_HOST,
database: process.env.POSTGRES_DATABASE,
password: process.env.POSTGRES_PASSWORD,
port: parseInt(process.env.POSTGRES_PORT),
});
client.connect();
async function asyncForEach(array, callback) {
for (let index = 0; index < array.length; index++) {
await callback(array[index], index, array);
}
}
exports.handler = async (event) => {
try {
await asyncForEach(event.Records, async (record) => {
var body = JSON.parse(record.body);
//Writes the game info to the DB
const result = await client.query(
`INSERT INTO game_data (duration) VALUES (${body.duration}) RETURNING game_id`
);
const res = await Promise.all([
client.query(
`INSERT INTO player_game_data (user_id,game_id,player_game_status) VALUES (
'${body.player_1}',
'${result.rows[0].game_id}',
${body.status_1}
)`
),
client.query(
`INSERT INTO player_game_data (user_id,game_id,player_game_status) VALUES (
'${body.player_2}',
'${result.rows[0].game_id}',
${body.status_2}
)`
),
]);
}
);
return {
  statusCode: 200
};
} catch (error) {
return {
statusCode: 400,
error: error.message,
};
}
};