GCP PubSub long polling - google-cloud-platform

Is long polling available in the GCP Pub/Sub JS SDK?
I want to be able to process multiple Pub/Sub messages at once. For example:
There are 1,000 messages being sent to the topic in the span of 1 minute.
Suppose that, over the next 10 seconds, 50 messages will be sent to the topic. I want my subscription to long poll for those 10 seconds so that, instead of processing each message individually, it waits and potentially receives all 50 messages at once.
The AWS JS SDK has this feature, and I was hoping I could do the same on GCP.
https://docs.aws.amazon.com/sdk-for-javascript/v3/developer-guide/sqs-examples-enable-long-polling.html
This is an example of how it works on AWS:
The SQS queue has more than 5 messages.
The listener receiveMessage gets 5 messages at once in a single receive event.
// Load the AWS SDK for Node.js
var AWS = require("aws-sdk");

// Set the AWS Region
const REGION = "us-east-1"; // e.g. "us-east-1"

// Set the parameters
const queueURL =
  "https://sqs.us-east-1.amazonaws.com/763335115465/long-polling-per-message"; // SQS_QUEUE_URL
const params = {
  AttributeNames: ["SentTimestamp"],
  MaxNumberOfMessages: 5,
  MessageAttributeNames: ["All"],
  QueueUrl: queueURL,
  WaitTimeSeconds: 20, // long poll for up to 20 seconds
};

// Create SQS service object
const sqs = new AWS.SQS({
  region: REGION,
  credentials: {
    accessKeyId: "xx",
    secretAccessKey: "xxx",
  },
});

sqs.receiveMessage(params, function (err, data) {
  console.log({ err, data: JSON.stringify(data) });
  if (err) {
    console.log("Receive Error", err);
  } else if (data.Messages) {
    var deleteParams = {
      QueueUrl: queueURL,
      ReceiptHandle: data.Messages[0].ReceiptHandle,
    };
    sqs.deleteMessage(deleteParams, function (err, data) {
      if (err) {
        console.log("Delete Error", err);
      } else {
        console.log("Message Deleted", data);
      }
    });
  }
});
{
  "ResponseMetadata": { "RequestId": "25295507-c4ae-5106-a499-0d7808c163b8" },
  "Messages": [
    {
      "MessageId": "5dbd863e-2c50-49c8-9c4b-9f70e8db8d17",
      "ReceiptHandle": "asdf",
      "MD5OfBody": "78ef53e38c997c445f2fe1cc63c13139",
      "Body": "Test5",
      "Attributes": { "SentTimestamp": "1610991641728" }
    },
    {
      "MessageId": "09baf624-f2ee-4173-83ed-e74c0516a7e6",
      "ReceiptHandle": "asdf",
      "MD5OfBody": "c454552d52d55d3ef56408742887362b",
      "Body": "Test2",
      "Attributes": { "SentTimestamp": "1610991983369" }
    },
    {
      "MessageId": "1cac914f-d946-434a-87a0-974b14cc2eba",
      "ReceiptHandle": "asdf",
      "MD5OfBody": "b3f66ec1535de7702c38e94408fa4a17",
      "Body": "Test3",
      "Attributes": { "SentTimestamp": "1610991986299" }
    },
    {
      "MessageId": "95c2c8ad-fc7a-451a-b967-8ce1736a4cab",
      "ReceiptHandle": "asdf",
      "MD5OfBody": "f178860b5109214d9f3debe19a7800d3",
      "Body": "Test7",
      "Attributes": { "SentTimestamp": "1610991998129" }
    },
    {
      "MessageId": "3711fa29-9bbc-418d-a35f-7adbd7daa952",
      "ReceiptHandle": "asd",
      "MD5OfBody": "b6e30158b9d7d2dc8bb4f4123fe93c9b",
      "Body": "Test10",
      "Attributes": { "SentTimestamp": "1610992008975" }
    }
  ]
}

The Cloud Pub/Sub JS library provides a streaming, per-message API for receiving messages from subscriptions. There isn't a way to tell the library to give you a batch of N messages, so you'll have to implement it yourself.
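For example, one way to approximate the 10-second batch from the question is to buffer messages as the streaming API delivers them and flush the buffer on an interval. A minimal sketch of that approach; the subscription name and the flow-control cap are assumptions, not part of the asker's setup:

const { PubSub } = require("@google-cloud/pubsub");

const pubsub = new PubSub();
// Subscription name and maxMessages limit are assumptions; adjust to your setup.
const subscription = pubsub.subscription("my-sub", {
  flowControl: { maxMessages: 50 },
});

let buffer = [];

subscription.on("message", (message) => {
  // Collect messages instead of processing them one by one.
  buffer.push(message);
});
subscription.on("error", console.error);

// Every 10 seconds, process whatever has accumulated as a single batch.
setInterval(() => {
  const batch = buffer;
  buffer = [];
  if (batch.length === 0) return;
  console.log(`Processing ${batch.length} messages at once`);
  batch.forEach((message) => message.ack());
}, 10000);

The client library leases messages and extends their ack deadlines while they are outstanding, so holding them for 10 seconds should be safe with the default settings.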

Yes, you can have it, and it's documented here. Set the timeout after which you want to stop listening on the subscription; if you want to wait 10 seconds, use a timeout of 10,000 ms.
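A minimal sketch of that pattern, assuming a subscription named "my-sub" (hypothetical): attach a message handler, then remove it when the timeout fires.

const { PubSub } = require("@google-cloud/pubsub");

const pubsub = new PubSub();
const subscription = pubsub.subscription("my-sub"); // hypothetical name

const messageHandler = (message) => {
  console.log(`Received ${message.id}: ${message.data.toString()}`);
  message.ack();
};

subscription.on("message", messageHandler);
subscription.on("error", console.error);

// Stop receiving after 10 seconds (10,000 ms).
setTimeout(() => {
  subscription.removeListener("message", messageHandler);
}, 10000);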

Related

Add data from AWS Cloudwatch to Kinesis

I have data in CloudWatch Logs to which IP addresses were sent.
For example:
{ "requestId": "test", "ip": "0.0.0.0", "requestTime": "24/May/2022", "httpMethod": "POST", ...}
I also have a Kinesis Data Stream, to which data was sent with status 200:
{
  "Records": [
    {
      "SequenceNumber": "test",
      "Data": {
        test
      },
      "PartitionKey": "test"
    }
  ],
  "NextShardIterator": "test",
  "MillisBehindLatest": 0,
  "ResponseMetadata": {
    "RequestId": "test",
    "HTTPStatusCode": 200,
    "HTTPHeaders": {
      ...
    },
    "RetryAttempts": 1
  }
}
Is it possible to add the IP for each record?

AWS - How to get Cloudfront metrics using Javascript SDK

I am trying to get Cloudfront metrics using the JS SDK for AWS but I am not getting anything back.
I am not sure what I am doing wrong, but I have isolated that the problem is NOT with:
Permissions (gave it a full admin account for testing purposes)
Region: North Virginia, i.e. us-east-1 (required for CloudFront metrics)
Basic params like StartTime, EndTime, DistributionId
My code is as below (simplified):
var AWS = require('aws-sdk');
AWS.config.update({
  accessKeyId: "accessKeyId",
  secretAccessKey: "secretAccessKey",
  apiVersion: '2017-10-25',
  region: 'us-east-1'
});

var cloudwatchmetrics = new AWS.CloudWatch();
var cloudFrontParams = {
  "StartTime": 1518867432,
  "EndTime": 1518868032,
  "MetricDataQueries": [
    {
      "Id": "m1",
      "MetricStat": {
        "Metric": {
          "Dimensions": [
            {
              "Name": "DistributionId",
              "Value": "ValueOfDistribution"
            },
            {
              "Name": "Region",
              "Value": "Global"
            }
          ],
          "MetricName": "Requests",
          "Namespace": "AWS/CloudFront"
        },
        "Stat": "Sum",
        "Period": 3600
      },
      "ReturnData": true
    }
  ]
};

cloudwatchmetrics.getMetricData(cloudFrontParams, function (err, data) {
  if (err) {
    console.log(err);
  } else {
    console.log(JSON.stringify(data));
  }
});
This is what I get back (it's not erroring out):
{
  "ResponseMetadata": {
    "RequestId": "xxxxxxxxxxxxxxxxxxxxx"
  },
  "MetricDataResults": [
    {
      "Id": "m1",
      "Label": "Requests",
      "Timestamps": [],
      "Values": [],
      "StatusCode": "Complete",
      "Messages": []
    }
  ]
}
The issue was with the StartTime: it was too far back in time. The value in the post translates to Saturday, February 17, 2018, and CloudWatch only retains metric data for a limited period, so a query that far back returns empty results.
Hopefully this helps someone someday.
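To avoid that pitfall, one option is to compute the window relative to now instead of hard-coding epoch seconds; the aws-sdk v2 accepts Date objects for StartTime and EndTime. A sketch using the same query as above (the distribution ID is still a placeholder):

var AWS = require("aws-sdk");
AWS.config.update({ region: "us-east-1" });

var cloudwatchmetrics = new AWS.CloudWatch();

// Build the window relative to now: the last 24 hours.
var endTime = new Date();
var startTime = new Date(endTime.getTime() - 24 * 60 * 60 * 1000);

var cloudFrontParams = {
  StartTime: startTime,
  EndTime: endTime,
  MetricDataQueries: [
    {
      Id: "m1",
      MetricStat: {
        Metric: {
          Dimensions: [
            { Name: "DistributionId", Value: "ValueOfDistribution" },
            { Name: "Region", Value: "Global" }
          ],
          MetricName: "Requests",
          Namespace: "AWS/CloudFront"
        },
        Stat: "Sum",
        Period: 3600
      },
      ReturnData: true
    }
  ]
};

cloudwatchmetrics.getMetricData(cloudFrontParams, function (err, data) {
  if (err) console.log(err);
  else console.log(JSON.stringify(data));
});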

Unable to send GET request with AWS Lambda & DynamoDB Rest API using serverless

I am creating an API to make GET and POST request to a table in DynamoDB.
I deployed it using serverless and received the endpoints for each API type.
But when testing it out with Postman I get the following error:
Bad request. We can't connect to the server for this app or website at this time. There might be too much traffic or a configuration error. Try again later, or contact the app or website owner.
If you provide content to customers through CloudFront, you can find steps to troubleshoot and help prevent this error by reviewing the CloudFront documentation.
Code for creating the data in the table:
// Imports assumed (not shown in the original snippet)
const AWS = require("aws-sdk");
const { v4: uuidv4 } = require("uuid");
const db = new AWS.DynamoDB.DocumentClient();

const postsTable = process.env.POSTS_TABLE;

// Create a response
function response(statusCode, message) {
  return {
    statusCode: statusCode,
    body: JSON.stringify(message)
  };
}

// Create a post
module.exports.createPost = (event, context, callback) => {
  const reqBody = JSON.parse(event.body);
  if (
    !reqBody.title ||
    reqBody.title.trim() === "" ||
    !reqBody.body ||
    reqBody.body.trim() === ""
  ) {
    return callback(
      null,
      response(400, {
        error: "Post must have a title and body and they must not be empty"
      })
    );
  }
  const post = {
    id: uuidv4(),
    createdAt: new Date().toISOString(),
    userId: 1,
    title: reqBody.title,
    body: reqBody.body
  };
  return db
    .put({
      TableName: postsTable,
      Item: post
    })
    .promise()
    .then(() => {
      callback(null, response(201, post));
    })
    .catch(err => callback(null, response(err.statusCode, err)));
};
I managed to do it, but did not use Serverless.
I set up Lambda functions to POST and GET the data from a URL.
I think the issue previously was to do with the policies. This time, when making the Lambda functions, I clicked "Create a new role from AWS policy templates" while creating an execution role for the new function, then selected "Simple microservice permissions" under Policy templates. This adds the basic execution role policy plus the DynamoDB permissions below to the role, for all tables in the same region as the function:
"Action": [
"dynamodb:DeleteItem",
"dynamodb:GetItem",
"dynamodb:PutItem",
"dynamodb:Scan",
"dynamodb:UpdateItem"
]
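For reference, the full statement that template generates looks roughly like this (the Resource scoping is an assumption; check the generated role in the IAM console):

{
  "Effect": "Allow",
  "Action": [
    "dynamodb:DeleteItem",
    "dynamodb:GetItem",
    "dynamodb:PutItem",
    "dynamodb:Scan",
    "dynamodb:UpdateItem"
  ],
  "Resource": "arn:aws:dynamodb:ap-southeast-1:*:table/*"
}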
Lambda function for POST request
// Import assumed (not shown in the original snippet)
const AWS = require("aws-sdk");

exports.handler = async (event, context) => {
  const ddb = new AWS.DynamoDB({ apiVersion: "2012-10-08" });
  const documentClient = new AWS.DynamoDB.DocumentClient({
    region: "ap-southeast-1"
  });
  let responseBody = "";
  let statusCode = 0;
  const {
    deviceId,
    batteryLevel,
    eventId,
    id,
    location,
    tags,
    time
  } = JSON.parse(event.body);
  const params = {
    TableName: "dashboard",
    Item: {
      batteryLevel: batteryLevel,
      deviceId: deviceId,
      eventId: eventId,
      location: location,
      tags: tags,
      time: time
    }
  };
  try {
    const data = await documentClient.put(params).promise();
    responseBody = JSON.stringify(data);
    statusCode = 201;
  } catch (err) {
    responseBody = "Unable to POST data";
    statusCode = 403;
  }
  const response = {
    statusCode: statusCode,
    headers: {
      myHeader: "test"
    },
    body: responseBody
  };
  return response;
};
Another issue was with the method execution of the API: I needed to set a custom model for the request body to match my data:
{
  "$schema": "http://json-schema.org/draft-04/schema#",
  "title": "DashboardInputModel",
  "type": "object",
  "properties": {
    "batteryLevel": { "type": "string" },
    "deviceId": { "type": "string" },
    "eventId": { "type": "string" },
    "id": { "type": "number" },
    "location": {
      "type": "object",
      "properties": {
        "accuracy": { "type": "number" },
        "latitude": { "type": "number" },
        "longitude": { "type": "number" }
      }
    },
    "tags": {
      "type": "array",
      "items": {
        "type": "object",
        "properties": {
          "accelX": { "type": "number" },
          "accelY": { "type": "number" },
          "accelZ": { "type": "number" },
          "createDate": { "type": "string" },
          "dataFormat": { "type": "number" },
          "defaultBackground": { "type": "number" },
          "favorite": { "type": "boolean" },
          "humidity": { "type": "number" },
          "id": { "type": "string" },
          "measurementSequenceNumber": { "type": "number" },
          "movementCounter": { "type": "number" },
          "name": { "type": "string" },
          "pressure": { "type": "number" },
          "rssi": { "type": "number" },
          "temperature": { "type": "number" },
          "txPower": { "type": "number" },
          "updateAt": { "type": "string" },
          "voltage": { "type": "number" }
        }
      }
    },
    "time": { "type": "string" }
  }
}
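As a usage sketch, a request whose body matches that model might look like this; the invoke URL is a placeholder, and note the model declares batteryLevel and time as strings:

// Placeholder invoke URL; use the one API Gateway gives you after deployment.
const endpoint =
  "https://abc123.execute-api.ap-southeast-1.amazonaws.com/prod/dashboard";

const payload = {
  batteryLevel: "35",
  deviceId: "yxftd9pnitd-156xhref9g69a",
  eventId: "c07e3f9f-f6bb-4792-be6f-a9be95cdff38",
  id: 12345,
  location: { accuracy: 35.369, latitude: 55.8256671, longitude: 37.5962931 },
  tags: [],
  time: "2020-08-18T19:57:48+0300"
};

// Node 18+ has fetch built in.
fetch(endpoint, {
  method: "POST",
  headers: { "Content-Type": "application/json" },
  body: JSON.stringify(payload)
})
  .then((res) => res.json())
  .then(console.log)
  .catch(console.error);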
For each action I also enabled CORS and replaced the existing CORS headers.
These two videos explain the entire process much better than the documentation, and I hope they help:
Part 1
Part 2
By bad request, do you mean status code 400? It could simply be that you are not calling your API correctly.
If you are getting a 403, then you need to pass along proof that you are authorized to access the resource you are requesting. You can see how to do this through the AWS docs.
This page includes a link to an example.
List of error codes.
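As a concrete illustration of the 403 case: if the API is protected with an API Gateway API key (an assumption; your API may use IAM or Cognito auth instead), the request needs the key in a header:

// Hypothetical URL and key; API Gateway API-key auth assumed.
fetch("https://abc123.execute-api.us-east-1.amazonaws.com/prod/posts", {
  headers: { "x-api-key": "your-api-key" }
})
  .then((res) => res.json())
  .then(console.log)
  .catch(console.error);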

AWS Lambda test does not return data from DynamoDB table

I am trying to get data from my DynamoDB table called dashboard, so I am testing out the Lambda function with a sample from the table.
But all I am getting back from the test is:
Response:
{
  "statusCode": 200,
  "body": "\"Hello from Lambda!\""
}
It should return the item from the table that matches the ID, since that is what I use to partition the table.
Dashboard example data which is also the test I made
{
  "batteryLevel": 35,
  "deviceId": "yxftd9pnitd-156xhref9g69a",
  "eventId": "c07e3f9f-f6bb-4792-be6f-a9be95cdff38",
  "id": 12345,
  "location": {
    "accuracy": 35.369,
    "latitude": 55.8256671,
    "longitude": 37.5962931
  },
  "tags": [
    {
      "accelX": 0.012,
      "accelY": -0.004,
      "accelZ": 1.008,
      "createDate": "2020-08-11T18:51:58+0300",
      "dataFormat": 5,
      "defaultBackground": 2,
      "favorite": true,
      "humidity": 32.8425,
      "id": "E5:F1:98:34:C0:0F",
      "measurementSequenceNumber": 62865,
      "movementCounter": 21,
      "name": "Kitchen",
      "pressure": 98702,
      "rssi": -43,
      "temperature": 25.58,
      "txPower": 4,
      "updateAt": "2020-08-18T19:57:48+0300",
      "voltage": 3.013
    }
  ],
  "time": "2020-08-18T19:57:48+0300"
}
Lambda Function
"use strict";
const AWS = require("aws-sdk");
AWS.config.update({ region: "ap-southeast-1" });
exports.handler = async (event, context) => {
const ddb = new AWS.DynamoDB({ apiVersion: "2012-10-08" });
const documentClient = new AWS.DynamoDB.DocumentClient({ region: "ap-southeast-1"});
const params = {
TableName: "dashboard",
Key: {
id: 12345
}
};
try {
const data = await documentClient.get(params);
console.log(data);
} catch (err) {
console.log(err);
}
};
Based on the comments:
The issue was caused by not deploying the function after adding new code, so the previously deployed version (i.e. "Hello from Lambda!") was still being executed.
The solution was to deploy the new function.

Write message from SQS to database using Lambda

I'm working on a project with AWS, and I've created an endpoint that I've connected to an SQS queue that then triggers a Lambda function. The Lambda should write data to a database, but for some reason it doesn't. When I trigger it manually with a request like this:
{
  "Records": [
    {
      "messageId": "11d6ee51-4cc7-4302-9e22-7cd8afdaadf5",
      "receiptHandle": "AQEBBX8nesZEXmkhsmZeyIE8iQAMig7qw...",
      "body": {
        "duration": "1230",
        "player_1": "UR-da336be50ba9b8e53b8",
        "player_2": "UR-a67322a021284404128",
        "status_1": 1,
        "status_2": 0
      },
      "attributes": {
        "ApproximateReceiveCount": "1",
        "SentTimestamp": "1573251510774",
        "SequenceNumber": "18849496460467696128",
        "MessageGroupId": "1",
        "SenderId": "AIDAIO23YVJENQZJOL4VO",
        "MessageDeduplicationId": "1",
        "ApproximateFirstReceiveTimestamp": "1573251510774"
      },
      "messageAttributes": {},
      "md5OfBody": "e4e68fb7bd0e697a0ae8f1bb342846b3",
      "eventSource": "aws:sqs",
      "eventSourceARN": "arn:aws:sqs:us-east-2:123456789012:fifo.fifo",
      "awsRegion": "us-east-2"
    },
    {
      "messageId": "11d6ee51-4cc7-4302-9e22-7cd8afdaadf5",
      "receiptHandle": "AQEBBX8nesZEXmkhsmZeyIE8iQAMig7qw...",
      "body": {
        "duration": "5510",
        "player_1": "UR-da336be50ba9b8e53b8",
        "player_2": "UR-a67322a021284404128",
        "status_1": 1,
        "status_2": 0
      },
      "attributes": {
        "ApproximateReceiveCount": "1",
        "SentTimestamp": "1573251510774",
        "SequenceNumber": "18849496460467696128",
        "MessageGroupId": "1",
        "SenderId": "AIDAIO23YVJENQZJOL4VO",
        "MessageDeduplicationId": "1",
        "ApproximateFirstReceiveTimestamp": "1573251510774"
      },
      "messageAttributes": {},
      "md5OfBody": "e4e68fb7bd0e697a0ae8f1bb342846b3",
      "eventSource": "aws:sqs",
      "eventSourceARN": "arn:aws:sqs:us-east-2:123456789012:fifo.fifo",
      "awsRegion": "us-east-2"
    }
  ]
}
It works fine, but when it gets called from SQS nothing happens, and the message is deleted from the queue.
The code for my Lambda:
const { Client } = require("pg");

const client = new Client({
  user: process.env.POSTGRES_USER,
  host: process.env.POSTGRES_HOST,
  database: process.env.POSTGRES_DATABASE,
  password: process.env.POSTGRES_PASSWORD,
  port: parseInt(process.env.POSTGRES_PORT),
});
client.connect();

async function asyncForEach(array, callback) {
  for (let index = 0; index < array.length; index++) {
    await callback(array[index], index, array);
  }
}

exports.handler = async (event) => {
  try {
    await asyncForEach(event.Records, async (record) => {
      // Writes the game info to the DB
      const result = await client.query(
        `INSERT INTO game_data (duration) VALUES (
          ${record.body.duration}
        ) RETURNING game_id`
      );
      const res = await Promise.all([
        client.query(
          `INSERT INTO player_game_data (user_id, game_id, player_game_status) VALUES (
            '${record.body.player_1}',
            '${result.rows[0].game_id}',
            ${record.body.status_1}
          )`
        ),
        client.query(
          `INSERT INTO player_game_data (user_id, game_id, player_game_status) VALUES (
            '${record.body.player_2}',
            '${result.rows[0].game_id}',
            ${record.body.status_2}
          )`
        ),
      ]);
    });
    return {
      statusCode: 200,
    };
  } catch (error) {
    return {
      statusCode: 400,
      error: error.message,
    };
  }
};
I've tested the queue and it works fine, so the problem is probably somewhere here.
The problem is that the JSON you send to SQS gets delivered to the Lambda as a string, so you need to convert it back to an object in your Lambda using JSON.parse().
The request that actually comes through SQS looks something like this:
{
  "Records": [
    {
      "messageId": "bc62a976-06ef-4727-8bb5-8d7b0a474f7d",
      "receiptHandle": "AQEBd4oxuMTytPn8AWORy992aGqGO5By+pM1x2dtZpyn0n8cxTJEd9/BXemUnAAbU+tx1jRlsRWCYhPnrrBvCj91nUpw5gT10WGkuQcv6fCH+ePqqON6sIHy9+8csqhzCwphDqdA23SLfidEGMwuW8mvNN+Lh541vfgHoYSQhMv51qLjHADbiSUzfsIYVnvmqU+C3D55OX/OhDOJoWY87XIEjpSEqRKx4s8wTF6edpYyun0IBYUA68W5CFkg+RBuWPeKsGLNENCvCpawcknYOCKrxeMrWRTh73qHZzH6QnNTO5S4fzQONKH2MWjFsIy7T01w1feNSD3qt/m3vakWhQnhi8VDn9KUJCIdKbhxpdqZB3QSPAKvfjRtEkwXQu2pGUpezMtWbNmsQfaEw84+7BV/CQ==",
      "body": "{\r\n \"duration\": \"69\",\r\n \"player_1\": \"UR-da336be50ba9b8e53b8\",\r\n \"player_2\": \"UR-a67322a021284404128\",\r\n \"status_1\": 0,\r\n \"status_2\": 1\r\n}",
      "attributes": {
        "ApproximateReceiveCount": "1",
        "AWSTraceHeader": "Root=1-5f454d30-8772a1ac004584ac5e0cbf48",
        "SentTimestamp": "1598377264745",
        "SenderId": "AROAYLJPJR5FJH6OQCRDF:BackplaneAssumeRoleSession",
        "ApproximateFirstReceiveTimestamp": "1598377264750"
      },
      "messageAttributes": {},
      "md5OfBody": "0be85f29f6c6dd29e328b58a01e3db2a",
      "eventSource": "aws:sqs",
      "eventSourceARN": "arn:aws:sqs:us-east-1:574014197578:dev-Post-GameResults",
      "awsRegion": "us-east-1"
    }
  ]
}
After I parsed the body, everything worked as intended.
Code after the edit:
const { Client } = require("pg");

const client = new Client({
  user: process.env.POSTGRES_USER,
  host: process.env.POSTGRES_HOST,
  database: process.env.POSTGRES_DATABASE,
  password: process.env.POSTGRES_PASSWORD,
  port: parseInt(process.env.POSTGRES_PORT),
});
client.connect();

async function asyncForEach(array, callback) {
  for (let index = 0; index < array.length; index++) {
    await callback(array[index], index, array);
  }
}

exports.handler = async (event) => {
  try {
    await asyncForEach(event.Records, async (record) => {
      // The body arrives as a string, so parse it first.
      var body = JSON.parse(record.body);
      // Writes the game info to the DB
      const result = await client.query(
        `INSERT INTO game_data (duration) VALUES (${body.duration}) RETURNING game_id`
      );
      const res = await Promise.all([
        client.query(
          `INSERT INTO player_game_data (user_id, game_id, player_game_status) VALUES (
            '${body.player_1}',
            '${result.rows[0].game_id}',
            ${body.status_1}
          )`
        ),
        client.query(
          `INSERT INTO player_game_data (user_id, game_id, player_game_status) VALUES (
            '${body.player_2}',
            '${result.rows[0].game_id}',
            ${body.status_2}
          )`
        ),
      ]);
    });
    return {
      statusCode: 200,
    };
  } catch (error) {
    return {
      statusCode: 400,
      error: error.message,
    };
  }
};