Is there a way to push data from a Lambda function to a Kinesis stream? I have searched the internet but have not found any examples of it.
Thanks.
Yes, you can send information from Lambda to a Kinesis stream, and it is very simple to do. Make sure the Lambda function is running with the right permissions.
Create a file called kinesis.js. This file will provide a 'save' function that receives a payload and sends it to the Kinesis stream, so we can require this function anywhere we want to send data to the stream. Code:
const AWS = require('aws-sdk');
const kinesisConstant = require('./kinesisConstants'); //Keep it consistent

const kinesis = new AWS.Kinesis({
  apiVersion: kinesisConstant.API_VERSION, //optional
  //accessKeyId: '<you-can-use-this-to-run-it-locally>', //optional
  //secretAccessKey: '<you-can-use-this-to-run-it-locally>', //optional
  region: kinesisConstant.REGION
});

const savePayload = (payload) => {
  //We can only save strings into the streams
  if (typeof payload !== kinesisConstant.PAYLOAD_TYPE) {
    try {
      payload = JSON.stringify(payload);
    } catch (e) {
      console.log(e);
    }
  }

  let params = {
    Data: payload,
    PartitionKey: kinesisConstant.PARTITION_KEY,
    StreamName: kinesisConstant.STREAM_NAME
  };

  kinesis.putRecord(params, function(err, data) {
    if (err) console.log(err, err.stack);
    else console.log('Record added:', data);
  });
};

exports.save = (payload) => {
  const params = {
    StreamName: kinesisConstant.STREAM_NAME,
  };

  kinesis.describeStream(params, function(err, data) {
    if (err) console.log(err, err.stack);
    else {
      //Make sure stream is able to take new writes (ACTIVE or UPDATING are good)
      if (data.StreamDescription.StreamStatus === kinesisConstant.STATE.ACTIVE
        || data.StreamDescription.StreamStatus === kinesisConstant.STATE.UPDATING) {
        savePayload(payload);
      } else {
        console.log(`Kinesis stream ${kinesisConstant.STREAM_NAME} is ${data.StreamDescription.StreamStatus}.`);
        console.log('Record Lost', payload); //payload may not be JSON, so log it as-is
      }
    }
  });
};
Create a kinesisConstants.js file to keep it consistent :)
module.exports = {
  STATE: {
    ACTIVE: 'ACTIVE',
    UPDATING: 'UPDATING',
    CREATING: 'CREATING',
    DELETING: 'DELETING'
  },
  STREAM_NAME: '<your-stream-name>',
  PARTITION_KEY: '<string-value-if-one-shard-anything-will-do>',
  PAYLOAD_TYPE: 'string', //typeof returns lowercase 'string'
  REGION: '<the-region-where-you-have-lambda-and-kinesis>',
  API_VERSION: '2013-12-02'
};
Your handler file: we added the 'done' function to send a response back to whoever invoked the Lambda, but 'kinesis.save(event)' does all the work.
const kinesis = require('./kinesis');

exports.handler = (event, context, callback) => {
  console.log('LOADING handler');

  const done = (err, res) => callback(null, {
    statusCode: err ? '400' : '200',
    body: err || res,
    headers: {
      'Content-Type': 'application/json',
    },
  });

  kinesis.save(event); // here we send it to the stream
  done(null, event);
};
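If you want to try this outside Lambda first (assuming your AWS credentials are configured locally and the stream exists), here is a minimal test sketch; the file name is hypothetical:
// test.js - run with: node test.js
const kinesis = require('./kinesis');

// Any JSON-serializable object works; savePayload stringifies it before putRecord
kinesis.save({ hello: 'world', sentAt: new Date().toISOString() });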
This works exactly the same way as it would on your own computer.
Here's an example in Node.js:
let aws = require('aws-sdk');
let kinesis = new aws.Kinesis();

// data that you'd like to send
let data_object = { "some": "properties" };
let data = JSON.stringify(data_object);

// push data to kinesis
const params = {
  Data: data,
  PartitionKey: "1",
  StreamName: "stream name"
};

kinesis.putRecord(params, (err, data) => {
  if (err) console.error(err);
  else console.log("data sent");
});
Please note, this piece of code will not work as-is, because the Lambda has no permissions on your stream.
When accessing AWS resources from Lambda, it is better to use IAM roles.
When configuring a new Lambda, you can choose an existing role or create a new one.
Go to IAM, then Roles, and pick the role name you assigned to the Lambda function.
Add the relevant permissions (kinesis:PutRecord, kinesis:PutRecords), as in the example policy below.
Then, test the Lambda.
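For reference, a minimal policy sketch granting just those two actions; the resource ARN is a placeholder you would replace with your stream's ARN:
{
    "Version": "2012-10-17",
    "Statement": [
        {
            "Effect": "Allow",
            "Action": [
                "kinesis:PutRecord",
                "kinesis:PutRecords"
            ],
            "Resource": "arn:aws:kinesis:<region>:<account-id>:stream/<your-stream-name>"
        }
    ]
}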
Yes, this can be done. I was trying to accomplish the same thing and was able to do so in Lambda using the Node.js 4.3 runtime; it also works in version 6.10.
Here is the code:
Declare the following at the top of your Lambda function:
var AWS = require("aws-sdk");
var kinesis = new AWS.Kinesis();

function writeKinesis(rawdata) {
    var data = JSON.stringify(rawdata);
    var params = { Data: data, PartitionKey: "<PARTITION_KEY>", StreamName: "<STREAM_NAME>" };
    kinesis.putRecord(params, (err, data) => {
        if (err) console.error(err);
        else console.log("data sent");
    });
}
Now, in the exports.handler, call the function:
writeKinesis(<YOUR_DATA>);
A few things to note... for Kinesis to ingest data, it must be encoded. In the example below, I have a function that takes logs from CloudWatch and sends them over to a Kinesis stream.
Note that I'm passing the contents of buffer.toString('utf8') into the writeKinesis function:
var zlib = require('zlib'); // needed for gunzip below

exports.handler = function(input, context) {
    ...
    var zippedInput = new Buffer(input.awslogs.data, 'base64');
    zlib.gunzip(zippedInput, function(error, buffer) {
        ...
        writeKinesis(buffer.toString('utf8'));
        ...
    });
    ...
}
Finally, in IAM, configure the appropriate permissions. Your Lambda function has to run within the context of an IAM role that includes the permissions below. In my case, I just modified the default lambda_elasticsearch_execution role to include a policy called "lambda_kinesis_execution" with the following statement:
"Effect": "Allow",
"Action": [
"kinesis:*"
],
"Resource": [
"<YOUR_STREAM_ARN>"
]
I am trying to scan a DynamoDB table from the following code. Can anyone please guide me on what is wrong here?
const AWS = require("aws-sdk");
const dynamodb = new AWS.DynamoDB({
region: "eu-west-1",
apiVersion: "2012-08-10"
});
exports.handler = async (event, callback) => {
const params = {
TableName: "job_Status"
};
dynamodb.scan(params, (err, data) => {
if (err) {
console.log(err);
callback(err);
} else {
console.log(data);
callback(null, data);
}
});
};
I have given the function a role with full DynamoDB access, but it still gives me a null response. Any idea what can be wrong here?
I also tried with the DocumentClient, which is not working either:
const AWS = require("aws-sdk");
const db = new AWS.DynamoDB.DocumentClient({
region : 'eu-west-1'
});
exports.handler = async (event, callback) => {
const params = {
TableName: "job_Status"
};
db.scan(params, (err, data) => {
if (err) {
console.log(err);
callback(err);
} else {
console.log(data);
callback(null, data);
}
});
};
Your Lambda function is declared async, but your code uses callbacks. The async handler returns (and the function is terminated) before the scan callback is ever reached. I'd speculate that the null output you see is the return value of the Lambda function, not your console.log.
Replace your call to scan with the following:
try {
  let results = await db.scan(params).promise();
  console.log(results);
} catch (err) {
  console.log(err);
}
For more info, check out the AWS documentation about working with promises.
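Putting it together, a minimal sketch of the corrected handler, using the same table name as in the question:
const AWS = require("aws-sdk");

const db = new AWS.DynamoDB.DocumentClient({ region: "eu-west-1" });

exports.handler = async (event) => {
  const params = { TableName: "job_Status" };

  try {
    // .promise() turns the SDK call into a promise that can be awaited
    const results = await db.scan(params).promise();
    console.log(results);
    return results; // the resolved value becomes the Lambda response
  } catch (err) {
    console.log(err);
    throw err; // a thrown error fails the invocation
  }
};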
I have the Lambda below that gets items from DynamoDB and returns them to Connect just fine... however, I cannot seem to get the inputTranscript over. Is there a way I could implement this in my current Lambda so that Connect can access it? I am seriously stumped on this, as everything I have tried from the documentation has not worked for me.
const AWS = require("aws-sdk");
const dynamodb = new AWS.DynamoDB();
exports.handler = function(event, context, callback) {
console.log(`DEBUG:\t Event`, JSON.stringify(event, null, 4));
var params = {
Key: {
"name": {
"S": '"' + event.slots.list + '"'
}
},
TableName: 'ServiceOfferings'
};
dynamodb.getItem(params, function(err, data) {
if (err) {
console.log("ERROR:\t", err);
callback(err);
} else {
console.log(data);
if (data.Item) {
console.log('data returned from DynamoDB ', JSON.stringify(data));
callback(null, {
ServiceOffering: data.Item.name.S.slice(1, -1)
});
}
else {
console.log("no callback number found for intent");
callback(new Error("no callback number found for intent"));
}
}
});
};
The test event I use to ensure the Lambda works correctly is as follows:
{
    "dialog-state": "ReadyForFulfillment",
    "input-transcript": "my printer is not working",
    "slots": {
        "list": "Re-IP Project - Printers"
    },
    "intent-name": "getServiceOffering"
}
The response after testing this comes out to:
{
    "ServiceOffering": "Re-IP Project - Printers"
}
When you log the event, you should be able to see the inputTranscript being passed to your Lambda. So you simply need to take it out of the event and do with it what you want.
const AWS = require("aws-sdk");
const dynamodb = new AWS.DynamoDB();

exports.handler = function(event, context, callback) {
    console.log(`DEBUG:\t Event`, JSON.stringify(event, null, 4));
    var inputTranscript = event.inputTranscript;
    ...
    callback(null, {
        ServiceOffering: data.Item.name.S.slice(1, -1),
        inputTranscript: inputTranscript
    });
That includes the inputTranscript in your callback to Connect (I assume), and you can use the Set contact attributes block to save and handle the input within Connect.
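For completeness, a sketch of the full handler with the transcript included; it reuses the getItem logic from the question and assumes the standard Lex event shape for event.inputTranscript:
const AWS = require("aws-sdk");
const dynamodb = new AWS.DynamoDB();

exports.handler = function(event, context, callback) {
    // The transcript rides along on the incoming event
    var inputTranscript = event.inputTranscript;

    var params = {
        Key: { "name": { "S": '"' + event.slots.list + '"' } },
        TableName: 'ServiceOfferings'
    };

    dynamodb.getItem(params, function(err, data) {
        if (err) return callback(err);
        if (!data.Item) return callback(new Error("no callback number found for intent"));

        // Return both the looked-up value and the transcript to Connect
        callback(null, {
            ServiceOffering: data.Item.name.S.slice(1, -1),
            inputTranscript: inputTranscript
        });
    });
};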
I am new to AWS Lambda and S3. I am trying to create a microservice using API Gateway, S3, and Lambda.
I have written a Lambda function to retrieve an object from S3, but it returns null without throwing any error. I am not sure what is going wrong.
I have set up a role with access to S3 and used that role for the Lambda:
const AWS = require('aws-sdk');
var s3 = new AWS.S3();

exports.handler = async (event) => {
    var params = {
        "Bucket": "bucketname",
        "Key": "keyname"
    };

    s3.getObject(params, function(err, data) {
        if (err) {
            return "error while fetching data";
        } else {
            return data;
        }
    });
};
What am I doing wrong here?
You should return the data in the proper response format required for API Gateway proxy integration and use the callback parameter to reply. Also note that the handler must not be declared async when you reply through the callback, otherwise the function returns before getObject completes. Change your function to look like this:
const AWS = require('aws-sdk');
var s3 = new AWS.S3();

// Note: no 'async' here, so the runtime waits for the callback
exports.handler = (event, context, callback) => {
    var params = {
        "Bucket": "bucketname",
        "Key": "keyname"
    };

    s3.getObject(params, function(err, data) {
        if (err) {
            return callback(new Error("error while fetching data"));
        } else {
            let response = {
                statusCode: 200,
                headers: {
                    "x-custom-header": "my custom header value"
                },
                body: JSON.stringify(data)
            };
            return callback(null, response);
        }
    });
};
If you're not using the API Gateway proxy Lambda integration, then you can simply change the response to just return callback(null, data);
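Alternatively, if you want to keep the handler async, here is a sketch using the SDK's .promise() so the S3 call is actually awaited:
const AWS = require('aws-sdk');
const s3 = new AWS.S3();

exports.handler = async (event) => {
    const params = { Bucket: "bucketname", Key: "keyname" };

    // Awaiting the promise keeps the async handler alive until S3 responds
    const data = await s3.getObject(params).promise();

    return {
        statusCode: 200,
        headers: { "x-custom-header": "my custom header value" },
        body: JSON.stringify(data)
    };
};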
I have a "main" Lambda function that gets triggered by SNS. It pulls a list of recipients from the database and it needs to send each of them a message based on a template, replacing things like first name and such.
The way I have it setup is I created another Lambda function called "email-send" which is subscribed to "email-send" topic. The "main" Lambda then loops through the recipients list and publishes messages to "email-send" with a proper payload (from, to, subject, message). This might eventually need to process 1000's of emails in a single batch.
Is this a good approach for my requirements? Perhaps Lambda/SNS is not the way to go? If so, what would you recommend?
With this setup I am running into issues: my "main" function finishes running and somehow "sns.publish" does not get triggered in my loop. I assume it's because I am not letting it finish, but I am not sure how to fix that, since it is a loop.
Here is the snippet from my Lambda function:
exports.handler = (event, context, callback) => {
    // code is here to pull data into "data" array

    // process records
    for (var i = 0; i < data.length; i++) {
        var sns = new aws.SNS();
        sns.publish({
            Message: JSON.stringify({ from: data[i].from, to: data[i].to, subject: subject, body: body }),
            TopicArn: 'arn:aws:sns:us-west-2:XXXXXXXX:email-send'
        }, function(err, data) {
            if (err) {
                console.log(err.stack);
            } else {
                console.log('SNS pushed!');
            }
        });
    }

    context.succeed("success");
};
Thanks for any assistance.
Your code is doing this...
Begin calling sns.publish() 1000 times
Return (through context.succeed())
You didn't wait for those 1000 calls to finish!
What your code should do is...
Begin calling sns.publish() 1000 times
When all calls to sns.publish() have returned, then return (context.succeed is the old style, so we use callback() instead).
Something like this...
// Instantiate the client only once instead of data.length times
const aws = require('aws-sdk');
const sns = new aws.SNS();

exports.handler = (event, context, callback) => {
  const snsCalls = [];

  // data, subject and body come from the lookup code omitted in the question
  for (var i = 0; i < data.length; i++) {
    snsCalls.push(sns.publish({
      Message: JSON.stringify({
        from: data[i].from,
        to: data[i].to,
        subject: subject,
        body: body
      }),
      TopicArn: 'arn:aws:sns:us-west-2:XXXXXXXX:email-send'
    }).promise());
  }

  return Promise.all(snsCalls)
    .then(() => callback(null, 'Success'))
    .catch(err => callback(err));
};
I think a better approach is to invoke the worker Lambda directly through the AWS Lambda API.
That way, you don't need SNS.
For example:
var lambda = new AWS.Lambda({ region: AWS_REGION });

function invokeWorkerLambda(task, callback) {
    var params = {
        FunctionName: WORKER_LAMBDA_NAME,
        InvocationType: 'Event',
        Payload: JSON.stringify({.....})
    };

    lambda.invoke(params, function(err, data) {
        if (err) {
            console.error(err, err.stack);
            callback(err);
        } else {
            callback(null, data);
        }
    });
}
As you can see, you don't need SNS to invoke a Lambda function.
Important: another suggestion is to create an array of invocations (functions) and later execute them as follows (see the sketch after the snippet for how to build that array):
async.parallel(invocations, function(err) {
    if (err) {
        console.error(err, err.stack);
        callback(err);
    }
});
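A sketch of how that invocations array might be built, assuming the async library from npm and the invokeWorkerLambda function above; tasks is a hypothetical array of work items (e.g. recipients):
var async = require('async'); // npm install async

var invocations = tasks.map(function(task) {
    // async.parallel expects functions that take a single callback
    return function(callback) {
        invokeWorkerLambda(task, callback);
    };
});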
Take a look at this link, where I learned a lot about Lambda invocation: https://cloudonaut.io/integrate-sqs-and-lambda-serverless-architecture-for-asynchronous-workloads/
I have a use case of pushing SNS notifications from AWS Lambda. I have written the following code, with an IAM role that has permission to publish to SNS. Even with the Kinesis trigger, this Lambda function is unable to send any notification to SNS. I confirmed this by subscribing my email address to the SNS topic.
[EDIT]: Just a follow-up question. I now need to query DynamoDB and, based on the output, call a different SNS endpoint. Now, when I query DynamoDB, execution stops after the DynamoDB query and never progresses to the SNS checks.
Following is my Lambda function code.
console.log('Loading function');
var AWS = require('aws-sdk');

exports.handler = function(event, context) {
    event.Records.forEach(function(record) {
        //var payload = new Buffer(record.kinesis.data, 'base64').toString('ascii');
        var payload = record.kinesis.data;
        console.log('Decoded payload:', payload);

        var dynamodb = new AWS.DynamoDB();
        var dynamodb_params = {
            Key: {
                dataSource: {
                    S: payload
                }
            },
            TableName: 'TableName',
            AttributesToGet: [
                'attribute' // ...
            ],
            ConsistentRead: false,
        };

        var sns_endpoint = null;

        dynamodb.getItem(dynamodb_params, function(err, data) {
            if (err) {
                console.log(err.stack);
                context.done(err, 'Errors while querying dynamodb!');
            } else {
                console.log(data);
                sns_endpoint = data.Item.sns.S;
                console.log("Result= " + data);
                console.log("Item= " + data.Item);
                console.log("sns= " + data.Item.sns);
                console.log("value= " + data.Item.sns.S);
                console.log("sns_endpoint= " + sns_endpoint);
                context.done(null, 'Querying dynamodb succeeded!');
            }
        });

        if (sns_endpoint != null) {
            console.log("sns_endpoint= " + sns_endpoint);
            var sns_params = {
                Message: payload,
                Subject: 'Event Notification From Lambda',
                TopicArn: sns_endpoint
            };
            var sns = new AWS.SNS();
            sns.publish(sns_params, function(err, data) {
                if (err) {
                    console.log(err.stack);
                    context.done(err, 'Errors while putting to SNS!');
                } else {
                    console.log(data);
                    context.done(null, 'Putting to SNS succeeded!');
                }
            });
        }
    });
};
You are calling asynchronous functions, dynamodb.getItem() and sns.publish(), within a forEach loop, and you are calling context.done() inside the getItem() callback. As soon as context.succeed(), context.fail() or context.done() is called, your Lambda function will exit. Also, the if (sns_endpoint != null) check runs synchronously, before the getItem() callback has set sns_endpoint, so sns.publish() is never reached. You need to modify your code to publish from within the getItem() callback and to call context.done() only after all asynchronous calls have completed.
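A minimal sketch of that restructuring, moving the publish into the getItem() callback; it keeps the table and field names from the question, and note that with multiple Kinesis records you would still need to wait for all of them before signalling completion:
dynamodb.getItem(dynamodb_params, function(err, data) {
    if (err) {
        console.log(err.stack);
        return context.done(err, 'Errors while querying dynamodb!');
    }

    var sns_endpoint = data.Item.sns.S; // only known once getItem has returned

    var sns_params = {
        Message: payload,
        Subject: 'Event Notification From Lambda',
        TopicArn: sns_endpoint
    };

    var sns = new AWS.SNS();
    sns.publish(sns_params, function(err, data) {
        if (err) {
            console.log(err.stack);
            return context.done(err, 'Errors while putting to SNS!');
        }
        // Signal completion only after the publish has finished
        context.done(null, 'Putting to SNS succeeded!');
    });
});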