I'm trying to write a Lambda function that is triggered whenever a new image is written to an S3 bucket. The trigger is already set up against the correct S3 bucket, so I know that's not the issue.
The Lambda function's execution role has the s3:GetObject and dynamodb:* permissions (the latter should be full access for DynamoDB writes).
The goal is simply to write to a table I've already created named 'art': insert a primary key value (imageTitle), which I'm trying to obtain in var imageName, then assign to that key an attribute holding the URL of the image, which I'm storing in var url.
This is just a simple exercise I'm trying to get down so that I can move on to more complex DB writes. But right now nothing gets written to the art table, even though adding new objects to the S3 bucket does set off the trigger. Is it possible that the Lambda function isn't deployed? I wrote it directly in the inline editor of the Lambda console and saved it.
Here's the code:
const AWS = require('aws-sdk');
const docClient = new AWS.DynamoDB.DocumentClient({region: 'us-east-1'});
const s3 = new AWS.S3();
exports.handler = async (event, context, callback) => {
    //var sourceBucket = event.Records[0].s3.bucket.name;
    var sourceKey = event.Records[0].s3.object.key;
    var imageName = sourceKey.stringify;

    //generate imageURL
    var url = "https://s3.amazonaws.com/myapp-20181030214040-deployment/public/" + imageName;

    var params = {
        TableName: 'art',
        Item: {
            imageTitle: imageName,
            imageURL: url
        }
    };

    docClient.put(params, function(err, data) {
        if (err) console.log(err);
        else console.log(data);
    });
};
The problem here is that you're using an async handler but awaiting and returning nothing, so the handler returns immediately and the Lambda terminates before the docClient.put operation is even sent.
With an async handler you need to await the call and return its result; for example, you could change this snippet to:
const data = await docClient.put(params).promise();
return data;
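For completeness, here is a sketch of the whole handler with that change applied. It also fixes a second bug: sourceKey.stringify is not a real property, so imageName ends up undefined. S3 delivers object keys URL-encoded (with spaces turned into '+'), and the decodeURIComponent step below is the standard way to recover the key:

const AWS = require('aws-sdk');
const docClient = new AWS.DynamoDB.DocumentClient({region: 'us-east-1'});

exports.handler = async (event) => {
    // S3 event keys arrive URL-encoded, with spaces replaced by '+'
    const imageName = decodeURIComponent(event.Records[0].s3.object.key.replace(/\+/g, ' '));
    const url = "https://s3.amazonaws.com/myapp-20181030214040-deployment/public/" + imageName;

    const params = {
        TableName: 'art',
        Item: {
            imageTitle: imageName,
            imageURL: url
        }
    };

    // awaiting the promise keeps the Lambda alive until the write completes
    return docClient.put(params).promise();
};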
Or instead you could use the callback approach (note that the handler signature no longer contains async):
exports.handler = (event, context, callback) => {
    // ... the rest of the code as was ...
    docClient.put(params, function(err, data) {
        if (err) {
            console.log(err);
            callback(err); // return 'lambda invoke failed because of the error' - will cause S3 to retry three times
        } else {
            console.log(data);
            callback(null, data); // return 'nothing failed'; n.b. the S3 trigger ignores whatever you return
        }
    });
};
I am querying DynamoDB from a Lambda function written in the Node.js environment.
I'm trying to query the CurrencyPrice table for rows where the Currency column has the value "BLC".
When I test my function in the Lambda console, it prints up to the second console log ("querying DBCurrencyPrice") and returns a null response. It does not print either of the next two console logs, and I'm not sure it is even connecting to the DB.
It seems the code never enters the ddb.query() callback at all - I have tried putting loggers throughout that function, but none gets printed.
I have checked all the AWS documentation I could find but cannot understand why this function is not getting executed.
My code looks something like below -
var AWS = require('aws-sdk');

exports.handler = async (event) => {
    AWS.config.update({region: 'ap-southeast-2'});
    console.log("i am in function");

    // Create DynamoDB service object
    var ddb = new AWS.DynamoDB.DocumentClient();
    var table = 'CurrencyPrice';
    var params = {
        "Select": "ALL_ATTRIBUTES",
        "TableName": "CurrencyPrice"
    };

    console.log("querying DB" + table);
    ddb.query(params, function(err, data) {
        console.log("i am in ddb query");
        if (err) {
            console.error("Unable to query. Error:", JSON.stringify(err, null, 2));
        } else {
            console.log(data);
        }
    });
};
Current result that I am getting in lambda console:
Response:
null
Request ID:"XXXX"
Function Logs:
START RequestId: XXX Version: $LATEST
2019-06-02T13:31:55.189Z XXXX INFO i am in function
2019-06-02T13:31:55.331Z XXXX INFO querying DBCurrencyPrice
2019-06-02T13:31:55.390Z XXXX INFO { Select: 'ALL_ATTRIBUTES', TableName: 'CurrencyPrice' }
END RequestId: XXXX
I expect it to print at least "Unable to query" or the actual data, so I can tell whether it connects to the DB and runs the query.
If you're using async/await you would want to return a promise.
var AWS = require("aws-sdk");
AWS.config.update({ region: "ap-southeast-2" });
var ddb = new AWS.DynamoDB.DocumentClient();

exports.handler = async event => {
    console.log("i am in function");

    // Create DynamoDB service object
    var table = "CurrencyPrice";
    var params = {
        Select: "ALL_ATTRIBUTES",
        TableName: "CurrencyPrice"
    };

    console.log("querying DB " + table);
    return ddb
        .query(params)
        .promise()
        .then(data => {
            // a promise's .then receives only the data; errors go to .catch
            console.log("i am in ddb query");
            console.log(data);
            return data;
        })
        .catch(err => {
            console.error("Unable to query. Error:", JSON.stringify(err, null, 2));
            throw err;
        });
};
The AWS call
ddb.query(params, callback)
is asynchronous: execution continues without waiting for the callback to finish, and your async handler returns before the query completes. That's why you got null.
You have to turn the .query() call into a promise (the SDK's .promise() method does this), so the handler waits for the query to complete.
Make the query a promise and await its fulfilled value:
try {
    const data = await ddb.query(params).promise();
    console.log("i am in ddb query");
    console.log(data);
    // process your data
    return data;
} catch (err) {
    console.error("Unable to query. Error:", JSON.stringify(err, null, 2));
    throw err;
}
I've been stuck on this code for about a day now.
I am just trying to add information into DynamoDB through a launch request using Alexa.
I get the following error code:
"errorMessage": "RequestId: f96ae2cb-1dbf-11e7-a267-b7cf2f2c95a0 Process exited before completing request"
The information actually gets inserted into DynamoDB, but I can't add more functions to the program because of the error.
From what I understand, it may be a problem with the callback.
I have tried many different ways to "callback" or return something, but I haven't figured out how to avoid the error.
If I uncomment this.emit(':tell', "Hello, inserting Apples into DynamoDB"); the error goes away, but no information gets inserted.
What am I doing wrong and how can I fix it?
Below is my code:
'use strict';
var Alexa = require('alexa-sdk');
const doc = require('dynamodb-doc');
const dynamo = new doc.DynamoDB();
exports.handler = function(event, context, callback) {
    var alexa = Alexa.handler(event, context);
    alexa.registerHandlers(handlers);
    alexa.execute();
};

var handlers = {
    'LaunchRequest': function(event, context, callback) {
        // this.emit(':tell', "Hello, inserting Apples into DynamoDB");
        var params = {
            Item: {
                date: Date.now(),
                message: "Apples"
            },
            TableName: '_yourTableName'
        };
        dynamo.putItem(params, function(err, data) {
            if (err) {
                callback(err, null);
            } else {
                callback(null, data);
            }
        });
        context.done();
    }
};
This is because the DynamoDB write happens in a callback, but context.done() is written outside that callback. context.done() is therefore called before DynamoDB completes the operation, which exits the process. Move the completion (and the speech output) into the putItem callback, as in the sketch below.
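For example, a sketch of the LaunchRequest handler with the response emitted from inside the callback (keeping your placeholder table name; not tested against your skill):

var handlers = {
    'LaunchRequest': function() {
        var self = this; // keep a reference for use inside the callback
        var params = {
            Item: {
                date: Date.now(),
                message: "Apples"
            },
            TableName: '_yourTableName'
        };
        dynamo.putItem(params, function(err, data) {
            if (err) {
                self.emit(':tell', "There was a problem writing to DynamoDB");
            } else {
                self.emit(':tell', "Hello, inserting Apples into DynamoDB");
            }
        });
        // no context.done() here - emitting the response ends the request
    }
};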
I'm a beginner with Amazon Web Services and NodeJS.
I wrote a Lambda function, triggered by AWS IoT, that parses a JSON payload.
'use strict';
console.log('Loading function');
exports.handler = (event, context, callback) => {
    console.log('Received event:', JSON.stringify(event, null, 2));
    console.log('Id =', event.Id);
    console.log('Ut =', event.Ut);
    console.log('Temp =', event.Temp);
    console.log('Rh =', event.Rh);
    //callback(null, event.key1); // Echo back the first key value
    //callback('Something went wrong');
};
Now I want to store the JSON fields in a DynamoDB table.
Any suggestions?
Thanks a lot!
Preliminary steps:
Create an IAM Lambda role with access to DynamoDB
Launch the Lambda function in the same region as your DynamoDB table
Create the DynamoDB table with the correct key attributes defined
Sample code:
The below snippet is sample code to give you some idea of how to put the item. Note that it has to be altered slightly for your requirements (table name and key attributes), and it is not fully tested code.
'use strict';
console.log('Loading function');

var AWS = require('aws-sdk');
// The DocumentClient accepts plain JavaScript values and marshals the DynamoDB types for you
var docClient = new AWS.DynamoDB.DocumentClient();

exports.handler = (event, context, callback) => {
    console.log(JSON.stringify(event, null, ' '));
    var tableName = "yourtablename";
    docClient.put({
        "TableName": tableName,
        "Item": {
            "Id": event.Id,
            "Ut": event.Ut,
            "Temp": event.Temp,
            "Rh": event.Rh
        }
    }, function(err, data) {
        if (err) {
            console.log('Error putting item into dynamodb failed: ' + err);
            context.done('error');
        } else {
            console.log('great success: ' + JSON.stringify(data, null, ' '));
            context.done(null, 'Done'); // first argument is the error, so pass null on success
        }
    });
};
Note:
There is no need to specify the data type explicitly for Strings and Numbers as long as the values comply with JavaScript String and Number: the DocumentClient automatically interprets the data type. (The low-level AWS.DynamoDB client, by contrast, requires explicit type descriptors; see the comparison sketch below.)
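For comparison, a sketch of the same item written with the low-level AWS.DynamoDB client, where every attribute needs an explicit type descriptor (this assumes Id and Ut are strings and Temp and Rh are numbers):

var dynamodb = new AWS.DynamoDB({apiVersion: '2012-08-10'});

dynamodb.putItem({
    TableName: "yourtablename",
    Item: {
        "Id":   { S: event.Id },            // S = string
        "Ut":   { S: event.Ut },
        "Temp": { N: String(event.Temp) },  // N = number, passed as a string
        "Rh":   { N: String(event.Rh) }
    }
}, function(err, data) {
    if (err) console.log(err);
    else console.log(data);
});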
There is also a shorter version for this with async/await:
'use strict';
const AWS = require('aws-sdk');
const docClient = new AWS.DynamoDB.DocumentClient();

exports.handler = async (event) => {
    const tableName = "yourtablename";
    try {
        await docClient.put({
            "TableName": tableName,
            "Item": {
                "Id": event.Id,
                "Ut": event.Ut,
                "Temp": event.Temp,
                "Rh": event.Rh
            }
        }).promise();
    } catch (error) {
        throw new Error(`Error in dynamoDB: ${JSON.stringify(error)}`);
    }
};
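To try this from the Lambda console, a test event just needs the four fields the handler reads; for example (the values here are made up):

{
    "Id": "sensor-001",
    "Ut": "2017-04-10T12:00:00Z",
    "Temp": 21.5,
    "Rh": 48
}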
Is there a way to push data from a Lambda function to a Kinesis stream? I have searched the internet but have not found any examples of it.
Thanks.
Yes, you can send information from Lambda to a Kinesis stream, and it is very simple to do. Make sure you are running Lambda with the right permissions.
Create a file called kinesis.js. This file provides a 'save' function that receives a payload and sends it to the Kinesis stream; we want to be able to include this function anywhere we need to send data to the stream. Code:
const AWS = require('aws-sdk');
const kinesisConstant = require('./kinesisConstants'); //Keep it consistent

const kinesis = new AWS.Kinesis({
    apiVersion: kinesisConstant.API_VERSION, //optional
    //accessKeyId: '<you-can-use-this-to-run-it-locally>', //optional
    //secretAccessKey: '<you-can-use-this-to-run-it-locally>', //optional
    region: kinesisConstant.REGION
});

const savePayload = (payload) => {
    //We can only save strings into the streams
    if (typeof payload !== kinesisConstant.PAYLOAD_TYPE) {
        try {
            payload = JSON.stringify(payload);
        } catch (e) {
            console.log(e);
        }
    }

    let params = {
        Data: payload,
        PartitionKey: kinesisConstant.PARTITION_KEY,
        StreamName: kinesisConstant.STREAM_NAME
    };

    kinesis.putRecord(params, function(err, data) {
        if (err) console.log(err, err.stack);
        else console.log('Record added:', data);
    });
};

exports.save = (payload) => {
    const params = {
        StreamName: kinesisConstant.STREAM_NAME,
    };

    kinesis.describeStream(params, function(err, data) {
        if (err) console.log(err, err.stack);
        else {
            //Make sure stream is able to take new writes (ACTIVE or UPDATING are good)
            if (data.StreamDescription.StreamStatus === kinesisConstant.STATE.ACTIVE
                || data.StreamDescription.StreamStatus === kinesisConstant.STATE.UPDATING) {
                savePayload(payload);
            } else {
                console.log(`Kinesis stream ${kinesisConstant.STREAM_NAME} is ${data.StreamDescription.StreamStatus}.`);
                console.log(`Record Lost`, JSON.parse(payload));
            }
        }
    });
};
Create a kinesisConstants.js file to keep it consistent :)

module.exports = {
    STATE: {
        ACTIVE: 'ACTIVE',
        UPDATING: 'UPDATING',
        CREATING: 'CREATING',
        DELETING: 'DELETING'
    },
    STREAM_NAME: '<your-stream-name>',
    PARTITION_KEY: '<string-value-if-one-shard-anything-will-do>',
    PAYLOAD_TYPE: 'string', // typeof returns lowercase 'string', so the constant must be lowercase too
    REGION: '<the-region-where-you-have-lambda-and-kinesis>',
    API_VERSION: '2013-12-02'
};
Your handler file: we added a 'done' function to send a response back to whoever invoked the Lambda, but 'kinesis.save(event)' does all the work.
const kinesis = require('./kinesis');

exports.handler = (event, context, callback) => {
    console.log('LOADING handler');

    const done = (err, res) => callback(null, {
        statusCode: err ? '400' : '200',
        body: err || res,
        headers: {
            'Content-Type': 'application/json',
        },
    });

    kinesis.save(event); // here we send it to the stream
    done(null, event);
};
This is done exactly the same way you would do it on your own machine.
Here's an example in Node.js:
let AWS = require('aws-sdk');
let kinesis = new AWS.Kinesis();

// data that you'd like to send
let data_object = { "some": "properties" };
let data = JSON.stringify(data_object);

// push data to kinesis
const params = {
    Data: data,
    PartitionKey: "1",
    StreamName: "stream name"
};

kinesis.putRecord(params, (err, data) => {
    if (err) console.error(err);
    else console.log("data sent");
});
Please note, this piece of code will not work as-is, because the Lambda has no permissions on your stream.
When accessing AWS resources from Lambda, it is better to use IAM roles:
When configuring a new Lambda, you can choose an existing role or create one.
Go to IAM, then Roles, and pick the role name you assigned to the Lambda function.
Add the relevant permissions (putRecord, putRecords); a sample policy statement follows this list.
Then, test the Lambda.
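A minimal policy statement granting just those two actions might look like this (a sketch; the resource ARN is a placeholder for your stream's):

{
    "Effect": "Allow",
    "Action": [
        "kinesis:PutRecord",
        "kinesis:PutRecords"
    ],
    "Resource": "arn:aws:kinesis:<region>:<account-id>:stream/<stream-name>"
}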
Yes, this can be done. I was trying to accomplish the same thing and was able to do so in Lambda using the Node.js 4.3 runtime; it also works in version 6.10.
Here is the code:
Declare the following at the top of your Lambda function:
var AWS = require("aws-sdk");
var kinesis = new AWS.Kinesis();

function writeKinesis(rawdata) {
    // declare locals with var to avoid leaking implicit globals (which break under strict mode)
    var data = JSON.stringify(rawdata);
    var params = {
        Data: data,
        PartitionKey: "<PARTITION_KEY>",
        StreamName: "<STREAM_NAME>"
    };
    kinesis.putRecord(params, (err, data) => {
        if (err) console.error(err);
        else console.log("data sent");
    });
}
Now, in the exports.handler, call the function:
writeKinesis(<YOUR_DATA>);
A few things to note... for Kinesis to ingest data, it must be encoded as a string or buffer. In the example below, I have a function that takes logs from CloudWatch and sends them over to a Kinesis stream.
Note that I'm passing the contents of buffer.toString('utf8') into the writeKinesis function:
exports.handler = function(input, context) {
    ...
    var zippedInput = new Buffer(input.awslogs.data, 'base64');
    zlib.gunzip(zippedInput, function(error, buffer) {
        ...
        writeKinesis(buffer.toString('utf8'));
        ...
    });
    ...
};
Finally, in IAM, configure the appropriate permissions. Your Lambda function has to run within the context of an IAM role that includes the following permissions. In my case, I just modified the default lambda_elasticsearch_execution role to include a policy called "lambda_kinesis_execution" with the following statement:

{
    "Effect": "Allow",
    "Action": [
        "kinesis:*"
    ],
    "Resource": [
        "<YOUR_STREAM_ARN>"
    ]
}
I have a use case of pushing SNS notifications from AWS Lambda. I have written the following code, with an IAM role that has permission to publish to SNS. Even with the Kinesis trigger firing, this Lambda function is unable to send any notification to SNS. I confirmed this by subscribing my email address to the SNS topic.
[EDIT]: A follow-up question. I now need to query DynamoDB and, based on the output, call a different SNS endpoint. When I query DynamoDB, execution stops after the DynamoDB query and never even reaches the SNS checks.
Following is my Lambda function code.
console.log('Loading function');
var AWS = require('aws-sdk');

exports.handler = function(event, context) {
    event.Records.forEach(function(record) {
        //var payload = new Buffer(record.kinesis.data, 'base64').toString('ascii');
        var payload = record.kinesis.data;
        console.log('Decoded payload:', payload);

        var dynamodb = new AWS.DynamoDB();
        var dynamodb_params = {
            Key: {
                dataSource: {
                    S: payload
                }
            },
            TableName: 'TableName',
            AttributesToGet: [
                'attribute' // ...
            ],
            ConsistentRead: false,
        };

        var sns_endpoint = null;
        dynamodb.getItem(dynamodb_params, function(err, data) {
            if (err) {
                console.log(err.stack);
                context.done(err, 'Errors while querying dynamodb!');
            } else {
                console.log(data);
                sns_endpoint = data.Item.sns.S;
                console.log("Result= " + data);
                console.log("Item= " + data.Item);
                console.log("sns= " + data.Item.sns);
                console.log("value= " + data.Item.sns.S);
                console.log("sns_endpoint= " + sns_endpoint);
                context.done(null, 'Querying dynamodb succeeded!');
            }
        });

        if (sns_endpoint != null) {
            console.log("sns_endpoint= " + sns_endpoint);
            var sns_params = {
                Message: payload,
                Subject: 'Event Notification From Lambda',
                TopicArn: sns_endpoint
            };
            var sns = new AWS.SNS();
            sns.publish(sns_params, function(err, data) {
                if (err) {
                    console.log(err.stack);
                    context.done(err, 'Errors while putting to SNS!');
                } else {
                    console.log(data);
                    context.done(null, 'Putting to SNS succeeded!');
                }
            });
        }
    });
};
You are calling asynchronous functions, dynamodb.getItem() and sns.publish(), within a forEach loop, and calling context.done() inside the getItem callback. As soon as context.succeed(), context.fail() or context.done() is called, your Lambda function exits; here the getItem callback calls context.done(), so the function terminates before sns.publish() ever runs. (The if (sns_endpoint != null) check is also executed before the getItem callback has assigned sns_endpoint, so it is always null at that point.) You need to modify your code to call context.done() only once, after all the asynchronous calls have completed - for example by chaining the calls as in the sketch below.
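A sketch of that restructuring using the SDK's .promise() support, chaining each record's DynamoDB lookup into its SNS publish (the table name and attribute names are taken from your question):

var AWS = require('aws-sdk');
var dynamodb = new AWS.DynamoDB();
var sns = new AWS.SNS();

exports.handler = function(event, context) {
    // build one promise chain per record: getItem, then publish
    var work = event.Records.map(function(record) {
        var payload = record.kinesis.data;
        return dynamodb.getItem({
            Key: { dataSource: { S: payload } },
            TableName: 'TableName'
        }).promise().then(function(data) {
            var sns_endpoint = data.Item.sns.S;
            return sns.publish({
                Message: payload,
                Subject: 'Event Notification From Lambda',
                TopicArn: sns_endpoint
            }).promise();
        });
    });

    // finish the invocation only after every record has been processed
    Promise.all(work)
        .then(function() { context.done(null, 'Putting to SNS succeeded!'); })
        .catch(function(err) { context.done(err, 'Errors while processing records!'); });
};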