Unable to perform getObject from AWS S3 in AWS Lambda

I am new to AWS Lambda and S3. I am trying to build a microservice using API Gateway, S3, and Lambda.
I have written a Lambda function to retrieve an object from S3, but it returns null and does not throw any error. I am not sure what is going wrong.
I have set up a role with access to S3 and used that role for the Lambda function.
const AWS = require('aws-sdk');
var s3 = new AWS.S3();

exports.handler = async (event) => {
    var params = {
        "Bucket": "bucketname",
        "Key": "keyname"
    };
    s3.getObject(params, function(err, data) {
        if (err) {
            return "error while fetching data";
        } else {
            return data;
        }
    });
};
What am I doing wrong here?

You should return the data in the proper response format required for the API Gateway proxy integration and reply through the callback parameter. Also drop the async keyword from the handler: an async handler's returned promise resolves before getObject's callback fires, which is why you see null. Change your function to look like this:
const AWS = require('aws-sdk');
var s3 = new AWS.S3();

exports.handler = (event, context, callback) => {
    var params = {
        "Bucket": "bucketname",
        "Key": "keyname"
    };
    s3.getObject(params, function(err, data) {
        if (err) {
            return callback(new Error("error while fetching data"));
        } else {
            let response = {
                statusCode: 200,
                headers: {
                    "x-custom-header": "my custom header value"
                },
                body: JSON.stringify(data)
            };
            return callback(null, response);
        }
    });
};
If you're not using the API Gateway Lambda proxy integration, you can simply reply with callback(null, data); instead.
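On the Node.js 8.10 and later runtimes you can also skip the callback entirely and use async/await with the SDK's .promise() helper. A minimal sketch, assuming the same placeholder bucket and key as above:

const AWS = require('aws-sdk');
const s3 = new AWS.S3();

exports.handler = async (event) => {
    // Placeholder bucket/key: substitute your own values.
    const params = { Bucket: "bucketname", Key: "keyname" };

    const data = await s3.getObject(params).promise();

    // Proxy-integration response; data.Body is a Buffer.
    return {
        statusCode: 200,
        body: data.Body.toString('utf-8')
    };
};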

Related

Sending data from API Gateway to Lambda

Goal: get real-time stock data from the IEX API, store it in DynamoDB, and use the data in DynamoDB to display on the site.
Edit: I want to use the API's pull mechanism so that I can pull data periodically.
Currently, in API Gateway, I created a GET method for my stocks resource, set the integration type to HTTP, and entered the IEX endpoint URL as the Endpoint URL. For now, I just fetched the data for one stock, and the JSON response body looks like:
{
    "symbol": "AAPL",
    "companyName": "Apple Inc",
    "primaryExchange": "NASDAQ/NGS (GLOBAL SELECT MARKET)",
    "calculationPrice": "close",
    "open": 132.5,
    "openTime": 1610116201607,
    "openSource": "official",
    "close": 132.05,
    "closeTime": 1610139600449,
    "closeSource": "official"
}
I want to integrate Lambda here somehow so that it can get this API data and store it in DynamoDB. So far this is what my Lambda function looks like:
'use strict';
const AWS = require('aws-sdk');

exports.handler = async (event, context) => {
    const documentClient = new AWS.DynamoDB.DocumentClient();
    let responseBody = "";
    let statusCode = 0;

    const { symbol, companyName } = JSON.parse(event.body);

    const params = {
        TableName: "Stocks",
        Item: {
            symbol: symbol,
            companyName: companyName
        }
    };

    try {
        const data = await documentClient.put(params).promise();
        responseBody = JSON.stringify(data);
        statusCode = 201;
    } catch (err) {
        responseBody = `Unable to put product: ${err}`;
        statusCode = 403;
    }

    const response = {
        statusCode: statusCode,
        headers: {
            "Content-Type": "application/json"
        },
        body: responseBody
    };
    return response;
};
How do I get the API data from API Gateway and integrate it with a Lambda function that stores this data to DynamoDB?
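One common approach is to let the Lambda function call the external API itself on a schedule (for example, an EventBridge rule) and write the fields it needs into DynamoDB, rather than routing the stock data through the API Gateway HTTP integration. A minimal sketch, assuming a hypothetical IEX quote URL and the Stocks table from the code above:

'use strict';
const https = require('https');
const AWS = require('aws-sdk');
const documentClient = new AWS.DynamoDB.DocumentClient();

// Hypothetical endpoint; substitute the real IEX quote URL and token.
const QUOTE_URL = 'https://cloud.iexapis.com/stable/stock/AAPL/quote?token=YOUR_TOKEN';

const fetchJson = (url) => new Promise((resolve, reject) => {
    https.get(url, (res) => {
        let body = '';
        res.on('data', (chunk) => body += chunk);
        res.on('end', () => resolve(JSON.parse(body)));
    }).on('error', reject);
});

exports.handler = async () => {
    const quote = await fetchJson(QUOTE_URL);

    // Store only the fields needed for display.
    await documentClient.put({
        TableName: 'Stocks',
        Item: {
            symbol: quote.symbol,
            companyName: quote.companyName,
            open: quote.open,
            close: quote.close
        }
    }).promise();

    return { statusCode: 201, body: JSON.stringify(quote) };
};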

AWS Lambda error { "errorType": "string", "errorMessage": "error", "trace": [] } when sending a request through API Gateway

I'm trying to build my first Lambda function. It is triggered by an API Gateway request and then sends a message to the SQS queue used below. When I send a request through API Gateway, it returns status code 200, but the queue doesn't receive any messages.
var AWS = require('aws-sdk');
var sqs = new AWS.SQS();

exports.handler = function(event, context, callback) {
    var params = {
        MessageBody: JSON.stringify(event),
        QueueUrl: 'https://sqs.ap-southeast-1.amazonaws.com/********/****.fifo'
    };
    sqs.sendMessage(params, function(err, data) {
        if (err) {
            console.log('error:', "Fail Send Message", +err);
            context.done('error', "ERROR Put SQS");
        } else {
            console.log('data:', data.MessageId);
            context.done(null, '');
        }
    });
}
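One thing worth checking (an assumption based on the .fifo queue URL): SendMessage to a FIFO queue requires a MessageGroupId, and a MessageDeduplicationId unless content-based deduplication is enabled on the queue, so the send likely fails. And because context.done is given the string 'error' instead of an Error object, the response becomes { "errorType": "string", "errorMessage": "error" }. A minimal sketch of the params with the FIFO fields added (placeholder values):

var params = {
    MessageBody: JSON.stringify(event),
    QueueUrl: 'https://sqs.ap-southeast-1.amazonaws.com/********/****.fifo',
    MessageGroupId: 'default',                    // required for FIFO queues
    MessageDeduplicationId: context.awsRequestId  // or enable content-based deduplication on the queue
};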

Lambda SDK Calls No Response

I am trying to do a pretty simple "hello world" in AWS Lambda. I tried a few service calls that only use the AWS SDK to read data, but my callback never gets called. I must be missing something. Any help appreciated!
var AWS = require("aws-sdk");
exports.handler = async (event) => {
// TODO implement
const response = {
statusCode: 200,
body: JSON.stringify('Hello from Lambda!'),
};
var s3 = new AWS.S3(); // confirmed this is not null
s3.listBuckets({}, function(err, data) {
// never reaches here!
if (err) console.log(err, err.stack); // an error occurred
else console.log(data); // successful response
});
return response;
};
I did create a role for this Lambda that has S3 access. :-)
It seems that because I chose the Node 8.x runtime, I needed to use one of its async constructs. This worked:
let AWS = require('aws-sdk');
let s3 = new AWS.S3();

exports.handler = async (event) => {
    return await s3.listBuckets().promise();
};
This is a synchronization problem: your return response statement runs before the listBuckets callback is invoked.
You'll have to either reply from inside the callback (with a non-async handler and the Lambda callback parameter) or use async/await.
Replying from inside the callback:
var AWS = require("aws-sdk");

exports.handler = (event, context, callback) => {
    const response = {
        statusCode: 200,
        body: JSON.stringify('Hello from Lambda!'),
    };
    var s3 = new AWS.S3();
    s3.listBuckets({}, function (err, data) {
        if (err) {
            console.log(err, err.stack);
            return callback(null, {
                statusCode: 500,
                message: 'some error'
            });
        }
        return callback(null, response);
    });
};
Using async/await:
var AWS = require("aws-sdk");

exports.handler = async (event) => {
    const response = {
        statusCode: 200
    };
    var s3 = new AWS.S3();
    await s3.listBuckets().promise();
    return response;
};
I'd go with the async/await approach as it's much cleaner and more readable. It's also easier to work with promises than with callbacks.
EDIT: The OP claimed it didn't work, so I decided to test it on my own. The above code works; the only small change is adding the listed buckets to the response. Here's the final code:
var AWS = require("aws-sdk");
exports.handler = async (event) => {
const response = {
statusCode: 200
};
var s3 = new AWS.S3();
const buckets = await s3.listBuckets().promise();
response.body = JSON.stringify(buckets);
return response;
}
The output in CloudWatch Logs confirms that the bucket list is returned.

How to get the attributes of an added item in DynamoDB using Lambda function

I have an AWS Lambda function which is triggered by AWS DynamoDB. The function checks whether the operation in the DB is an INSERT, and if it is, sends a new push message using AWS SNS:
console.log('Loading function');

var AWS = require('aws-sdk');
AWS.config.region = 'us-east-1';

exports.handler = function(event, context, callback) {
    console.log("\n\nLoading handler\n\n");
    var sns = new AWS.SNS();
    console.log(JSON.stringify(event, null, 2));

    event.Records.forEach(function(record) {
        if (record.eventName == "INSERT") {
            sns.publish({
                Subject: 'Test',
                Message: 'Test publish to SNS from Lambda',
                TopicArn: 'TOPIC-ARN'
            }, function(err, data) {
                if (err) {
                    console.log(err.stack);
                    return;
                }
                console.log('push sent');
                console.log(data);
                context.done(null, 'Function Finished!');
            });
        }
    });

    callback(null, "message");
};
Is there a way that I could get the attributes of the newly added item?
Yes. Each stream record carries the item's key attributes, for example:
var id = record.dynamodb.Keys.Id.N;
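To read the rest of the new item's attributes (not just the keys), the stream record also includes a NewImage map when the table's stream view type is NEW_IMAGE or NEW_AND_OLD_IMAGES. A minimal sketch, assuming hypothetical Name and Price attributes on the item:

event.Records.forEach(function(record) {
    if (record.eventName == "INSERT") {
        var newItem = record.dynamodb.NewImage;  // present with NEW_IMAGE / NEW_AND_OLD_IMAGES
        var id = record.dynamodb.Keys.Id.N;
        var name = newItem.Name.S;               // hypothetical string attribute
        var price = newItem.Price.N;             // hypothetical number attribute (returned as a string)
        console.log(id, name, price);
    }
});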

Pushing AWS Lambda data to Kinesis Stream

Is there a way to push data from a Lambda function to a Kinesis stream? I have searched the internet but have not found any examples of it.
Thanks.
Yes, you can send data from Lambda to a Kinesis stream, and it is very simple to do. Make sure you are running the Lambda function with the right permissions.
Create a file called kinesis.js. This file provides a 'save' function that receives a payload and sends it to the Kinesis stream, so we can include 'save' anywhere we want to send data to the stream. Code:
const AWS = require('aws-sdk');
const kinesisConstant = require('./kinesisConstant'); //Keep it consistent
const kinesis = new AWS.Kinesis({
    apiVersion: kinesisConstant.API_VERSION, //optional
    //accessKeyId: '<you-can-use-this-to-run-it-locally>', //optional
    //secretAccessKey: '<you-can-use-this-to-run-it-locally>', //optional
    region: kinesisConstant.REGION
});

const savePayload = (payload) => {
    //We can only save strings into the streams
    if (typeof payload !== kinesisConstant.PAYLOAD_TYPE) {
        try {
            payload = JSON.stringify(payload);
        } catch (e) {
            console.log(e);
        }
    }

    let params = {
        Data: payload,
        PartitionKey: kinesisConstant.PARTITION_KEY,
        StreamName: kinesisConstant.STREAM_NAME
    };

    kinesis.putRecord(params, function(err, data) {
        if (err) console.log(err, err.stack);
        else console.log('Record added:', data);
    });
};

exports.save = (payload) => {
    const params = {
        StreamName: kinesisConstant.STREAM_NAME,
    };

    kinesis.describeStream(params, function(err, data) {
        if (err) console.log(err, err.stack);
        else {
            //Make sure stream is able to take new writes (ACTIVE or UPDATING are good)
            if (data.StreamDescription.StreamStatus === kinesisConstant.STATE.ACTIVE
                || data.StreamDescription.StreamStatus === kinesisConstant.STATE.UPDATING) {
                savePayload(payload);
            } else {
                console.log(`Kinesis stream ${kinesisConstant.STREAM_NAME} is ${data.StreamDescription.StreamStatus}.`);
                console.log(`Record Lost`, JSON.parse(payload));
            }
        }
    });
};
Create a kinesisConstant.js file to keep it consistent :)
module.exports = {
    STATE: {
        ACTIVE: 'ACTIVE',
        UPDATING: 'UPDATING',
        CREATING: 'CREATING',
        DELETING: 'DELETING'
    },
    STREAM_NAME: '<your-stream-name>',
    PARTITION_KEY: '<string-value-if-one-shard-anything-will-do>',
    PAYLOAD_TYPE: 'string', // lowercase: typeof returns 'string'
    REGION: '<the-region-where-you-have-lambda-and-kinesis>',
    API_VERSION: '2013-12-02'
};
Your handler file: we added the 'done' function to send a response back to whoever asked to put the data on the stream, but kinesis.save(event) does all the work.
const kinesis = require('./kinesis');

exports.handler = (event, context, callback) => {
    console.log('LOADING handler');

    const done = (err, res) => callback(null, {
        statusCode: err ? '400' : '200',
        body: err || res,
        headers: {
            'Content-Type': 'application/json',
        },
    });

    kinesis.save(event); // here we send it to the stream
    done(null, event);
};
This works exactly the same way it would on your own computer. Here's an example in Node.js:
let aws = require('aws-sdk');
let kinesis = new aws.Kinesis();

// data that you'd like to send
let data_object = { "some": "properties" };
let data = JSON.stringify(data_object);

// push data to kinesis
const params = {
    Data: data,
    PartitionKey: "1",
    StreamName: "stream name"
};

kinesis.putRecord(params, (err, data) => {
    if (err) console.error(err);
    else console.log("data sent");
});
Please note, this piece of code will not work as-is, because the Lambda function has no permissions on your stream. When accessing AWS resources from Lambda, it is better to use IAM roles:
When configuring a new Lambda function, you can choose an existing role or create one.
Go to IAM, then Roles, and pick the role you assigned to the Lambda function.
Add the relevant permissions (PutRecord, PutRecords; see the sketch below).
Then test the Lambda function.
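A minimal policy statement for that role might look like this (a sketch only; substitute your stream's ARN):

{
    "Effect": "Allow",
    "Action": [
        "kinesis:PutRecord",
        "kinesis:PutRecords"
    ],
    "Resource": [
        "<YOUR_STREAM_ARN>"
    ]
}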
Yes, this can be done. I was trying to accomplish the same thing and was able to do it in Lambda using the Node.js 4.3 runtime; it also works on 6.10.
Here is the code:
Declare the following at the top of your Lambda function:
var AWS = require("aws-sdk");
var kinesis = new AWS.Kinesis();
function writeKinesis(rawdata){
data = JSON.stringify(rawdata);
params = {Data: data, PartitionKey: "<PARTITION_KEY>", StreamName: "<STREAM_NAME>"};
kinesis.putRecord(params, (err, data) => {
if (err) console.error(err);
else console.log("data sent");
});
}
Now, in the exports.handler, call the function:
writeKinesis(<YOUR_DATA>);
A few things to note... for Kinesis to ingest data, it must be encoded. In the example below, I have a function that takes logs from CloudWatch and sends them over to a Kinesis stream.
Note that I'm passing the contents of buffer.toString('utf8') to the writeKinesis function:
var zlib = require('zlib'); // needed to gunzip the CloudWatch Logs payload

exports.handler = function(input, context) {
    ...
    var zippedInput = new Buffer(input.awslogs.data, 'base64');

    zlib.gunzip(zippedInput, function(error, buffer) {
        ...
        writeKinesis(buffer.toString('utf8'));
        ...
    });
    ...
};
Finally, configure the appropriate permissions in IAM. Your Lambda function has to run within the context of an IAM role that includes the following permissions. In my case, I just modified the default lambda_elasticsearch_execution role to include a policy called "lambda_kinesis_execution" with the following statement:
"Effect": "Allow",
"Action": [
"kinesis:*"
],
"Resource": [
"<YOUR_STREAM_ARN>"
]