Lambda function is not triggered for all the S3 image uploads - amazon-web-services

I have s3 bucket as below,
myBucket
a/
b/
c/
where myBucket is an S3 bucket and a, b, c are key folders (prefixes) inside that bucket.
I upload images into a/. The S3 event notification sends a message to SQS, which in turn triggers a Lambda function that removes the image background and uploads the result into the b/ folder.
The problem is that if I upload a folder containing around 26 images to S3, only 22 or 23 of them trigger the Lambda function, and only those images get processed.
For some reason S3 is not sending events for all of the images, or is this something I should configure in my Lambda function?
Here is my function code
exports.handler = async(event, context, callback) => {
    try {
        console.log(event.Records[0]);
        var json = JSON.parse(event.Records[0]['body']);
        console.log('json: ' + json);
        json = JSON.parse(json['Message']);
        json = json['Records'][0]['s3'];
        console.log(json);
        var srcBucket = json['bucket']['name'];
        console.log('srcBucket: ' + srcBucket);
        var srcKey = decodeURIComponent(json['object']['key'].replace(/\+/g, ' '));
        console.log('srcKey: ' + srcKey);
        var str = (srcKey.split('/').pop()).split('.')[0];
        console.log('str: ' + str);
        if (str != '') {
            var folderPath = srcKey.substr(srcKey.indexOf('/') + 1).split('.')[0];
            folderPath = folderPath.substring(0, folderPath.lastIndexOf('/'));
            console.log('folderPath: ' + folderPath);
            const params1 = { Bucket: srcBucket, Key: srcKey };
            var origimage = await s3.getObject(params1).promise();
            var destObject = await origimage.Body;
            var destKey = 'removebg/' + folderPath + '/' + str + '.jpg';
            var options = {
                'method': 'POST',
                'url': 'https://api.remove.bg/v1.0/removebg',
                'headers': {
                    'X-Api-Key': 'xxxxxxxxxxx'
                },
                formData: {
                    'image_file': destObject,
                    'size': 'auto'
                },
                encoding: null
            };
            request(options, function(error, response, body) {
                if (error) {
                    console.log(error);
                    sendmessage(error, 'Error removing image background', arn, srcBucket + '/' + srcKey, destBucket + destKey);
                }
                var params = { Bucket: destBucket, Key: destKey, Body: body };
                s3.upload(params, function(err, data) {
                    if (err) {
                        console.log('Error uploading data: ', err);
                        sendmessage(err, 'Error uploading transparent image to s3', arn, srcBucket + '/' + srcKey, destBucket + destKey);
                    }
                    else { console.log('Successfully uploaded data to ' + destBucket); }
                });
            });
        }
    }
    catch (e) {
        console.log(e);
    }
    callback(null, 'All done!');
};
Please let me know. Thanks in advance.

I think your problem lies on these lines:
console.log(event.Records[0]);
var json = JSON.parse(event.Records[0]['body']);
console.log('json: '+json);
json = JSON.parse(json['Message']);
json = json['Records'][0]['s3'];
Your function only looks at the first record provided to it. Multiple events can be delivered to the Lambda function in a single invocation, so your function should loop through the Records entries and process every event that is provided.
It should do something like:
for (const record of event.Records) {
    console.log(record);
    var json = JSON.parse(record['body']);
    console.log('json: ' + json);
    json = JSON.parse(json['Message']);
    json = json['Records'][0]['s3'];
    // ... process this record ...
}
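As a fuller sketch (an assumption on my part: the existing getObject / remove.bg / upload logic is moved into a hypothetical processRecord helper that returns a Promise), the handler could then await every record in the batch before returning, so none of the uploads are skipped:

// Minimal sketch only - processRecord is a hypothetical helper wrapping the
// existing background-removal and upload logic, returning a Promise.
exports.handler = async (event) => {
    for (const record of event.Records) {
        const body = JSON.parse(record.body);        // SQS message body
        const s3Event = JSON.parse(body.Message);    // wrapped S3 notification
        for (const s3Record of s3Event.Records) {
            await processRecord(s3Record.s3);        // one uploaded object
        }
    }
    return 'All done!';
};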

Related

Print the JSON value from AWS S3 bucket in Lambda function

How can I get the body of an uploaded JSON file inside the Lambda function so I can print it?
I used the following code, but it only retrieves the Content-Type. Any suggestions?
// console.log('Loading function');
const aws = require('aws-sdk');
const s3 = new aws.S3({ apiVersion: '2006-03-01' });
exports.handler = async (event, context) => {
    //console.log('Received event:', JSON.stringify(event, null, 2));
    // Get the object from the event and show its content type
    const bucket = event.Records[0].s3.bucket.name;
    const key = decodeURIComponent(event.Records[0].s3.object.key.replace(/\+/g, ' '));
    const params = {
        Bucket: bucket,
        Key: key,
    };
    try {
        const { ContentType } = await s3.getObject(params).promise();
        console.log('CONTENT TYPE:', ContentType);
        console.log('Body: ', );
        console.log("response: " + "I want to print the response body here when a JSON file uploaded")
        return ContentType;
    } catch (err) {
        console.log(err);
        const message = `Error getting object ${key} from bucket ${bucket}. Error : ` + err;
        console.log(message);
        throw new Error(message);
    }
};
The return value of getObject contains a Body field:
const { ContentType, Body } = await s3.getObject(params).promise();
console.log('CONTENT TYPE:', ContentType);
console.log('Body: ', Body);
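Note that in the Node.js SDK the Body field is returned as a Buffer, so to actually print the uploaded JSON you would convert it to a string first. A small sketch, assuming the uploaded object is UTF-8 encoded JSON:

const { ContentType, Body } = await s3.getObject(params).promise();
console.log('CONTENT TYPE:', ContentType);
const bodyText = Body.toString('utf-8');      // Buffer -> string
console.log('Body:', bodyText);
console.log('Parsed:', JSON.parse(bodyText)); // optional: work with it as an object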

Amazon Lambda writing to DynamoDB

I'm new to DynamoDB and I'm trying to write some data to the table using Lambda.
So far I have this:
'AddFood': function () {
    var FoodName = this.event.request.intent.slots.FoodName.value;
    var FoodCalories = this.event.request.intent.slots.FoodCalories.value;
    console.log('FoodName : ' + FoodName);
    const params = {
        TableName: 'Foods',
        Item: {
            'id': {"S": 3},
            'calories': {"S": FoodCalories},
            'food': {"S": FoodName}
        }
    };
    writeDynamoItem(params, myResult => {
        var say = '';
        say = myResult;
        say = FoodName + ' with ' + FoodCalories + ' calories has been added ';
        this.response.speak(say).listen('try again');
        this.emit(':responseReady');
    });
    function writeDynamoItem(params, callback) {
        var AWS = require('aws-sdk');
        AWS.config.update({region: AWSregion});
        var docClient = new AWS.DynamoDB();
        console.log('writing item to DynamoDB table');
        docClient.putItem(params, function (err, data) {
            if (err) {
                callback(err, null)
            } else {
                callback(null, data)
            }
        });
    }
}
Does anyone know why the data is not appearing in the database?
I have checked the IAM role and its policy is set to AmazonDynamoDBFullAccess.
After making a few changes to the write function, the following code allowed me to write items to the database:
function writeDynamoItem(params, callback) {
    const AWS = require('aws-sdk');
    AWS.config.update({region: AWSregion});
    const docClient = new AWS.DynamoDB.DocumentClient({region: 'eu-west-1'});
    console.log('writing item to DynamoDB table');
    docClient.put(params, function (err, data) {
        if (err) {
            callback(err, null)
            console.error("Unable to write item. Error JSON:", JSON.stringify(err, null, 2))
        } else {
            callback(null, data)
        }
    });
}
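One detail worth noting: DocumentClient.put expects plain JavaScript values, not the low-level {"S": ...} attribute descriptors used in the original params. A hedged sketch of params matching this version (same Foods table, id written as a plain string here):

const params = {
    TableName: 'Foods',
    Item: {
        id: '3',                  // plain values - no {"S": ...} wrappers
        calories: FoodCalories,
        food: FoodName
    }
};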
To write to DynamoDB from a Lambda function written in Python, use the boto3 package and load the dynamodb resource.
Hope this helps; it reads food and calories from the event and writes them together with a generated uuid:
import boto3
import os
import uuid

def writeToDynamo(event, context):
    recordId = str(uuid.uuid4())
    food = event["food"]
    calories = event["calories"]
    print('Generating new DynamoDB record, with ID: ' + recordId)
    print('Input food: ' + food)
    print('Input calories: ' + calories)
    # Creating new record in DynamoDB table
    dynamodb = boto3.resource('dynamodb')
    table = dynamodb.Table(os.environ['DB_TABLE_NAME'])
    table.put_item(
        Item={
            'id': recordId,
            'food': food,
            'calories': calories
        }
    )
    return recordId
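For completeness, a hedged example of the kind of test event this handler expects (field names taken from the code above, values made up); it also assumes a DB_TABLE_NAME environment variable pointing at the target table:

{
    "food": "apple",
    "calories": "95"
}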

Creating a Lambda function to create thumbnails for an AWS S3 bucket is not working

I am learning to use AWS Lambda functions. What I am trying to do is: when I upload an image (JPEG) file to the S3 bucket, the image should be resized. But it is not working. See what I have done below.
I created a folder, then created a node_modules folder inside it, and then created a file called CreateThumbnail.js inside the main folder.
This is CreateThumbnail.js:
// dependencies
var async = require('async');
var AWS = require('aws-sdk');
var gm = require('gm')
.subClass({ imageMagick: true }); // Enable ImageMagick integration.
var util = require('util');
// constants
var MAX_WIDTH = 100;
var MAX_HEIGHT = 100;
// get reference to S3 client
var s3 = new AWS.S3();
exports.handler = function(event, context, callback) {
// Read options from the event.
console.log("Reading options from event:\n", util.inspect(event, {depth: 5}));
var srcBucket = event.Records[0].s3.bucket.name;
// Object key may have spaces or unicode non-ASCII characters.
var srcKey =
decodeURIComponent(event.Records[0].s3.object.key.replace(/\+/g, " "));
var dstBucket = srcBucket + "resized";
var dstKey = "resized-" + srcKey;
// Sanity check: validate that source and destination are different buckets.
if (srcBucket == dstBucket) {
callback("Source and destination buckets are the same.");
return;
}
// Infer the image type.
var typeMatch = srcKey.match(/\.([^.]*)$/);
if (!typeMatch) {
callback("Could not determine the image type.");
return;
}
var imageType = typeMatch[1];
if (imageType != "jpg" && imageType != "png") {
callback(`Unsupported image type: ${imageType}`);
return;
}
// Download the image from S3, transform, and upload to a different S3 bucket.
async.waterfall([
function download(next) {
// Download the image from S3 into a buffer.
s3.getObject({
Bucket: srcBucket,
Key: srcKey
},
next);
},
function transform(response, next) {
gm(response.Body).size(function(err, size) {
// Infer the scaling factor to avoid stretching the image unnaturally.
var scalingFactor = Math.min(
MAX_WIDTH / size.width,
MAX_HEIGHT / size.height
);
var width = scalingFactor * size.width;
var height = scalingFactor * size.height;
// Transform the image buffer in memory.
this.resize(width, height)
.toBuffer(imageType, function(err, buffer) {
if (err) {
next(err);
} else {
next(null, response.ContentType, buffer);
}
});
});
},
function upload(contentType, data, next) {
// Stream the transformed image to a different S3 bucket.
s3.putObject({
Bucket: dstBucket,
Key: dstKey,
Body: data,
ContentType: contentType
},
next);
}
], function (err) {
if (err) {
console.error(
'Unable to resize ' + srcBucket + '/' + srcKey +
' and upload to ' + dstBucket + '/' + dstKey +
' due to an error: ' + err
);
} else {
console.log(
'Successfully resized ' + srcBucket + '/' + srcKey +
' and uploaded to ' + dstBucket + '/' + dstKey
);
}
callback(null, "message");
}
);
};
Then I zipped the folder, created a function in the AWS Lambda console, and uploaded the zip file from the UI. Then I added the S3 trigger, and I also created the role with the correct permissions and policies.
But when I upload a JPG file to the S3 bucket, it is neither resized nor is a thumbnail created. What could be wrong here?
Lambda functions send their debug information to Amazon CloudWatch Logs. Examine the log file to determine what went wrong.
If there is no log, then either the Lambda function never executed, or the Lambda function was not given sufficient permissions to write to CloudWatch Logs.
See: Accessing Amazon CloudWatch Logs for AWS Lambda
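As a concrete way to check (assuming the AWS CLI v2 is installed and the default /aws/lambda/<function-name> log group naming), you can tail the function's log group right after uploading a test image:

aws logs tail /aws/lambda/<your-function-name> --follow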

Error with AWS Lambda salt and hash

I've been using source code from AWS Lambda in Action - Poccia, to create users in a User Pool and Identity Pool. I keep getting the error:
Response:
{
"errorMessage": "RequestId: f6511085-f22c-11e7-be27-534dfc5d6456 Process exited before completing request"
}
Request ID:
"f6511085-f22c-11e7-be27-534dfc5d6456"
Function Logs:
START RequestId: f6511085-f22c-11e7-be27-534dfc5d6456 Version: $LATEST
2018-01-05T15:27:38.890Z f6511085-f22c-11e7-be27-534dfc5d6456 TypeError: Pass phrase must be a buffer
at TypeError (native)
at pbkdf2 (crypto.js:576:20)
at Object.exports.pbkdf2 (crypto.js:558:10)
at computeHash (/var/task/lib/cryptoUtils.js:10:10)
at InternalFieldObject.ondone (/var/task/lib/cryptoUtils.js:19:4)
END RequestId: f6511085-f22c-11e7-be27-534dfc5d6456
REPORT RequestId: f6511085-f22c-11e7-be27-534dfc5d6456 Duration: 113.62 ms Billed Duration: 200 ms Memory Size: 128 MB Max Memory Used: 33 MB
RequestId: f6511085-f22c-11e7-be27-534dfc5d6456 Process exited before completing request
I'm new to AWS Services and am not sure why this error is occurring. Below is the Lambda function I'm attempting to use and following is the cryptoUtils.js script it's referencing.
console.log('Loading function');
//Loading standard module, such as crypto and the AWS SDK
var AWS = require('aws-sdk');
var crypto = require('crypto');
var cryptoUtils = require('./lib/cryptoUtils.js'); //Loading the cryptoUtils.js module shared code, included in the uploaded ZIP archive
var config = require('./config.json'); //Loading the configuration in the config.json file, included in the uploaded ZIP archive
var dynamodb = new AWS.DynamoDB({
accessKeyId: 'usingKEYfromIAM',
secretAccessKey: 'usingKEYfromIAM',
}); //Getting the Amazon DynamoDB service object
var ses = new AWS.SES(); //Getting Amazon SES service object
function storeUser(email, password, salt, fn) { //The storeUser() function stores the new user in the DynamoDB table.
var len = 128;
crypto.randomBytes(len, function(err, token) { //A random token sent in the validation email and used to validate a user
if (err) return fn(err);
token = token.toString('hex');
dynamodb.putItem({ //Putting an item in the DynamoDB table
TableName: config.DDB_TABLE, //The table name is taken from the config.json configuration file.
//Most of the data is string ("S"), but the verified attribute is Boolean ("BOOL"),
//new users aren't verified (false), and the randomly generated token is stored in the "verifyToken" attribute
Item: {
email: {
S: email
},
passwordHash: {
S: password
},
passwordSalt: {
S: salt
},
verified: {
BOOL: false
},
verifyToken: {
S: token
}
},
ConditionExpression: 'attribute_not_exists (email)' //This condition avoids overwriting existing users (with the same email).
}, function(err, data) {
if (err) return fn(err);
else fn(null, token); //The storeUser() function returns the randomly generated token.
});
});
}
function sendVerificationEmail(email, token, fn) { //The sendVerificationEmail() function sends the verification email to the new user.
var subject = 'Verification Email for ' + config.EXTERNAL_NAME;
//The verification link, to the verify.html page, passes the randomly generated token as a query parameter.
var verificationLink = config.VERIFICATION_PAGE + '?email=' + encodeURIComponent(email) + '&verify=' + token;
ses.sendEmail({ //Sending the email in HTML format
Source: config.EMAIL_SOURCE,
Destination: {
ToAddresses: [
email
]
},
Message: {
Subject: {
Data: subject
},
Body: {
Html: {
Data: '<html><head>' + '<meta http-equiv= "Content-Type" content="test/html; charset=UTF-8" />' +
'<title>' + subject + '</title>' + '</head><body>' + 'Please <a href="' + verificationLink +
'">click here to verify your email address</a> or a copy & paste the following link in a browser:' +
'<br><br>' + '' + verificationLink + '' + '</body></html>'
}
}
}
}, fn);
}
exports.handler = (event, context, callback) => { //The function that's exported and can be invoked using AWS Lambda as createUser
//Getting the input parameters (email, password) from the event
var email = event.email;
var clearPassword = event.password;
//Using computeHash() from cryptoUtils.js to salt and hash the password.
cryptoUtils.computeHash(clearPassword, function(err, salt, hash) {
if (err) {
callback('Error in hash: ' + err);
} else {
storeUser(email, hash, salt, function(err, token) { //Storing the user via the storeUser()function
if (err) {
if (err.code == 'ConditionalCheckFailedException') { //Checking if the database error is due to the email already being present in the database
//userID already found
callback(null, {
created: false
});
} else {
callback('Error in storUser: ' + err);
}
} else {
sendVerificationEmail(email, token, function(err, data) { //Sending the verification email
if (err) {
callback('Error in sendVerificationEmail: ' + err);
} else {
callback(null, {
created: true
});
}
});
}
});
}
});
};
// cryptoUtils.js
var crypto = require('crypto');
function computeHash(password, salt, fn) {
    var len = 512;
    var iterations = 4096;
    var digest = 'sha512';
    if (3 == arguments.length) {
        crypto.pbkdf2(password, salt, iterations, len, digest, function(err, derivedKey) {
            if (err) return fn(err);
            else fn(null, salt, derivedKey.toString('base64'));
        });
    } else {
        fn = salt;
        crypto.randomBytes(len, function(err, solat) {
            if (err) return fn(err);
            salt = salt.toString('base64');
            computeHash(password, salt, fn);
        });
    }
}
module.exports.computeHash = computeHash;
If anybody has any suggestions or needs more information to help me determine why the error is occurring I would greatly appreciate it. Thank you.
Is the password you're passing a number?
If so, convert it to a String.
If you don't want to do that, you can pass a Buffer object instead, using the class method Buffer.from(string[, encoding]):
https://nodejs.org/api/buffer.html#buffer_class_method_buffer_from_string_encoding
crypto.pbkdf2(Buffer.from(password, 'utf8'), salt, iterations, len, digest, function(err, derivedKey) {
    if (err) return fn(err);
    else fn(null, salt, derivedKey.toString('base64'));
});
Hope it helps!
var crypto = require('crypto');
function computeHash(password, salt, fn) {
    // Bytesize. The larger the numbers, the better the security, but the longer it will take to complete
    var len = 512;
    var iterations = 4096;
    var digest = 'sha512';
    if (3 == arguments.length) {
        crypto.pbkdf2(Buffer.from(password, 'utf8'), salt, iterations, len, digest, function(err, derivedKey) {
            if (err) return fn(err);
            else fn(null, salt, derivedKey.toString('base64'));
        });
    } else {
        fn = salt;
        crypto.randomBytes(len, function(err, salt) {
            if (err) return fn(err);
            salt = salt.toString('base64');
            computeHash(password, salt, fn);
        });
    }
}
module.exports.computeHash = computeHash;
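For completeness, a small usage sketch (assumption: the handler coerces whatever arrives in event.password to a string, per the "convert it to String" suggestion above, before calling computeHash):

// In the createUser handler - make sure the pass phrase is a string
var clearPassword = String(event.password);
cryptoUtils.computeHash(clearPassword, function(err, salt, hash) {
    if (err) return callback('Error in hash: ' + err);
    // store hash and salt with storeUser() as before
});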
Error "TypeError: Pass phrase must be a buffer"
does suggest trying a Buffer conversion of input String
Before your test of Buffer.from(password, 'utf8')
did you verify input value is encoding 'utf8',
and not some other encoding such as base64 or latin1 ?
List of encodings that Node.js supports

Timeout while launching EC2 instances from AWS Lambda

I have been trying to launch a new EC2 instance, as well as add a piece of string data to my SQS queue, from Lambda in response to an object-upload event in my S3 bucket.
I have been able to update my SQS queue successfully, but I have been unable to launch the new EC2 instance. Despite setting the Lambda function's time allocation to the maximum of 5 minutes and increasing its memory allocation, an operation timeout error keeps surfacing.
My code is below. Can anyone point out the potential causes of such an error? I have included the whole piece of code for reference, but the part concerning the launch is towards the end.
Thank you so much!
console.log('Loading function');
var fs = require('fs');
var async = require('async');
var aws = require('aws-sdk');
var s3 = new aws.S3({ apiVersion: '2006-03-01' });
var sqs = new aws.SQS({apiVersion: '2012-11-05'});
var ecs = new aws.ECS({apiVersion: '2014-11-13'});
var ec2 = new aws.EC2({apiVersion: '2015-10-01'});
// Check if the given key suffix matches a suffix in the whitelist. Return true if it matches, false otherwise.
exports.checkS3SuffixWhitelist = function(key, whitelist) {
if(!whitelist){ return true; }
if(typeof whitelist == 'string'){ return key.match(whitelist + '$') }
if(Object.prototype.toString.call(whitelist) === '[object Array]') {
for(var i = 0; i < whitelist.length; i++) {
if(key.match(whitelist[i] + '$')) { return true; }
}
return false;
}
console.log(
'Unsupported whitelist type (' + Object.prototype.toString.call(whitelist) +
') for: ' + JSON.stringify(whitelist)
);
return false;
};
exports.handler = function(event, context) {
//console.log('Received event:', JSON.stringify(event, null, 2));
console.log('Received event:');
//Read in the configuration file
var config = JSON.parse(fs.readFileSync('config.json', 'utf8'));
if(!config.hasOwnProperty('s3_key_suffix_whitelist')) {
config.s3_key_suffix_whitelist = false;
}
console.log('Config: ' + JSON.stringify(config));
var name = event.Records[0].s3.object.key;
if(!exports.checkS3SuffixWhitelist(name, config.s3_key_suffix_whitelist)) {
context.fail('Suffix for key: ' + name + ' is not in the whitelist')
}
// Get the object from the event and show its key
var bucket = event.Records[0].s3.bucket.name;
var key = decodeURIComponent(event.Records[0].s3.object.key.replace(/\+/g, ' '));
var params = {
Bucket: bucket,
Key: key
};
s3.getObject(params, function(err, data) {
if (err) {
console.log(err);
var message = "Error getting object " + key + " from bucket " + bucket +
". Make sure they exist and your bucket is in the same region as this function.";
console.log(message);
context.fail(message);
} else {
console.log('CONTENT TYPE:', key);
context.succeed(key);
}
});
//Sending the image key as a message to SQS and starting a new instance on EC2
async.waterfall([
function(next){
var params = {
MessageBody: JSON.stringify(event),
QueueUrl: config.queue
};
console.log("IN QUEUE FUNCTION");
sqs.sendMessage(params, function (err, data) {
if (err) { console.warn('Error while sending message: ' + err); }
else { console.info('Message sent, ID: ' + data.MessageId); }
next(err);
});
},
function (next) {
console.log("INITIALIZING ECS");
var params = {
ImageId: 'ami-e559b485',
MinCount: 1,
MaxCount: 1,
DryRun: true,
InstanceType: 't2.micro',
KeyName: 'malpem2102',
SubnetId: 'subnet-e8607e8d'
}
ec2.runInstances(params, function(err,data){
if(err){
console.log(err, err.stack);
context.fail('Error', "Error getting file: " + err);
return;
} else{
var instanceId = data.Instances[0].InstanceId;
console.log("Created instance ", instanceId);
context.suceed("Created instance");
}
});
}
], function(err){
if (err) {
context.fail('An error has occurred: ' + err);
}
else {
context.succeed('Successfully processed Amazon S3 URL.');
}
}
);
};