I use this Lambda function to generate thumbnails on the fly, but I get the following error:
REPORT RequestId: 9369f148-2a85-11e7-a571-5f1e1818669e Duration: 188.18 ms Billed Duration: 200 ms Memory Size: 1536 MB Max Memory Used: 1536 MB
and:
RequestId: 9369f148-2a85-11e7-a571-5f1e1818669e Process exited before completing request
So I think I'm hitting the memory limit. Without the uploadRecentImage() function it works, but if I add a new size to imgVariants[] I hit the memory limit as well.
I think the way the function handles the imgVariants (the each loop) causes this, but I don't know how to make it better.
I will be grateful for any help.
Here is my function:
// dependencies
var async = require('async');
var AWS = require('aws-sdk');
var gm = require('gm').subClass({
imageMagick: true
}); // use ImageMagick
var util = require('util');
// configuration as code - add, modify, remove array elements as desired
var imgVariants = [
{
"SIZE": "Large1",
"POSTFIX": "-l",
"MAX_WIDTH": 6000,
"MAX_HEIGHT": 6000,
"SIZING_QUALITY": 75,
"INTERLACE": "Line"
},
{
"SIZE": "Large1",
"POSTFIX": "-l",
"MAX_WIDTH": 1280,
"MAX_HEIGHT": 1280,
"SIZING_QUALITY": 75,
"INTERLACE": "Line"
},
{
"SIZE": "Large1",
"POSTFIX": "-l",
"MAX_WIDTH": 500,
"MAX_HEIGHT": 500,
"SIZING_QUALITY": 75,
"INTERLACE": "Line"
},
{
"SIZE": "Large1",
"POSTFIX": "-l",
"MAX_WIDTH": 100,
"MAX_HEIGHT": 100,
"SIZING_QUALITY": 75,
"INTERLACE": "Line"
}
];
var DST_BUCKET_POSTFIX = "resized";
// get reference to S3 client
var s3 = new AWS.S3();
exports.handler = function (event, context) {
// Read options from the event.
console.log("Reading options from event:\n", util.inspect(event, {
depth: 5
}));
var srcBucket = event.Records[0].s3.bucket.name;
// Object key may have spaces or unicode non-ASCII characters.
var srcKey = decodeURIComponent(event.Records[0].s3.object.key.replace(/\+/g, " "));
// derive the file name and extension
var srcFile = srcKey.match(/(.+)\.([^.]+)/);
var srcName = srcFile[1];
var scrExt = srcFile[2];
// set the destination bucket
var dstBucket = srcBucket + DST_BUCKET_POSTFIX;
// make sure that source and destination are different buckets.
if (srcBucket === dstBucket) {
console.error("Destination bucket must be different from source bucket.");
return;
}
if (!scrExt) {
console.error('unable to derive file type extension from file key ' + srcKey);
return;
}
if (scrExt != "jpg" && scrExt != "png") {
console.log('skipping non-supported file type ' + srcKey + ' (must be jpg or png)');
return;
}
function processImage(data, options, callback) {
gm(data.Body).size(function (err, size) {
var scalingFactor = Math.min(
options.MAX_WIDTH / size.width,
options.MAX_HEIGHT / size.height
);
var width = scalingFactor * size.width;
var height = scalingFactor * size.height;
this.resize(width, height)
.quality(options.SIZING_QUALITY || 75)
.interlace(options.INTERLACE || 'None')
.toBuffer(scrExt, function (err, buffer) {
if (err) {
callback(err);
} else {
uploadImage(data.ContentType, buffer, options, callback);
uploadRecentImage(data.ContentType, buffer, options, callback);
}
});
});
}
function uploadImage(contentType, data, options, callback) {
// Upload the transformed image to the destination S3 bucket.
s3.putObject({
Bucket: dstBucket,
Key: options.MAX_WIDTH + '/' + srcName + '.' + scrExt,
Body: data,
ContentType: contentType
},
callback);
}
function uploadRecentImage(contentType, data, options, callback) {
if(options.MAX_WIDTH == 500){
s3.putObject({
Bucket: dstBucket,
Key: 'recent_optimized.' + scrExt,
Body: data,
ContentType: contentType
},
callback);
}
if(options.MAX_WIDTH == 100){
s3.putObject({
Bucket: dstBucket,
Key: 'recent_thumb.' + scrExt,
Body: data,
ContentType: contentType
},
callback);
}
}
// Download the image from S3 and process for each requested image variant.
async.waterfall(
[
function download(next) {
// Download the image from S3 into a buffer.
s3.getObject({
Bucket: srcBucket,
Key: srcKey
},
next);
},
function processImages(data, next) {
async.each(imgVariants, function (variant, next) {
processImage(data, variant, next);
}, next);
}
],
function (err) {
if (err) {
console.error(
'Unable to resize ' + srcBucket + '/' + srcKey +
' and upload to ' + dstBucket +
' due to an error: ' + err
);
} else {
console.log(
'Successfully resized ' + srcBucket + '/' + srcKey +
' and uploaded to ' + dstBucket
);
}
context.done();
}
);
};
You can limit the number of parallel processImage calls:
Replace async.each(imgVariants,
with async.eachLimit(imgVariants, 2,
so that no more than two variants are processed in parallel.
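Applied to the posted handler, the change would look roughly like this (a sketch; everything else stays as it is):
function processImages(data, next) {
    // Resize at most two variants at a time to keep peak memory lower.
    async.eachLimit(imgVariants, 2, function (variant, done) {
        processImage(data, variant, done);
    }, next);
}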
The script has a bug:
uploadImage(data.ContentType, buffer, options, callback);
uploadRecentImage(data.ContentType, buffer, options, callback);
This calls callback twice, which is not allowed; only call the callback once.
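One way to do that is to chain the two uploads and report completion only once, for example with async.series, which is already a dependency (a sketch; note that uploadRecentImage would then also have to call its callback for the sizes where it skips the upload, otherwise the series never finishes):
.toBuffer(scrExt, function (err, buffer) {
    if (err) {
        callback(err);
    } else {
        // Run both uploads one after the other and call the variant's
        // callback exactly once, when both have finished.
        async.series([
            function (done) { uploadImage(data.ContentType, buffer, options, done); },
            function (done) { uploadRecentImage(data.ContentType, buffer, options, done); }
        ], callback);
    }
});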
The script has another bug: event.Records[0] means it only processes the first record. If you upload multiple images at the same time, some of them will be missed.
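A sketch of how the handler could walk over every record instead (the per-object work stays the same; only the outer loop changes):
exports.handler = function (event, context) {
    // Process every S3 record in the event, not only Records[0].
    async.each(event.Records, function (record, recordDone) {
        var srcBucket = record.s3.bucket.name;
        var srcKey = decodeURIComponent(record.s3.object.key.replace(/\+/g, " "));
        // ... run the existing download/resize waterfall for this bucket/key
        // and call recordDone(err) when it finishes ...
        recordDone();
    }, function (err) {
        context.done(err);
    });
};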
I have an S3 bucket laid out as below:
myBucket
a/
b/
c/
where myBucket is the S3 bucket and a/, b/, c/ are key prefixes (folders) inside that bucket.
I upload images into a/. The S3 event notification triggers SQS, which in turn triggers a Lambda function that removes the image background and uploads the result into the b/ folder.
The problem is that if I upload a folder with around 26 images to S3, only 22 or 23 of them trigger the Lambda, and only those images get processed.
For some reason S3 is not triggering the Lambda for all the images, or is this something I should configure in my Lambda function?
Here is my function code:
exports.handler = async(event, context, callback) => {
try {
console.log(event.Records[0]);
var json = JSON.parse(event.Records[0]['body']);
console.log('json: '+json);
json = JSON.parse(json['Message']);
json = json['Records'][0]['s3'];
console.log(json);
var srcBucket = json['bucket']['name'];
console.log('srcBucket: ' + srcBucket);
var srcKey = decodeURIComponent(json['object']['key'].replace(/\+/g, ' '));
console.log('srcKey: ' + srcKey);
var str = (srcKey.split('/').pop()).split('.')[0];
console.log('str: ' + str);
if (str != '') {
var folderPath = srcKey.substr(srcKey.indexOf('/') + 1).split('.')[0];
folderPath = folderPath.substring(0, folderPath.lastIndexOf('/'));
console.log('folderPath: ' + folderPath);
const params1 = { Bucket: srcBucket, Key: srcKey };
var origimage = await s3.getObject(params1).promise();
var destObject = await origimage.Body;
var destKey = 'removebg/' + folderPath + '/' + str + '.jpg';
var options = {
'method': 'POST',
'url': 'https://api.remove.bg/v1.0/removebg',
'headers': {
'X-Api-Key': 'xxxxxxxxxxx'
},
formData: {
'image_file': destObject,
'size': 'auto'
},
encoding: null
};
request(options, function(error, response, body) {
if (error) {
console.log(error);
sendmessage(error, 'Error removing image background', arn, srcBucket + '/' + srcKey, destBucket + destKey);
}
var params = { Bucket: destBucket, Key: destKey, Body: body };
s3.upload(params, function(err, data) {
if (err) {
console.log('Error uploading data: ', err);
sendmessage(err, 'Error uploading transparent image to s3', arn, srcBucket + '/' + srcKey, destBucket + destKey);
}
else { console.log('Successfully uploaded data to ' + destBucket); }
});
});
}
}
catch (e) {
console.log(e);
}
callback(null, 'All done!');
};
Please let me know. Thanks in advance.
I think your problem lies on these lines:
console.log(event.Records[0]);
var json = JSON.parse(event.Records[0]['body']);
console.log('json: '+json);
json = JSON.parse(json['Message']);
json = json['Records'][0]['s3'];
Your function is only looking at the first record that is provided to it. Multiple records can be given to the Lambda function, so your function should loop through the Records entries and process all of the events that are provided.
It should do something like:
for (const record of event.Records) {
    console.log(record);
    var json = JSON.parse(record['body']);
    console.log('json: ' + json);
    json = JSON.parse(json['Message']);
    json = json['Records'][0]['s3'];
    // ... process this object as before ...
}
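Since the handler is declared async, the same idea can also be written as a plain loop that awaits the work for each record (a sketch; processObject is a hypothetical helper holding the existing getObject / remove.bg / upload logic for one object):
exports.handler = async (event) => {
    for (const record of event.Records) {
        // The body is parsed twice, mirroring the original code (body -> Message -> S3 event).
        const message = JSON.parse(JSON.parse(record.body).Message);
        for (const s3Record of message.Records) {
            // processObject is a hypothetical helper containing the existing
            // per-object logic (download, remove background, upload).
            await processObject(s3Record.s3);
        }
    }
    return 'All done!';
};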
I am learning to use AWS Lambda functions. What I am trying to do: when I upload an image (JPEG) file to the S3 bucket, the image should be resized. But it is not working. See what I have done below.
I created a folder, then created a node_modules folder inside it, and then created a file called CreateThumbnail.js inside the main folder.
This is the CreateThumbnail.js:
// dependencies
var async = require('async');
var AWS = require('aws-sdk');
var gm = require('gm')
.subClass({ imageMagick: true }); // Enable ImageMagick integration.
var util = require('util');
// constants
var MAX_WIDTH = 100;
var MAX_HEIGHT = 100;
// get reference to S3 client
var s3 = new AWS.S3();
exports.handler = function(event, context, callback) {
// Read options from the event.
console.log("Reading options from event:\n", util.inspect(event, {depth: 5}));
var srcBucket = event.Records[0].s3.bucket.name;
// Object key may have spaces or unicode non-ASCII characters.
var srcKey =
decodeURIComponent(event.Records[0].s3.object.key.replace(/\+/g, " "));
var dstBucket = srcBucket + "resized";
var dstKey = "resized-" + srcKey;
// Sanity check: validate that source and destination are different buckets.
if (srcBucket == dstBucket) {
callback("Source and destination buckets are the same.");
return;
}
// Infer the image type.
var typeMatch = srcKey.match(/\.([^.]*)$/);
if (!typeMatch) {
callback("Could not determine the image type.");
return;
}
var imageType = typeMatch[1];
if (imageType != "jpg" && imageType != "png") {
callback(`Unsupported image type: ${imageType}`);
return;
}
// Download the image from S3, transform, and upload to a different S3 bucket.
async.waterfall([
function download(next) {
// Download the image from S3 into a buffer.
s3.getObject({
Bucket: srcBucket,
Key: srcKey
},
next);
},
function transform(response, next) {
gm(response.Body).size(function(err, size) {
// Infer the scaling factor to avoid stretching the image unnaturally.
var scalingFactor = Math.min(
MAX_WIDTH / size.width,
MAX_HEIGHT / size.height
);
var width = scalingFactor * size.width;
var height = scalingFactor * size.height;
// Transform the image buffer in memory.
this.resize(width, height)
.toBuffer(imageType, function(err, buffer) {
if (err) {
next(err);
} else {
next(null, response.ContentType, buffer);
}
});
});
},
function upload(contentType, data, next) {
// Stream the transformed image to a different S3 bucket.
s3.putObject({
Bucket: dstBucket,
Key: dstKey,
Body: data,
ContentType: contentType
},
next);
}
], function (err) {
if (err) {
console.error(
'Unable to resize ' + srcBucket + '/' + srcKey +
' and upload to ' + dstBucket + '/' + dstKey +
' due to an error: ' + err
);
} else {
console.log(
'Successfully resized ' + srcBucket + '/' + srcKey +
' and uploaded to ' + dstBucket + '/' + dstKey
);
}
callback(null, "message");
}
);
};
Then I zipped the folder, created a function in the AWS Lambda console, and uploaded the zip file from the UI as follows.
Then I added the s3 trigger as in the screenshot.
I also created the role with correct permission and policies.
But when I upload a JPG file to the S3 bucket, it is neither resized nor is a thumbnail created. What could be wrong here?
This is the function policy:
Lambda functions send their debug information to Amazon CloudWatch Logs. Examine the log file to determine what went wrong.
If there is no log, then either the Lambda function never executed, or the Lambda function was not given sufficient permissions to write to CloudWatch Logs.
See: Accessing Amazon CloudWatch Logs for AWS Lambda
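If missing permissions turn out to be the cause, the function's execution role needs log-write permissions roughly like the following (a minimal sketch of the usual basic-execution statement):
{
  "Effect": "Allow",
  "Action": [
    "logs:CreateLogGroup",
    "logs:CreateLogStream",
    "logs:PutLogEvents"
  ],
  "Resource": "arn:aws:logs:*:*:*"
}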
I've been using source code from AWS Lambda in Action (Poccia) to create users in a User Pool and Identity Pool. I keep getting this error:
Response:
{
"errorMessage": "RequestId: f6511085-f22c-11e7-be27-534dfc5d6456 Process exited before completing request"
}
Request ID:
"f6511085-f22c-11e7-be27-534dfc5d6456"
Function Logs:
START RequestId: f6511085-f22c-11e7-be27-534dfc5d6456 Version: $LATEST
2018-01-05T15:27:38.890Z f6511085-f22c-11e7-be27-534dfc5d6456 TypeError: Pass phrase must be a buffer
at TypeError (native)
at pbkdf2 (crypto.js:576:20)
at Object.exports.pbkdf2 (crypto.js:558:10)
at computeHash (/var/task/lib/cryptoUtils.js:10:10)
at InternalFieldObject.ondone (/var/task/lib/cryptoUtils.js:19:4)
END RequestId: f6511085-f22c-11e7-be27-534dfc5d6456
REPORT RequestId: f6511085-f22c-11e7-be27-534dfc5d6456 Duration: 113.62 ms Billed Duration: 200 ms Memory Size: 128 MB Max Memory Used: 33 MB
RequestId: f6511085-f22c-11e7-be27-534dfc5d6456 Process exited before completing request
I'm new to AWS services and am not sure why this error is occurring. Below is the Lambda function I'm attempting to use, followed by the cryptoUtils.js script it references.
console.log('Loading function');
//Loading standard module, such as crypto and the AWS SDK
var AWS = require('aws-sdk');
var crypto = require('crypto');
var cryptoUtils = require('./lib/cryptoUtils.js'); //Loading the cryptoUtils.js module shared code, included in the uploaded ZIP archive
var config = require('./config.json'); //Loading the configuration in the config.json file, included in the uploaded ZIP archive
var dynamodb = new AWS.DynamoDB({
accessKeyId: 'usingKEYfromIAM',
secretAccessKey: 'usingKEYfromIAM',
}); //Getting the Amazon DynamoDB service object
var ses = new AWS.SES(); //Getting Amazon SES service object
function storeUser(email, password, salt, fn) { //The storeUser() function stores the new user in the DynamoDB table.
var len = 128;
crypto.randomBytes(len, function(err, token) { //A random token sent in the validation email and used to validate a user
if (err) return fn(err);
token = token.toString('hex');
dynamodb.putItem({ //Putting an item in the DynamoDB table
TableName: config.DDB_TABLE, //The table name is taken from the config.json configuration file.
//Most of the data is string ("S"), but the verified attribute is Boolean ("BOOL"),
//new users aren't verified (false), and the randomly generated token is stored in the "verifyToken" attribute
Item: {
email: {
S: email
},
passwordHash: {
S: password
},
passwordSalt: {
S: salt
},
verified: {
BOOL: false
},
verifyToken: {
S: token
}
},
ConditionExpression: 'attribute_not_exists (email)' //This condition avoids overwriting existing users (with the same email).
}, function(err, data) {
if (err) return fn(err);
else fn(null, token); //The storeUser() function returns the randomly generated token.
});
});
}
function sendVerificationEmail(email, token, fn) { //The sendVerificationEmail() function sends the verification email to the new user.
var subject = 'Verification Email for ' + config.EXTERNAL_NAME;
//The verification link, to the verify.html page, passes the randomly generated token as a query parameter.
var verificationLink = config.VERIFICATION_PAGE + '?email=' + encodeURIComponent(email) + '&verify=' + token;
ses.sendEmail({ //Sending the email in HTML format
Source: config.EMAIL_SOURCE,
Destination: {
ToAddresses: [
email
]
},
Message: {
Subject: {
Data: subject
},
Body: {
Html: {
Data: '<html><head>' + '<meta http-equiv= "Content-Type" content="test/html; charset=UTF-8" />' +
'<title>' + subject + '</title>' + '</head><body>' + 'Please <a href="' + verificationLink +
'">click here to verify your email address</a> or a copy & paste the following link in a browser:' +
'<br><br>' + '' + verificationLink + '' + '</body></html>'
}
}
}
}, fn);
}
exports.handler = (event, context, callback) => { //The function that's exported and can be invoked using AWS Lambda as createUser
//Getting the input parameters (email, password) from the event
var email = event.email;
var clearPassword = event.password;
//Using computeHash() from cryptoUtils.js to salt and hash the password.
cryptoUtils.computeHash(clearPassword, function(err, salt, hash) {
if (err) {
callback('Error in hash: ' + err);
} else {
storeUser(email, hash, salt, function(err, token) { //Storing the user via the storeUser()function
if (err) {
if (err.code == 'ConditionalCheckFailedException') { //Checking if the database error is due to the email already being present in the database
//userID already found
callback(null, {
created: false
});
} else {
callback('Error in storUser: ' + err);
}
} else {
sendVerificationEmail(email, token, function(err, data) { //Sending the verification email
if (err) {
callback('Error in sendVerificationEmail: ' + err);
} else {
callback(null, {
created: true
});
}
});
}
});
}
});
};
var crypto = require('crypto');
function computeHash(password, salt, fn) {
var len = 512;
var iterations = 4096;
var digest = 'sha512';
if (3 == arguments.length) {
crypto.pbkdf2(password, salt, iterations, len, digest, function(err, derivedKey) {
if (err) return fn(err);
else fn(null, salt, derivedKey.toString('base64'));
});
} else {
fn = salt;
crypto.randomBytes(len, function(err, solat) {
if (err) return fn(err);
salt = salt.toString('base64');
computeHash(password, salt, fn);
});
}
}
module.exports.computeHash = computeHash;
If anybody has any suggestions or needs more information to help me determine why the error is occurring I would greatly appreciate it. Thank you.
Is the password you're passing a number?
If so, convert it to a String.
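For example, in the handler shown in the question, the minimal change would be (a sketch):
//Coerce the incoming value to a string before it reaches pbkdf2;
//a numeric password in the test event would explain this TypeError.
var clearPassword = String(event.password);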
If you don't want to do that, you can pass a Buffer object instead, built with the class method Buffer.from(string[, encoding]):
https://nodejs.org/api/buffer.html#buffer_class_method_buffer_from_string_encoding
crypto.pbkdf2(Buffer.from(password, 'utf8'), salt, iterations, len, digest, function(err, derivedKey) {
if (err) return fn(err);
else fn(null, salt, derivedKey.toString('base64'));
});
Hope it helps!
var crypto = require('crypto');
function computeHash(password, salt, fn) {
// Bytesize. The larger the numbers, the better the security, but the longer it will take to complete
var len = 512;
var iterations = 4096;
var digest = 'sha512';
if (3 == arguments.length) {
crypto.pbkdf2(Buffer.from(password, 'utf8'), salt, iterations, len, digest, function(err, derivedKey) {
if (err) return fn(err);
else fn(null, salt, derivedKey.toString('base64'));
});
} else {
fn = salt;
crypto.randomBytes(len, function(err, salt) {
if (err) return fn(err);
salt = salt.toString('base64');
computeHash(password, salt, fn);
});
}
}
module.exports.computeHash = computeHash;
Error "TypeError: Pass phrase must be a buffer"
does suggest trying a Buffer conversion of input String
Before your test of Buffer.from(password, 'utf8')
did you verify input value is encoding 'utf8',
and not some other encoding such as base64 or latin1 ?
List of encodings that Node.js supports
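For instance, if the value stored upstream were actually base64 text rather than plain utf8, the conversion would need that encoding instead (a sketch; only correct if the input really is base64):
crypto.pbkdf2(Buffer.from(password, 'base64'), salt, iterations, len, digest, function(err, derivedKey) {
    if (err) return fn(err);
    else fn(null, salt, derivedKey.toString('base64'));
});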
I stumbled across this function on the internet.
private createFileName() {
var d = new Date(),
n = d.getTime(),
newFileName = n + ".jpg";
return newFileName;
}
Its purpose is to rename an image file. The questions I have are:
1. Does this restrict image uploads to jpg only, or does it convert the image to jpg?
2. If it's the former, how can I get the image extension before upload and modify the function to something like createFileName(ext), so that every image type can be uploaded?
If it would help, here is the full code:
lastImage: string = null;
loading: Loading;
public takePicture(sourceType) {
// Create options for the Camera Dialog
var options = {
quality: 100,
sourceType: sourceType,
saveToPhotoAlbum: false,
correctOrientation: true
};
// Get the data of an image
this.camera.getPicture(options).then((imagePath) => {
// Special handling for Android library
if (this.platform.is('android') && sourceType === this.camera.PictureSourceType.PHOTOLIBRARY) {
this.filePath.resolveNativePath(imagePath)
.then(filePath => {
let correctPath = filePath.substr(0, filePath.lastIndexOf('/') + 1);
let currentName = imagePath.substring(imagePath.lastIndexOf('/') + 1, imagePath.lastIndexOf('?'));
this.copyFileToLocalDir(correctPath, currentName, this.createFileName());
});
} else {
var currentName = imagePath.substr(imagePath.lastIndexOf('/') + 1);
var correctPath = imagePath.substr(0, imagePath.lastIndexOf('/') + 1);
this.copyFileToLocalDir(correctPath, currentName, this.createFileName());
}
}, (err) => {
this.presentToast('Error while selecting image.');
});
}
private copyFileToLocalDir(namePath, currentName, newFileName) {
this.file.copyFile(namePath, currentName, cordova.file.dataDirectory, newFileName).then(success => {
this.lastImage = newFileName;
}, error => {
this.presentToast('Error while storing file.');
});
}
private presentToast(text) {
let toast = this.toastCtrl.create({
message: text,
duration: 3000,
position: 'top'
});
toast.present();
}
// Always get the accurate path to your apps folder
public pathForImage(img) {
if (img === null) {
return '';
} else {
return cordova.file.dataDirectory + img;
}
}
public uploadImage() {
// Destination URL
var url = "http://my_url/uploads.php";
// File for Upload
var targetPath = this.pathForImage(this.lastImage);
// File name only
var filename = this.lastImage;
var options = {
fileKey: "file",
fileName: filename,
chunkedMode: false,
mimeType: "multipart/form-data",
params : {'fileName': filename}
};
const fileTransfer: TransferObject = this.transfer.create();
this.loading = this.loadingCtrl.create({
content: 'Uploading...',
});
this.loading.present();
// Use the FileTransfer to upload the image
fileTransfer.upload(targetPath, url, options).then(data => {
this.loading.dismissAll()
this.presentToast('Image succesfully uploaded.');
}, err => {
this.loading.dismissAll()
this.presentToast('Error while uploading file.');
});
}
1- It does not restrict uploads to jpg, and it does not convert to jpg. As the method name says (createFileName), it just creates a string with the file name, nothing more.
2- The image extension will be jpg because that is the default camera encodingType, and it looks like you are taking pictures with the camera. You can change the encodingType in the camera options.
You can find all the camera options at https://ionicframework.com/docs/native/camera/
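For the second question in the post, a sketch of both pieces: setting the encoding in the camera options (encodingType is part of the Ionic Native camera options) and letting createFileName keep whatever extension the source file had (the ext parameter is an assumption, derived here from the picked file's name):
var options = {
  quality: 100,
  sourceType: sourceType,
  saveToPhotoAlbum: false,
  correctOrientation: true,
  // Request PNG instead of the default JPEG, if that is what you want.
  encodingType: this.camera.EncodingType.PNG
};
private createFileName(ext) {
  // Keep the original extension instead of hard-coding ".jpg".
  var d = new Date(),
      n = d.getTime(),
      newFileName = n + "." + ext;
  return newFileName;
}
// e.g. before copying, derive ext from the current file name:
var currentName = imagePath.substr(imagePath.lastIndexOf('/') + 1);
var ext = currentName.substring(currentName.lastIndexOf('.') + 1);
this.copyFileToLocalDir(correctPath, currentName, this.createFileName(ext));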
I have been trying to launch a new EC2 instance, as well as add a piece of string data to my SQS queue, from Lambda in response to an object-upload event in my S3 bucket.
I have been able to update my SQS queue successfully, but I have been unable to launch the new EC2 instance. Despite setting the Lambda function's timeout to the maximum of 5 minutes and increasing its memory allocation, an operation-timeout error keeps surfacing.
My code is below. Can anyone point out the potential causes of such an error? I have pasted the whole function here for reference, but the part concerning the launch is towards the end of the code.
Thank you so much!
console.log('Loading function');
var fs = require('fs');
var async = require('async');
var aws = require('aws-sdk');
var s3 = new aws.S3({ apiVersion: '2006-03-01' });
var sqs = new aws.SQS({apiVersion: '2012-11-05'});
var ecs = new aws.ECS({apiVersion: '2014-11-13'});
var ec2 = new aws.EC2({apiVersion: '2015-10-01'});
// Check if the given key suffix matches a suffix in the whitelist. Return true if it matches, false otherwise.
exports.checkS3SuffixWhitelist = function(key, whitelist) {
if(!whitelist){ return true; }
if(typeof whitelist == 'string'){ return key.match(whitelist + '$') }
if(Object.prototype.toString.call(whitelist) === '[object Array]') {
for(var i = 0; i < whitelist.length; i++) {
if(key.match(whitelist[i] + '$')) { return true; }
}
return false;
}
console.log(
'Unsupported whitelist type (' + Object.prototype.toString.call(whitelist) +
') for: ' + JSON.stringify(whitelist)
);
return false;
};
exports.handler = function(event, context) {
//console.log('Received event:', JSON.stringify(event, null, 2));
console.log('Received event:');
//Read in the configuration file
var config = JSON.parse(fs.readFileSync('config.json', 'utf8'));
if(!config.hasOwnProperty('s3_key_suffix_whitelist')) {
config.s3_key_suffix_whitelist = false;
}
console.log('Config: ' + JSON.stringify(config));
var name = event.Records[0].s3.object.key;
if(!exports.checkS3SuffixWhitelist(name, config.s3_key_suffix_whitelist)) {
context.fail('Suffix for key: ' + name + ' is not in the whitelist')
}
// Get the object from the event and show its key
var bucket = event.Records[0].s3.bucket.name;
var key = decodeURIComponent(event.Records[0].s3.object.key.replace(/\+/g, ' '));
var params = {
Bucket: bucket,
Key: key
};
s3.getObject(params, function(err, data) {
if (err) {
console.log(err);
var message = "Error getting object " + key + " from bucket " + bucket +
". Make sure they exist and your bucket is in the same region as this function.";
console.log(message);
context.fail(message);
} else {
console.log('CONTENT TYPE:', key);
context.succeed(key);
}
});
//Sending the image key as a message to SQS and starting a new instance on EC2
async.waterfall([
function(next){
var params = {
MessageBody: JSON.stringify(event),
QueueUrl: config.queue
};
console.log("IN QUEUE FUNCTION");
sqs.sendMessage(params, function (err, data) {
if (err) { console.warn('Error while sending message: ' + err); }
else { console.info('Message sent, ID: ' + data.MessageId); }
next(err);
});
},
function (next) {
console.log("INITIALIZING ECS");
var params = {
ImageId: 'ami-e559b485',
MinCount: 1,
MaxCount: 1,
DryRun: true,
InstanceType: 't2.micro',
KeyName: 'malpem2102',
SubnetId: 'subnet-e8607e8d'
}
ec2.runInstances(params, function(err,data){
if(err){
console.log(err, err.stack);
context.fail('Error', "Error getting file: " + err);
return;
} else{
var instanceId = data.Instances[0].InstanceId;
console.log("Created instance ", instanceId);
context.suceed("Created instance");
}
});
}
], function(err){
if (err) {
context.fail('An error has occurred: ' + err);
}
else {
context.succeed('Successfully processed Amazon S3 URL.');
}
}
);
};