I have created an AWS Lambda function to resize any image that is put in my S3 bucket, and an S3 event notification on every object creation that triggers the function. My handler code for Lambda is as follows:
var async = require('async'),
gm = require('gm').subClass({
imageMagick: true
}) // Enable ImageMagick integration.
, request = require('request').defaults({
encoding: null
});
var knox = require('knox');
var client = knox.createClient({
key: 'myKey',
secret: 'mySecretKey',
bucket: 'MyBucketName'
});
//hardcoded Value to get the Image url
var s3value = '.s3.amazonaws.com/';
var https = 'https://'
exports.handler = function (event, context) {
var srcBucket = event.Records[0].s3.bucket.name;
var srcKey = event.Records[0].s3.object.key;
var imageUrl = https + srcBucket + s3value + srcKey //My Http Url for image stored in s3 bucket.
request(imageUrl, function (err, res, res1) {
if (err) {
console.log(err);
} else {
gm(res1).resize(120)
.toBuffer('jpg', function (err, buffer) {
if (err) {
console.log(err);
} else {
var ImageName = "/" + imageUrl.substr(0, imageUrl.indexOf(".")) + "-1x" + imageUrl.substr(imageUrl.indexOf("."), imageUrl.length); //Renaming the present image
client.putBuffer(buffer, ImageName, function (err, response1) {
if (err) {
console.log(err);
} else {
console.log(response1.socket._httpMessage.url);
}
});
}
});
}
});
context.done();
}
My problem is that when I store an image in S3, the resized image is never created in the bucket, and I can't understand why. Can somebody help me with it?
It looks like you are calling context.done() outside the callback function for request(). This will end the Lambda function before any image resizing completes. You should only call context.done() when all processing has completed or failed.
exports.handler = function (event, context) {
var srcBucket = event.Records[0].s3.bucket.name;
var srcKey = event.Records[0].s3.object.key;
var imageUrl = https + srcBucket + s3value + srcKey //My Http Url for image stored in s3 bucket.
request(imageUrl, function (err, res, res1) {
if (err) {
console.log(err);
context.fail(err);
} else {
gm(res1).resize(120)
.toBuffer('jpg', function (err, buffer) {
if (err) {
console.log(err);
context.fail(err);
} else {
var ImageName = "/" + imageUrl.substr(0, imageUrl.indexOf(".")) + "-1x" + imageUrl.substr(imageUrl.indexOf("."), imageUrl.length); //Renaming the present image
client.putBuffer(buffer, ImageName, function (err, response1) {
if (err) {
console.log(err);
context.fail(err);
} else {
console.log(response1.socket._httpMessage.url);
context.succeed("It worked");
}
});
}
});
}
});
// Don't call context.done() here, the callback hasn't run yet
//context.done();
}
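As a side note, fetching the image over its public HTTPS URL only works while the object is publicly readable. Below is a minimal sketch, not part of the original answer, of what the handler body could look like if it read the source object directly with the aws-sdk client bundled in the Lambda runtime; the "resized/" destination prefix is a hypothetical choice to avoid re-triggering the same event.
// Sketch only: assumes the execution role has s3:GetObject and s3:PutObject on the bucket.
var AWS = require('aws-sdk');
var s3 = new AWS.S3();

// S3 event keys are URL-encoded, so decode before using them.
var decodedKey = decodeURIComponent(srcKey.replace(/\+/g, ' '));

s3.getObject({ Bucket: srcBucket, Key: decodedKey }, function (err, data) {
    if (err) return context.fail(err);
    gm(data.Body).resize(120).toBuffer('jpg', function (err, buffer) {
        if (err) return context.fail(err);
        // Writing under a different prefix avoids an infinite trigger loop
        // when the destination is the same bucket as the source.
        s3.putObject({ Bucket: srcBucket, Key: 'resized/' + decodedKey, Body: buffer, ContentType: 'image/jpeg' }, function (err) {
            if (err) return context.fail(err);
            context.succeed('Resized image written');
        });
    });
});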
Related
My Lambda function does not call the S3 upload function that is supposed to send back a URL to be stored in the DynamoDB table. I can't seem to pinpoint what's wrong here. I have tried calling the S3 upload function on its own, without the rest of the code, and it works fine.
const AWS = require('aws-sdk');
const s3 = new AWS.S3();
const BUCKET_NAME = 'BUCKET_NAME';
const dynamo = new AWS.DynamoDB.DocumentClient();
exports.handler = async (event, context) => {
let body;
let statusCode = 200;
const uploadFileToS3 = async (fileBinary) => {
try {
const base64File = fileBinary;
const decodedFile = Buffer.from(
base64File.replace(/^data:image\/\w+;base64,/, ''),
'base64'
);
const params = {
Bucket: BUCKET_NAME,
Key: `images/${new Date().toISOString()}.jpeg`,
Body: decodedFile,
ContentType: 'image/jpeg',
};
const uploadResult = await s3.upload(params).promise();
console.log(uploadResult)
return uploadResult;
} catch (e) {
console.error(e);
}
};
try {
switch (event.routeKey) {
case 'PUT /items':
let requestJSON = JSON.parse(event.body);
const fileURL = await uploadFileToS3(requestJSON.itemPicture);
await dynamo
.put({
TableName: 'TABLE_NAME',
Item: {
itemId: requestJSON.itemId,
userId: requestJSON.userId,
itemTitle: requestJSON.itemTitle,
itemDesc: requestJSON.itemDesc,
itemLocation: requestJSON.itemLocation,
itemPrice: requestJSON.itemPrice,
itemPicture: fileURL,
},
})
.promise();
body = `Put item ${requestJSON.itemId}`;
break;
default:
throw new Error(`Unsupported route: ` + `${event.routeKey}`);
}
} catch (err) {
statusCode = 400;
body = err.message;
} finally {
body = JSON.stringify(body);
}
return {
statusCode,
body,
headers,
};
};
update
OK, so I am having an authentication issue with passport/JWT when trying to grab the header after it is set at login. The JWT should return an ID, and I am trying to grab that ID and use it to update a user profile with a collection image upload. Here is where it gets weird. I get this error in the console:
you are not valid
node:internal/errors:464
ErrorCaptureStackTrace(err);
^
Error [ERR_HTTP_HEADERS_SENT]: Cannot set headers after they are sent to the client
at new NodeError (node:internal/errors:371:5)
at ServerResponse.setHeader (node:_http_outgoing:576:11)
at ServerResponse.header (C:\Users\tquig\OneDrive\Documents\GitHub\Team2\backend\node_modules\express\lib\response.js:776:10)
at ServerResponse.send (C:\Users\tquig\OneDrive\Documents\GitHub\Team2\backend\node_modules\express\lib\response.js:170:12)
at ServerResponse.json (C:\Users\tquig\OneDrive\Documents\GitHub\Team2\backend\node_modules\express\lib\response.js:267:15)
at C:\Users\tquig\OneDrive\Documents\GitHub\Team2\backend\routes\users.js:239:29
at processTicksAndRejections (node:internal/process/task_queues:96:5) {
code: 'ERR_HTTP_HEADERS_SENT'
}
Insomnia gives me back this error:
"AwsError: MissingRequiredParameter: Missing required key 'Key' in params"
I am not sure whether AWS is choking on the JWT token, or whether the ID from the JWT isn't actually being returned properly and I am not handling it right in general.
Here is the AWS middleware:
const S3 = require('aws-sdk/clients/s3')
const fs = require('fs')
const bucketName = process.env.bucketName
const region = process.env.bucketRegion
const accessKeyId = process.env.AWSAccessKeyId
const secretAccessKey = process.env.AWSSecretKey
const s3 = new S3({
region,
accessKeyId,
secretAccessKey
})
function uploadFile(file){
const fileStream = fs.createReadStream(file.path)
const uploadParams = {
Bucket: bucketName,
Body: fileStream,
key: file.filename
}
return s3.upload(uploadParams).promise()
}
exports.uploadFile = uploadFile
//downloads a file from s3
//not tested yet
/*
function getFileStream(fileKey){
const downloadParams = {
key: fileKey
bucket: bucketName
}
return s3.getObject(downloadParams).createReadStream()
}
exports.getFileStream = getFileStream
*/
Here is the authentication code:
require('dotenv').config();
const jwt = require('jsonwebtoken');
const mongoose = require('mongoose');
// middleware functionality to check logged in user
module.exports = async (req, res, next) => {
try{
const token = req.cookies.jwt;
if(!token) return res.status(401).json({errorMessage: "Unauthorized"});
const verified = jwt.verify(token, process.env.secretKey);
req.user = verified.id;
next();
} catch (err){
console.error(err);
res.status(401).json({errorMessage: "Unauthorized"});
}
}
and here is the route:
const storage = multer.diskStorage({
destination: function (req, file, cb) {
cb(null, "uploads");
},
filename: function (req, file, cb) {
cb(
null,
file.fieldname + "-" + Date.now() + path.extname(file.originalname)
);
},
});
const upload = multer({
storage: storage,
fileFilter: (req, file, cb) => {
if (file.mimetype == "image/png" || file.mimetype == "image/jpg" || file.mimetype == "image/jpeg") {
cb(null, true);
} else {
cb(null, false);
return cb(new Error('Only .png, .jpg and .jpeg format allowed!'));
}
} });
router.post("/collections", requireLogin, upload.single("myImage"), async (req, res) => {
const obj = {
img: {
data: req.file.filename,
contentType: req.file.contentType
}
}
const newCollection = new collections({
imgName: req.file.filename,
image: obj.img
});
const findBool = user.findById(req.body.id)
.then(() => res.json('found user'))
.catch(err => res.status(400).json('UserIdError: ' + err));
//need another check before uploading to aws to prevent unauthorized uploads
if (findBool){
//upload to aws
await uploadFile(req.file).then(() => res.json('AWS upload Complete'))
.catch(err => res.status(400).json('AwsError: ' + err))
//mongodb upload
try {
await user.findbyId(req.body.id).insertOne(newCollection)
} catch (error) {
res.status(400).json('updateError: ' + error)
}
//delete file from local storage
unlinkFile(file.path)
}
});
I am trying to have Lambda start a CodeBuild build when execution reaches a certain point within the Lambda function. Here is the current code I'm using for Lambda:
console.log('Loading function');
const aws = require('aws-sdk');
const s3 = new aws.S3();
exports.handler = async (event, context) => {
const codebuild = new aws.CodeBuild();
let body = JSON.parse(event.body);
let key = body.model;
var getParams = {
Bucket: 'bucketname', // your bucket name,
Key: key + '/config/training_parameters.json' // path to the object you're looking for
}
if (key) {
const objects = await s3.listObjects({
Bucket: 'bucketname',
Prefix: key + "/data"
}).promise();
console.log(objects)
if (objects.Contents.length == 3) {
console.log("Pushing")
await s3.getObject(getParams, function(err, data) {
if (err)
console.log(err);
if (data) {
let objectData = JSON.parse(data.Body.toString('utf-8'));
const build = {
projectName: "projname",
environmentVariablesOverride: [
{
name: 'MODEL_NAME',
value: objectData.title,
type: 'PLAINTEXT',
},
]
};
console.log(objectData.title)
codebuild.startBuild(build,function(err, data){
if (err) {
console.log(err, err.stack);
}
else {
console.log(data);
}
});
console.log("Done with codebuild")
}
}).promise();
const message = {
'message': 'Execution started successfully!',
}
return {
'statusCode': 200,
'headers': {'Content-Type': 'application/json'},
'body': JSON.stringify(message)
};
}
}
};
Specifically, this part should trigger it:
codebuild.startBuild(build,function(err, data){
if (err) {
console.log(err, err.stack);
}
else {
console.log(data);
}
});
But it's not; it even prints the log output that comes after the call. I'm thinking it has something to do with promises/await/async, but I can't find the right solution. Any help would be appreciated.
As you suspected, the problem is related to promises. Change your code like this:
const result = await codebuild.startBuild(build).promise();
Provided your Lambda execution role has permission to start CodeBuild builds, it should work.
You can change your s3.getObject call in the same way, without the callback function:
const file = await s3.getObject(getParams).promise();
console.log(file.Body);
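Putting both changes together, the relevant part of the handler might look roughly like this (a sketch that reuses the getParams, project name, and logging from the question):
// Sketch: await both SDK calls via .promise() so the handler does not return before they finish.
const file = await s3.getObject(getParams).promise();
const objectData = JSON.parse(file.Body.toString('utf-8'));

const build = {
    projectName: "projname",
    environmentVariablesOverride: [
        {
            name: 'MODEL_NAME',
            value: objectData.title,
            type: 'PLAINTEXT',
        },
    ],
};

const result = await codebuild.startBuild(build).promise();
console.log("Started build", result.build && result.build.id);
console.log("Done with codebuild");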
I am currently trying to upload images from my website to AWS S3. The upload itself works, but when I try to view the images they download instead of displaying in the browser. I read that there is a way to set the file type so this would not happen, but I am not sure how to do that. Any help would be great.
router.post('/heroes/createNewHeroes', function(req,res) {
var formidable = require('formidable'),
http = require('http'),
util = require('util');
var form = new formidable.IncomingForm();
form.parse(req, function(err, fields, files) {
console.log(fields);
console.log(files);
// Load the AWS SDK for Node.js
var AWS = require('aws-sdk');
var shortid = require('shortid');
var fs = require('fs');
var fileStream = fs.createReadStream(files.asset.path);
var newFilename = shortid.generate()+"_"+files.asset.name;
// Set your region for future requests.
AWS.config.region = 'us-west-2';
AWS.config.accessKeyId = 'access Key';
AWS.config.secretAccessKey = 'secret Key';
console.log(newFilename);
fileStream.on('error', function (err) {
if (err) { throw err; }
});
fileStream.on('open', function () {
var s3bucket = new AWS.S3({params: {Bucket: ' '}});
s3bucket.createBucket(function() {
var params = {Key: newFilename, Body: fileStream};
s3bucket.upload(params, function(err, data) {
if (err) {
console.log("Error uploading data: ", err);
} else {
console.log("Successfully uploaded data");
projectX.createHeroes(['plantTypes', 'asset', 'cost', 'energy', 'isSunProducer', 'isShooter', 'isExploding', 'sunFrequency', 'shootingFrequency', 'damage'], [fields.plantTypes, newFilename, fields.cost, fields.energy, fields.isSunProducer, fields.isShooter, fields.isExploding, fields.sunFrequency, fields.shootingFrequency, fields.damage], function(data){
res.redirect('/heroes')
});
}
});
});
});
});
});
Set ContentType on the upload params so S3 serves the object with an image content type and the browser displays it inline instead of downloading it:
var params = {Key: newFilename, ContentType: 'image/png', Body: fileStream};
http://docs.aws.amazon.com/AWSJavaScriptSDK/latest/AWS/S3.html#upload-property
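If you don't want to hardcode the type, one option is to pass through whatever formidable reported for the uploaded file. This is only a sketch, assuming formidable v1, which exposes the detected MIME type on files.asset.type:
// Sketch: reuse the MIME type formidable detected, falling back to a generic binary type.
var params = {
    Key: newFilename,
    ContentType: files.asset.type || 'application/octet-stream',
    Body: fileStream
};
s3bucket.upload(params, function (err, data) {
    if (err) {
        console.log("Error uploading data: ", err);
    } else {
        console.log("Successfully uploaded data as", params.ContentType);
    }
});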
Just put "contentType: multerS3.AUTO_CONTENT_TYPE " . It will work .
Ex:
var upload = multer({
storage: multerS3({
s3: s3,
bucket: 'some-bucket',
contentType: multerS3.AUTO_CONTENT_TYPE,
key: function (req, file, cb) {
cb(null, Date.now().toString())
}
})
})
Visit this link for more details: https://github.com/badunk/multer-s3
This helped me:
storage: multerS3({
s3: s3,
bucket: "bucketname",
acl: "public-read",
contentType: multerS3.AUTO_CONTENT_TYPE,
key: function(req, file, cb) {
console.log("req.file", file);
cb(null, `${Date.now()}-${file.originalname}`);
}
})
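For completeness, the configured middleware would be used on a route roughly like this (a sketch, assuming upload is the multer instance built from the storage above; the route path and field name are placeholders):
// Sketch: wire the multer-s3 storage into an Express route.
router.post('/upload', upload.single('myImage'), function (req, res) {
    // multer-s3 puts the uploaded object's URL on req.file.location
    res.json({ url: req.file.location });
});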
I am working on an AWS Lambda function. I am successfully calling the NASA APOD API and getting back the values. I want to take the URL for the image, download that image, and then upload it into S3. I am getting an error when I try to access the "test.jpg" image: "Error: EACCES: permission denied, open 'test.jpg'". If I move the s3Bucket.putObject outside the http.request, data is null. I know I am missing something simple. Thoughts?
function GetAPOD(intent, session, callback) {
var nasa_api_key = 'demo-key'
, nasa_api_path = '/planetary/apod?api_key=' + nasa_api_key;
var options = {
host: 'api.nasa.gov',
port: 443,
path: nasa_api_path,
method: 'GET'
};
var req = https.request(options, function (res) {
res.setEncoding('utf-8');
var responseString = '';
res.on('data', function (data) {
responseString += data;
});
res.on('end', function () {
console.log('API Response: ' + responseString);
var responseObject = JSON.parse(responseString)
, image_date = responseObject['date']
, image_title = responseObject['title']
, image_url = responseObject['url']
, image_hdurl = responseObject['hdurl']
, image_desc = responseObject['explanation'];
var s3Bucket = new AWS.S3( { params: {Bucket: 'nasa-apod'} } );
var fs = require('fs');
var file = fs.createWriteStream("test.jpg");
var request = http.get(image_url, function(response) {
response.pipe(file);
var data = {Key: "test.jpg", Body: file};
s3Bucket.putObject(data, function(err, data) {
if (err) {
console.log('Error uploading data: ', data);
}
else {
console.log('succesfully uploaded the image!');
}
});
});
});
});
req.on('error', function (e) {
console.error('HTTP error: ' + e.message);
});
//req.write();
req.end();
}
You need to be writing the file to /tmp. That's the only directory in the Lambda environment that you will have write access to.
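For example, if you do want to stage the file on disk first, a minimal sketch (keeping the s3Bucket client, image_url, and file name from the question) would write under /tmp and upload only after the download has finished:
var fs = require('fs');
var path = require('path');

// /tmp is the only writable directory in the Lambda environment.
var localPath = path.join('/tmp', 'test.jpg');
var file = fs.createWriteStream(localPath);

http.get(image_url, function (response) {
    response.pipe(file);
    file.on('finish', function () {
        // Upload the completed file, not the still-open write stream.
        s3Bucket.upload({ Bucket: 'nasa-apod', Key: 'test.jpg', Body: fs.createReadStream(localPath), ContentType: 'image/jpeg' }, function (err) {
            if (err) {
                console.log('Error uploading data: ', err);
            } else {
                console.log('Successfully uploaded the image!');
            }
        });
    });
});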
I got it! Thank you Mark B for the help. I was able to take the data from the stream and write it to the bucket without saving it locally. I did have to change my IAM role to allow s3:PutObject.
function GetAPOD(intent, session, callback) {
var nasa_api_key = 'demo-key'
, nasa_api_path = '/planetary/apod?api_key=' + nasa_api_key;
var options = {
host: 'api.nasa.gov',
port: 443,
path: nasa_api_path,
method: 'GET'
};
var req = https.request(options, function (res) {
res.setEncoding('utf-8');
var responseString = '';
res.on('data', function (data) {
responseString += data;
});
res.on('end', function () {
// console.log('API Response: ' + responseString);
var responseObject = JSON.parse(responseString)
, image_date = responseObject['date']
, image_title = responseObject['title']
, image_url = responseObject['url']
, image_hdurl = responseObject['hdurl']
, image_desc = responseObject['explanation'];
var image_name = image_date + '.jpg';
var s3 = new AWS.S3();
var s3Bucket = new AWS.S3( { params: {Bucket: 'nasa-apod'} } );
var request = http.get(image_url, function(response) {
var image_chunks = [];
response.on('data', function (data) {
image_chunks.push(data); // collect every chunk; keeping only the last chunk would corrupt larger images
});
response.on('end', function () {
var param_data = {Key: image_name, Body: Buffer.concat(image_chunks), ContentType: "image/jpeg", ContentLength: response.headers['content-length']};
s3Bucket.putObject(param_data, function(err, output_data) {
if (err) {
console.log('Error uploading data to S3: ' + err);
}
});
});
});
request.end();
});
});
req.on('error', function (e) {
console.error('HTTP error: ' + e.message);
});
req.end();
}
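A further simplification, if you would rather not buffer the whole image in memory, is to hand the response stream straight to s3.upload(), which accepts a body of unknown length. This is a sketch reusing the image_url, image_name, and bucket from the code above:
// Sketch: stream the downloaded image straight into S3 without buffering it first.
http.get(image_url, function (response) {
    s3Bucket.upload({ Bucket: 'nasa-apod', Key: image_name, Body: response, ContentType: 'image/jpeg' }, function (err, output_data) {
        if (err) {
            console.log('Error uploading data to S3: ' + err);
        } else {
            console.log('Uploaded to ' + output_data.Location);
        }
    });
});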