Lambda function not calling S3 bucket upload

My Lambda function does not call the S3 upload function that is supposed to send back a URL to be stored in the DynamoDB table. I can't seem to pinpoint what's wrong here. I have tried calling the Lambda upload-to-S3 function on its own, without the rest of the code, and it works fine.
const AWS = require('aws-sdk');
const s3 = new AWS.S3();
const BUCKET_NAME = 'BUCKET_NAME';
const dynamo = new AWS.DynamoDB.DocumentClient();

exports.handler = async (event, context) => {
  let body;
  let statusCode = 200;
  const headers = { 'Content-Type': 'application/json' }; // assumed default; headers is used in the response below but was never defined

  const uploadFileToS3 = async (fileBinary) => {
    try {
      const base64File = fileBinary;
      const decodedFile = Buffer.from(
        base64File.replace(/^data:image\/\w+;base64,/, ''),
        'base64'
      );
      const params = {
        Bucket: BUCKET_NAME,
        Key: `images/${new Date().toISOString()}.jpeg`,
        Body: decodedFile,
        ContentType: 'image/jpeg',
      };
      const uploadResult = await s3.upload(params).promise();
      console.log(uploadResult);
      return uploadResult;
    } catch (e) {
      console.error(e);
    }
  };

  try {
    switch (event.routeKey) {
      case 'PUT /items':
        let requestJSON = JSON.parse(event.body);
        const fileURL = await uploadFileToS3(requestJSON.itemPicture);
        await dynamo
          .put({
            TableName: 'TABLE_NAME',
            Item: {
              itemId: requestJSON.itemId,
              userId: requestJSON.userId,
              itemTitle: requestJSON.itemTitle,
              itemDesc: requestJSON.itemDesc,
              itemLocation: requestJSON.itemLocation,
              itemPrice: requestJSON.itemPrice,
              itemPicture: fileURL,
            },
          })
          .promise();
        body = `Put item ${requestJSON.itemId}`;
        break;
      default:
        throw new Error(`Unsupported route: ${event.routeKey}`);
    }
  } catch (err) {
    statusCode = 400;
    body = err.message;
  } finally {
    body = JSON.stringify(body);
  }

  return {
    statusCode,
    body,
    headers,
  };
};
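One detail worth checking (a hedged observation, since the question doesn't confirm it): s3.upload(...).promise() resolves to a result object, not a URL string, so fileURL as written stores the whole object in DynamoDB. A minimal sketch of extracting just the URL, using the same names as the handler above:

const uploadResult = await uploadFileToS3(requestJSON.itemPicture);
const fileURL = uploadResult && uploadResult.Location; // Location holds the uploaded object's URL in the SDK's upload result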


Image Upload with AWS using ID returned from JWT

Update

OK, so I am having an authentication issue with Passport/JWT when trying to grab the header after it got set at login. JWT should return an ID, and I am trying to grab that ID and use it to update a user profile with a collection image upload. Here is where it gets weird. I get this error in the console:
you are not valid
node:internal/errors:464
ErrorCaptureStackTrace(err);
^
Error [ERR_HTTP_HEADERS_SENT]: Cannot set headers after they are sent to the client
at new NodeError (node:internal/errors:371:5)
at ServerResponse.setHeader (node:_http_outgoing:576:11)
at ServerResponse.header (C:\Users\tquig\OneDrive\Documents\GitHub\Team2\backend\node_modules\express\lib\response.js:776:10)
at ServerResponse.send (C:\Users\tquig\OneDrive\Documents\GitHub\Team2\backend\node_modules\express\lib\response.js:170:12)
at ServerResponse.json (C:\Users\tquig\OneDrive\Documents\GitHub\Team2\backend\node_modules\express\lib\response.js:267:15)
at C:\Users\tquig\OneDrive\Documents\GitHub\Team2\backend\routes\users.js:239:29
at processTicksAndRejections (node:internal/process/task_queues:96:5) {
code: 'ERR_HTTP_HEADERS_SENT'
}
Insomnia gives me back this error:
"AwsError: MissingRequiredParameter: Missing required key 'Key' in params"
I am not sure whether AWS is choking on the JWT token, or whether my JWT ID isn't actually being returned properly and I am not doing it right in general.
Here is the AWS middleware:
const S3 = require('aws-sdk/clients/s3')
const fs = require('fs')

const bucketName = process.env.bucketName
const region = process.env.bucketRegion
const accessKeyId = process.env.AWSAccessKeyId
const secretAccessKey = process.env.AWSSecretKey

const s3 = new S3({
  region,
  accessKeyId,
  secretAccessKey
})

function uploadFile(file) {
  const fileStream = fs.createReadStream(file.path)
  const uploadParams = {
    Bucket: bucketName,
    Body: fileStream,
    key: file.filename
  }
  return s3.upload(uploadParams).promise()
}
exports.uploadFile = uploadFile

// downloads a file from s3
// not tested yet
/*
function getFileStream(fileKey) {
  const downloadParams = {
    key: fileKey
    bucket: bucketName
  }
  return s3.getObject(downloadParams).createReadStream()
}
exports.getFileStream = getFileStream
*/
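For what it's worth (a hedged observation, not a confirmed fix): the Insomnia error names the missing parameter literally. s3.upload expects Key with a capital K, and uploadParams above passes lowercase key. A corrected sketch of the params object:

const uploadParams = {
  Bucket: bucketName,
  Body: fileStream,
  Key: file.filename // the SDK requires 'Key', capitalized; lowercase 'key' raises MissingRequiredParameter
}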
Here is the authentication code:
require('dotenv').config();
const jwt = require('jsonwebtoken');
const mongoose = require('mongoose');

// middleware functionality to check logged-in user
module.exports = async (req, res, next) => {
  try {
    const token = req.cookies.jwt;
    if (!token) return res.status(401).json({ errorMessage: "Unauthorized" });
    const verified = jwt.verify(token, process.env.secretKey);
    req.user = verified.id;
    next();
  } catch (err) {
    console.error(err);
    res.status(401).json({ errorMessage: "Unauthorized" });
  }
}
and here is the route:
const storage = multer.diskStorage({
  destination: function (req, file, cb) {
    cb(null, "uploads");
  },
  filename: function (req, file, cb) {
    cb(
      null,
      file.fieldname + "-" + Date.now() + path.extname(file.originalname)
    );
  },
});

const upload = multer({
  storage: storage,
  fileFilter: (req, file, cb) => {
    if (file.mimetype == "image/png" || file.mimetype == "image/jpg" || file.mimetype == "image/jpeg") {
      cb(null, true);
    } else {
      cb(null, false);
      return cb(new Error('Only .png, .jpg and .jpeg format allowed!'));
    }
  }
});

router.post("/collections", requireLogin, upload.single("myImage"), async (req, res) => {
  const obj = {
    img: {
      data: req.file.filename,
      contentType: req.file.contentType
    }
  }
  const newCollection = new collections({
    imgName: req.file.filename,
    image: obj.img
  });
  const findBool = user.findById(req.body.id)
    .then(() => res.json('found user'))
    .catch(err => res.status(400).json('UserIdError: ' + err));
  // need another check before uploading to aws to prevent unauthorized uploads
  if (findBool) {
    // upload to aws
    await uploadFile(req.file).then(() => res.json('AWS upload Complete'))
      .catch(err => res.status(400).json('AwsError: ' + err))
    // mongodb upload
    try {
      await user.findbyId(req.body.id).insertOne(newCollection)
    } catch (error) {
      res.status(400).json('updateError: ' + error)
    }
    // delete file from local storage
    unlinkFile(file.path)
  }
});
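As a hedged aside on the ERR_HTTP_HEADERS_SENT part: that error usually means two responses were sent for one request. In the route above, res.json('found user') fires inside the findById chain, and the later res.json('AWS upload Complete') then tries to send a second response. A sketch of a single-response flow, reusing the names from the route (req.user comes from the auth middleware above):

router.post("/collections", requireLogin, upload.single("myImage"), async (req, res) => {
  try {
    // await the lookup instead of responding inside .then()
    const found = await user.findById(req.user);
    if (!found) return res.status(400).json('UserIdError: user not found');
    await uploadFile(req.file);
    // ...persist newCollection to MongoDB here...
    return res.json('AWS upload complete'); // exactly one response per request
  } catch (err) {
    return res.status(400).json('AwsError: ' + err);
  }
});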

Lambda not triggering CodeBuild to run?

I am trying to have Lambda trigger a CodeBuild project when it hits a certain point within the Lambda function. Here is the current code I'm using for the Lambda:
console.log('Loading function');
const aws = require('aws-sdk');
const s3 = new aws.S3();

exports.handler = async (event, context) => {
  const codebuild = new aws.CodeBuild();
  let body = JSON.parse(event.body);
  let key = body.model;
  var getParams = {
    Bucket: 'bucketname', // your bucket name
    Key: key + '/config/training_parameters.json' // path to the object you're looking for
  }
  if (key) {
    const objects = await s3.listObjects({
      Bucket: 'bucketname',
      Prefix: key + "/data"
    }).promise();
    console.log(objects)
    if (objects.Contents.length == 3) {
      console.log("Pushing")
      await s3.getObject(getParams, function (err, data) {
        if (err)
          console.log(err);
        if (data) {
          let objectData = JSON.parse(data.Body.toString('utf-8'));
          const build = {
            projectName: "projname",
            environmentVariablesOverride: [
              {
                name: 'MODEL_NAME',
                value: objectData.title,
                type: 'PLAINTEXT',
              },
            ]
          };
          console.log(objectData.title)
          codebuild.startBuild(build, function (err, data) {
            if (err) {
              console.log(err, err.stack);
            }
            else {
              console.log(data);
            }
          });
          console.log("Done with codebuild")
        }
      }).promise();
      const message = {
        'message': 'Execution started successfully!',
      }
      return {
        'statusCode': 200,
        'headers': { 'Content-Type': 'application/json' },
        'body': JSON.stringify(message)
      };
    }
  }
};
Specifically this part should trigger it:
codebuild.startBuild(build, function (err, data) {
  if (err) {
    console.log(err, err.stack);
  }
  else {
    console.log(data);
  }
});
But it's not; it even logs output after the function. I'm thinking it's something to do with promises/await/async, but I can't find the right solution. Any help would be appreciated.
As you pointed out, the problem is related to promises. Change your code like this:
const result = await codebuild.startBuild(build).promise();
And if you have configured your Lambda's permissions for CodeBuild, it should work.
You can change your s3.getObject call the same way, without the callback function:
const file = await s3.getObject(getParams).promise();
console.log(file.Body);
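Putting both changes together, the middle of the handler might look like this (a sketch using the question's names, not a tested drop-in):

const file = await s3.getObject(getParams).promise(); // no callback; just await the promise
const objectData = JSON.parse(file.Body.toString('utf-8'));
const build = {
  projectName: "projname",
  environmentVariablesOverride: [
    { name: 'MODEL_NAME', value: objectData.title, type: 'PLAINTEXT' },
  ],
};
const result = await codebuild.startBuild(build).promise(); // resolves once the build has been started
console.log(result);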

AWS Cognito redirect after user confirmation

I have followed the solutions and answers from this Stack Overflow question:
How to redirect after confirm amazon cognito using confirmation URL?
but I am getting this error:
{"errorType":"TypeError","errorMessage":"Cannot read property 'code' of undefined","trace":["TypeError: Cannot read property 'code' of undefined"," at Runtime.module.exports.handler (/var/task/index.js:10:54)"," at Runtime.handleOnce (/var/runtime/Runtime.js:66:25)"]}
The code of the custom message Lambda is:
exports.handler = (event, context, callback) => {
  // Identify why this function was invoked
  if (event.triggerSource === "CustomMessage_SignUp") {
    console.log('function triggered');
    console.log(event);
    // Ensure that your message contains event.request.codeParameter.
    // This is the placeholder for the code that will be sent.
    const { codeParameter } = event.request
    const { userName, region } = event
    const { clientId } = event.callerContext
    const { email } = event.request.userAttributes
    const url = 'https://xxxxxxxxx.execute-api.ap-southeast-1.amazonaws.com/confirm'
    const link = `<a href="${url}?code=${codeParameter}&username=${userName}&clientId=${clientId}&region=${region}&email=${email}">here</a>`
    event.response.emailSubject = "Your verification link"; // event.request.codeParameter
    event.response.emailMessage = `Thank you for signing up. Click ${link} to verify your email.`;
  }
  // Return to Amazon Cognito
  callback(null, event);
};
The code for the redirect Lambda is:
'use strict';
var AWS = require('aws-sdk');
AWS.config.setPromisesDependency(require('bluebird'));

var CognitoIdentityServiceProvider = new AWS.CognitoIdentityServiceProvider({ apiVersion: '2016-04-19', region: process.env.REGION });

module.exports.handler = (req, context, callback) => {
  console.log('req');
  console.log(req);
  const confirmationCode = req.queryStringParameters.code
  const username = req.queryStringParameters.username
  const clientId = req.queryStringParameters.clientId
  const region = req.queryStringParameters.region
  const email = req.queryStringParameters.email
  let params = {
    ClientId: clientId,
    ConfirmationCode: confirmationCode,
    Username: username
  }
  var confirmSignUp = CognitoIdentityServiceProvider.confirmSignUp(params).promise()
  confirmSignUp.then(
    (data) => {
      let redirectUrl = process.env.POST_REGISTRATION_VERIFICATION_REDIRECT_URL;
      const response = {
        statusCode: 301,
        headers: {
          Location: redirectUrl,
        }
      };
      return callback(null, response);
    }
  ).catch(
    (error) => {
      callback(error)
    }
  )
}
The customize_message Lambda is triggered by AWS Cognito, which sends the code for verification. The redirect Lambda is triggered by API Gateway.
Please help, and thank you in advance.
Something is wrong with how you access your req parameter. Try using this code
'use strict';
var AWS = require('aws-sdk');
AWS.config.setPromisesDependency(require('bluebird'));

var CognitoIdentityServiceProvider = new AWS.CognitoIdentityServiceProvider({ apiVersion: '2016-04-19', region: process.env.REGION });

module.exports.handler = (req, context, callback) => {
  console.log('req');
  console.log(req);
  const confirmationCode = req.code;
  const username = req.username;
  const clientId = req.clientId;
  const region = req.region;
  const email = req.email;
  let params = {
    ClientId: clientId,
    ConfirmationCode: confirmationCode,
    Username: username
  }
  var confirmSignUp = CognitoIdentityServiceProvider.confirmSignUp(params).promise()
  confirmSignUp.then(
    (data) => {
      let redirectUrl = process.env.POST_REGISTRATION_VERIFICATION_REDIRECT_URL;
      const response = {
        statusCode: 301,
        headers: {
          Location: redirectUrl,
        }
      };
      return callback(null, response);
    }
  ).catch(
    (error) => {
      callback(error)
    }
  )
}
instead of your code.
The only change is that instead of all this
const confirmationCode = req.queryStringParameters.code;
const username = req.queryStringParameters.username;
const clientId = req.queryStringParameters.clientId;
const region = req.queryStringParameters.region;
const email = req.queryStringParameters.email;
you need to use this
const confirmationCode = req.code;
const username = req.username;
const clientId = req.clientId;
const region = req.region;
const email = req.email;
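Why this helps (a hedged explanation, not spelled out in the answer): with a Lambda proxy integration, API Gateway nests query values under event.queryStringParameters, while a non-proxy integration with a mapping template can place them at the top level of the event. A defensive sketch that accepts either shape:

// accept either a proxy-integration event or a mapped (non-proxy) event
const query = req.queryStringParameters || req;
const confirmationCode = query.code;
const username = query.username;
const clientId = query.clientId;
const region = query.region;
const email = query.email;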

AWS Lambda - unable to convert SDK call to promise

I have a Lambda which looks like so:
module.exports.handler = (event, context, callback) => {
  AWS.config.setPromisesDependency(null);
  const uploadPromise = s3.upload(params).promise();
  uploadPromise.then((data) => {
    const response = {
      ...
    };
    return response;
  }).catch((error) => {
    console.log(error);
  });
};
Calling it from Postman results in a server error. The CloudWatch logs have no further info.
Doing:
s3.upload(params, (error, data) => {
  if (error) {
    console.error("error occurred storing to s3: ", error);
    return;
  }
  const response = {
    ...
  };
  return response;
});
does not result in a server error.
I am trying to follow the information from AWS that can be found here:
https://aws.amazon.com/blogs/developer/support-for-promises-in-the-sdk/
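A likely explanation for the difference (hedged, since the logs show nothing): the promise-based handler is not async and never invokes callback, so Lambda finishes the invocation without any response, and API Gateway reports that to Postman as a server error. If you want to stay callback-style, both branches have to hand a proxy response to callback:

// callback-style sketch: route both outcomes through callback
uploadPromise.then((data) => {
  callback(null, { statusCode: 200, body: JSON.stringify(data) });
}).catch((error) => {
  console.log(error);
  callback(error);
});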
Postman is able to upload to Lambda by doing the following with async/await and try/catch:
exports.handler = async function (event, context) {
  const s3 = new AWS.S3();
  const encodedImage = util.inspect(event.body);
  const decodedImage = Buffer.from(encodedImage, "base64");
  const filePath = "test.png";
  const params = {
    Body: decodedImage,
    Bucket: "my bucket",
    Key: filePath,
    ACL: "public-read",
    ContentType: "image/png"
  };
  try {
    const result = await s3.upload(params).promise();
    const response = {
      statusCode: 200,
      headers: {
        my_header: "my_value"
      },
      body: JSON.stringify(result),
      isBase64Encoded: false
    };
    return response;
  } catch (error) {
    console.log('error')
  }
};

Lambda function s3.getObject returns "Internal server error"

This code works just fine locally using Node.js: images download from S3 and write to file.
However, in Lambda (using Node.js 8.10) I'm getting "Internal Server Error" when testing the function, with this in the logs:
"Execution failed due to configuration error: Malformed Lambda proxy response"
I am using the Lambda proxy response in the callback, but clearly some AWS SDK error with S3 is not getting caught.
I do have a role set up with S3 full access that the Lambda uses.
What am I missing with my first Lambda function? I've followed the docs and tutorials correctly, and it is not working.
const async = require('async')
const aws = require('aws-sdk')
const fs = require('fs')
const exec = require('child_process').exec

const bucket = 'mybucket'
const s3Src = 'bucket_prefix'
const s3Dst = 'new_prefix'
const local = `${__dirname}/local/`

aws.config.region = 'us-west-2'
const s3 = new aws.S3()

exports.handler = async (event, context, callback) => {
  const outputImage = 'hello_world.png'
  const rack = JSON.parse(event.body)
  const images = my.images

  async.waterfall([
    function download(next) {
      let downloaded = 0
      let errors = false
      let errorMessages = []
      for (let i = 0; i < images.length; i++) {
        let key = `${s3Src}/${images[i].prefix}/${images[i].image}`,
          localImage = `${local}${images[i].image}`
        getBucketObject(bucket, key, localImage).then(() => {
          ++downloaded
          if (downloaded === images.length) { // js is non-blocking, need to check if all images have been downloaded. If so, then go to next function
            if (errors) {
              next(errorMessages.join(' '))
            } else {
              next(null)
            }
          }
        }).catch(error => {
          errorMessages.push(`${error} - ${localImage}`)
          ++downloaded
          errors = true
        })
      }
      function getBucketObject(bucket, key, dest) {
        return new Promise((resolve, reject) => {
          let ws = fs.createWriteStream(dest)
          ws.once('error', (err) => {
            return reject(err)
          })
          ws.once('finish', () => {
            return resolve(dest)
          })
          let s3Stream = s3.getObject({
            Bucket: bucket,
            Key: key
          }).createReadStream()
          s3Stream.pause() // Under load this will prevent first few bytes from being lost
          s3Stream.on('error', (err) => {
            return reject(err)
          })
          s3Stream.pipe(ws)
          s3Stream.resume()
        })
      }
    }
  ], err => {
    if (err) {
      let response = {
        "statusCode": 400,
        "headers": {
          "my_header": "my_value"
        },
        "body": JSON.stringify(err),
        "isBase64Encoded": false
      }
      callback(null, response)
    } else {
      let response = {
        "statusCode": 200,
        "headers": {
          "my_header": "my_value"
        },
        "body": JSON.stringify(`<img src="${local}${outputImage}" />`),
        "isBase64Encoded": false
      }
      callback(null, response)
    }
  })
}
A response should always be sent to the callback function. Your code sends a response only on error. That's why the Lambda executor thinks your code fails.
BTW, shouldn't the functions in your async.waterfall be separated with a comma, as two tasks?
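Another detail worth flagging (my own hedged reading, not part of the answer above): the handler is declared async but delivers its result through callback. For async handlers, Lambda uses the returned promise as the response, and this function body returns undefined as soon as async.waterfall is kicked off, which API Gateway then reports as a malformed proxy response. A minimal sketch that keeps the callback style by dropping the async keyword:

// non-async handler: Lambda now waits for callback instead of the returned (undefined) value
exports.handler = (event, context, callback) => {
  async.waterfall([/* download task */], err => {
    callback(null, {
      statusCode: err ? 400 : 200,
      body: JSON.stringify(err || 'ok'),
      isBase64Encoded: false
    });
  });
};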
Locally I've been running Node.js 10.10, and Lambda is currently at 8.10; that is a big part of it, I'm sure. In the end I had to remove the async keyword from the handler and move the getBucketObject function out of the waterfall. Once I made those adjustments it started working. Another issue: the downloaded images needed to go into the "/tmp" directory.
const aws = require('aws-sdk')
const async = require('async')
const fs = require('fs')

const bucket = 'mybucket'
const s3Src = 'mys3src'
const local = '/tmp/'

aws.config.region = 'us-west-2'
const s3 = new aws.S3()

exports.handler = (event, context, callback) => {
  const outputImage = 'hello_world.png'
  async.waterfall([
    function download(next) {
      let downloaded = 0,
        errorMessages = []
      for (let i = 0; i < event['images'].length; i++) {
        let key = `${s3Src}/${event['images'][i]['prefix']}/${event['images'][i]['image']}`,
          localImage = `${local}${event['images'][i]['image']}`
        getBucketObject(bucket, key, localImage).then(() => {
          downloaded++
          if (downloaded === event['images'].length) {
            if (errorMessages.length > 0) {
              next(errorMessages.join(' '))
            } else {
              console.log('All downloaded')
              next(null)
            }
          }
        }).catch(error => {
          downloaded++
          errorMessages.push(`${error} - ${localImage}`)
          if (downloaded === event['images'].length) {
            next(errorMessages.join(' '))
          }
        })
      }
    }
  ], err => {
    if (err) {
      console.error(err)
      callback(null, {
        "statusCode": 400,
        "body": JSON.stringify(err),
        "isBase64Encoded": false
      })
    } else {
      console.log('event image created!')
      callback(null, {
        "statusCode": 200,
        "body": JSON.stringify(`<img src="${local}${outputImage}" />`),
        "isBase64Encoded": false
      })
    }
  })
}

function getBucketObject(bucket, key, dest) {
  return new Promise((resolve, reject) => {
    let ws = fs.createWriteStream(dest)
    ws.once('error', (err) => {
      return reject(err)
    })
    ws.once('finish', () => {
      return resolve(dest)
    })
    let s3Stream = s3.getObject({
      Bucket: bucket,
      Key: key
    }).createReadStream()
    s3Stream.pause() // Under load this will prevent first few bytes from being lost
    s3Stream.on('error', (err) => {
      return reject(err)
    })
    s3Stream.pipe(ws)
    s3Stream.resume()
  })
}