How to write and read files in /tmp on AWS Lambda with Node.js

I am trying to write to and read from the /tmp dir in Lambda during function execution. I know the best way would be to use S3, but for this project I have to use the Node file system:
const fs = require('fs');

exports.handler = async (event) => {
  const path = '/tem/hi.json';
  const write = (file) => {
    fs.writeFile(file, "Hello World!", function(err) {
      if (err) return console.log(err);
      return {
        statusCode: 200,
        body: JSON.stringify('data was written')
      };
    });
  };
  return write(path);
};

You have a typo in your file path.
Change
const path = '/tem/hi.json';
to
const path = '/tmp/hi.json';
(/tmp is the only writable directory in the Lambda execution environment.)
Also, fs.writeFile is an asynchronous operation. Promisify it so you can await it:
const write = file => {
  return new Promise((res, rej) => {
    fs.writeFile(file, JSON.stringify({ message: 'hello world' }), (err) => {
      if (err) {
        return rej(err);
      }
      return res({
        statusCode: 200,
        body: JSON.stringify({ message: 'File written successfully' })
      });
    });
  });
};
Finally, at the end of your handler, invoke it like this:
return await write(path)
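Alternatively, on the Node.js 10+ runtimes you can use the built-in promise-based fs.promises API and skip the manual promisification entirely. A minimal sketch of the same handler:

const fs = require('fs').promises;

exports.handler = async (event) => {
  const path = '/tmp/hi.json';
  // fs.promises.writeFile returns a promise, so it can be awaited directly.
  await fs.writeFile(path, JSON.stringify({ message: 'hello world' }));
  return {
    statusCode: 200,
    body: JSON.stringify({ message: 'File written successfully' })
  };
};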

fs.writeFile is an asynchronous operation, so the Lambda can end before it finishes. You can use fs.writeFileSync to block execution until the file is successfully written:
const write = (file) => {
  try {
    fs.writeFileSync(file, "Hello World!");
    return {
      statusCode: 200,
      body: 'data was written'
    };
  } catch (err) {
    console.log(err);
    return {
      statusCode: 500,
      body: 'data was not written'
    };
  }
};
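To read the file back, fs.readFileSync works the same way; a minimal sketch, assuming the same const fs = require('fs') as above. Note that files in /tmp only persist for the lifetime of the execution environment, not across cold starts:

const read = (file) => {
  try {
    const data = fs.readFileSync(file, 'utf8');
    return {
      statusCode: 200,
      body: data
    };
  } catch (err) {
    // The file may not exist if this is a fresh execution environment.
    console.log(err);
    return {
      statusCode: 404,
      body: 'file not found'
    };
  }
};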

Related

Handling multipart/form-data in AWS Lambda

I'm trying to send a request with an image to a Lambda function through API Gateway.
I'm using this piece of code to parse the form-data object received by my Lambda function. I then upload the image to S3, but when downloading and opening the image from S3, I see that it's corrupt.
I have tried the following npm packages:
parse-multipart
parse-multipart-data
Neither works: with both I get an empty parts array. The piece of code I use does get results in the array, but the buffers seem to be corrupt.
The problem seems to be in this line of code:
Buffer.from(item.slice(item.search(/Content-Type:\s.+/g) + item.match(/Content-Type:\s.+/g)[0].length + 4, -4), 'binary')
Does anyone have a solution for me?
You can use busboy to parse the multipart form data before uploading to S3 as shown below:
// This code uses ECMAScript modules (ESM), not CommonJS syntax,
// so make sure you add "type": "module" to your package.json.
import {S3Client, PutObjectCommand} from '@aws-sdk/client-s3';
import Busboy from 'busboy';

// Initialize the client outside the function handler to take advantage of execution environment reuse.
const s3Client = new S3Client({region: process.env.AWS_REGION});

// Function handler.
export async function handler(event) {
  const {
    ContentType: contentType = '',
    File: file = '',
  } = await FORM.parse(event['body'], event['headers']);
  try {
    // Adds an object to a bucket. We must have `WRITE` permissions on a bucket to add an object to it.
    await s3Client.send(new PutObjectCommand({
      Body: file,
      Bucket: 'BUCKET_NAME',
      ContentType: contentType,
      Key: 'SOME_KEY',
    }));
    return {
      isBase64Encoded: false,
      statusCode: 200,
      body: JSON.stringify({
        message: 'Everything is gonna be alright.',
      }),
    };
  } catch (e) {
    return {
      isBase64Encoded: false,
      statusCode: 404,
      body: JSON.stringify(e),
    };
  }
}

const FORM = {
  parse(body, headers) {
    return new Promise((resolve, reject) => {
      const data = {};
      const buffer = Buffer.from(body, 'base64');
      const bb = Busboy({
        headers: Object.keys(headers).reduce((newHeaders, key) => {
          // busboy expects lower-case headers.
          newHeaders[key.toLowerCase()] = headers[key];
          return newHeaders;
        }, {}),
        limits: {
          fileSize: 10485760, // Set as desired.
          files: 1,
        },
      });
      bb.on('file', (name, stream, info) => {
        const chunks = [];
        stream.on('data', (chunk) => {
          if (name === 'File') {
            chunks.push(chunk);
          }
        }).on('limit', () => {
          reject(new Error('File size limit has been reached.'));
        }).on('close', () => {
          if (name === 'File') {
            data[name] = Buffer.concat(chunks);
            data['ContentType'] = info.mimeType;
          }
        });
      });
      bb.on('error', (err) => {
        reject(err);
      });
      bb.on('close', () => {
        resolve(data);
      });
      bb.end(buffer);
    });
  },
};
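Note that FORM.parse assumes event['body'] arrives base64-encoded, which is how API Gateway delivers binary payloads once binary media types are configured. For local experimentation you could feed the parser a hand-built event; everything below is hypothetical test data, and the field name 'File' must match what the 'file' listener above checks for:

// Hypothetical local test event for FORM.parse.
const boundary = '----testboundary';
const payload = Buffer.concat([
  Buffer.from(`--${boundary}\r\nContent-Disposition: form-data; name="File"; filename="a.png"\r\nContent-Type: image/png\r\n\r\n`),
  Buffer.from([0x89, 0x50, 0x4e, 0x47]), // stand-in binary content
  Buffer.from(`\r\n--${boundary}--\r\n`),
]);
const event = {
  body: payload.toString('base64'),
  headers: {'Content-Type': `multipart/form-data; boundary=${boundary}`},
};
const {ContentType, File} = await FORM.parse(event['body'], event['headers']);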

Lambda not triggering CodeBuild to run?

I am trying to have Lambda trigger a CodeBuild project when execution reaches a certain point within the Lambda function. Here is the current code I'm using for Lambda:
console.log('Loading function');
const aws = require('aws-sdk');
const s3 = new aws.S3();

exports.handler = async (event, context) => {
  const codebuild = new aws.CodeBuild();
  let body = JSON.parse(event.body);
  let key = body.model;
  var getParams = {
    Bucket: 'bucketname', // your bucket name,
    Key: key + '/config/training_parameters.json' // path to the object you're looking for
  };
  if (key) {
    const objects = await s3.listObjects({
      Bucket: 'bucketname',
      Prefix: key + "/data"
    }).promise();
    console.log(objects);
    if (objects.Contents.length == 3) {
      console.log("Pushing");
      await s3.getObject(getParams, function(err, data) {
        if (err)
          console.log(err);
        if (data) {
          let objectData = JSON.parse(data.Body.toString('utf-8'));
          const build = {
            projectName: "projname",
            environmentVariablesOverride: [
              {
                name: 'MODEL_NAME',
                value: objectData.title,
                type: 'PLAINTEXT',
              },
            ]
          };
          console.log(objectData.title);
          codebuild.startBuild(build, function(err, data) {
            if (err) {
              console.log(err, err.stack);
            } else {
              console.log(data);
            }
          });
          console.log("Done with codebuild");
        }
      }).promise();
      const message = {
        'message': 'Execution started successfully!',
      };
      return {
        'statusCode': 200,
        'headers': {'Content-Type': 'application/json'},
        'body': JSON.stringify(message)
      };
    }
  }
};
Specifically, this part should trigger it:
codebuild.startBuild(build, function(err, data) {
  if (err) {
    console.log(err, err.stack);
  } else {
    console.log(data);
  }
});
But it's not; it even logs output after the function. I'm thinking it's something to do with promises/await/async, but I can't find the right solution. Any help would be appreciated.
As you pointed out, the problem is related to promises. Change your code like this:
const result = await codebuild.startBuild(build).promise();
And if you have configured your Lambda's permissions for CodeBuild, it should work.
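For reference, a minimal execution-role statement that allows starting the build could look like this (the project ARN is a placeholder to adapt):

{
  "Version": "2012-10-17",
  "Statement": [
    {
      "Effect": "Allow",
      "Action": "codebuild:StartBuild",
      "Resource": "arn:aws:codebuild:*:*:project/projname"
    }
  ]
}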
You can change your s3.getObject the same way without the callback function:
const file = await s3.getObject(getParams).promise();
console.log(file.Body);
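Putting both changes together, the relevant part of the handler might look like this (a sketch based on the question's code; the bucket and project names are the same placeholders):

if (objects.Contents.length == 3) {
  // Await the S3 read instead of mixing await with a callback.
  const file = await s3.getObject(getParams).promise();
  const objectData = JSON.parse(file.Body.toString('utf-8'));
  const build = {
    projectName: "projname",
    environmentVariablesOverride: [
      {
        name: 'MODEL_NAME',
        value: objectData.title,
        type: 'PLAINTEXT',
      },
    ]
  };
  // startBuild(...).promise() resolves once CodeBuild has accepted the request,
  // so the handler does not return before the build is actually started.
  const result = await codebuild.startBuild(build).promise();
  console.log(result);
}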

'NoneType' object has no attribute 'decode' error when I upload an image to the database

I am new to Ionic 4 / Angular 4. I need to upload a profile pic to the database. I wrote this code, but I don't know whether it is correct or not, and when I upload I am getting the above-mentioned error. The backend is Django. Sorry for the bad indentation; I'm just a beginner at programming.
.ts
async sendPictureToSomewhere() {
  const fileuri = await this.getPicture();
  const blobinfo = await this.b64toBlob(fileuri);
  await this.upload(blobinfo);
  alert("done");
}

async getPicture() {
  const options: CameraOptions = {
    quality: 100,
    destinationType: this.camera.DestinationType.FILE_URI,
    encodingType: this.camera.EncodingType.JPEG,
    mediaType: this.camera.MediaType.PICTURE
    // targetWidth: 200
  };
  let fileuri = await this.camera.getPicture(options);
  return fileuri;
}

b64toBlob(_imagePath) {
  return new Promise((resolve, reject) => {
    let fileName = "";
    this.file
      .resolveLocalFilesystemUrl(_imagePath)
      .then(fileEntry => {
        let { name, nativeURL } = fileEntry;
        // get the path..
        let path = nativeURL.substring(0, nativeURL.lastIndexOf("/"));
        console.log("path", path);
        console.log("fileName", name);
        fileName = name;
        // we are provided the name, so now read the file into a buffer
        return this.file.readAsArrayBuffer(path, name);
      })
      .then(buffer => {
        // get the buffer and make a blob to be saved
        let imgBlob = new Blob([buffer], {
          type: "image/jpeg"
        });
        console.log(imgBlob.type, imgBlob.size);
        resolve({
          fileName,
          imgBlob
        });
      })
      .catch(e => reject(e));
  });
}

upload(_Blobinfo) {
  this.profileService.postInfluencerProfile(_Blobinfo, null, null).subscribe(
    response => {
      console.log(response);
    },
    (error: MavinError) => {
      if (error instanceof NotFoundError) {
        alert("Not found");
      } else {
        console.log(error);
      }
    }
  );
}

AWS Lambda - unable to convert SDK call to promise

I have a Lambda which looks like so:
module.exports.handler = (event, context, callback) => {
  AWS.config.setPromisesDependency(null);
  const uploadPromise = s3.upload(params).promise();
  uploadPromise.then((data) => {
    const response = {
      ...
    };
    return response;
  }).catch((error) => {
    console.log(error);
  });
};
Calling it from Postman results in a server error in Postman. The CloudWatch logs have no further info.
Doing:
s3.upload(params, (error, data) => {
  if (error) {
    console.error("error occurred storing to s3: ", error);
    return;
  }
  const response = {
    ...
  };
  return response;
});
does not result in a server error.
I am trying to follow the information from AWS that can be found here:
https://aws.amazon.com/blogs/developer/support-for-promises-in-the-sdk/
The original handler never invokes the callback or returns anything, so Lambda has no response to send when the function exits. Postman is able to upload through Lambda by doing the following with async/await and try/catch:
exports.handler = async function(event, context) {
  const s3 = new AWS.S3();
  const encodedImage = util.inspect(event.body);
  const decodedImage = Buffer.from(encodedImage, "base64");
  const filePath = "test.png";
  const params = {
    Body: decodedImage,
    Bucket: "my bucket",
    Key: filePath,
    ACL: "public-read",
    ContentType: "mime/png"
  };
  try {
    const result = await s3.upload(params).promise();
    const response = {
      statusCode: 200,
      headers: {
        my_header: "my_value"
      },
      body: JSON.stringify(result),
      isBase64Encoded: false
    };
    return response;
  } catch (error) {
    // Log the actual error and return a proxy-shaped response so API Gateway
    // does not report a malformed response.
    console.log(error);
    return {
      statusCode: 500,
      body: JSON.stringify({ message: 'Upload failed' }),
      isBase64Encoded: false
    };
  }
};
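If you prefer to keep the .then()/.catch() style from the original, the minimal fix is to make the handler async and return the promise chain, so Lambda waits for it to settle before ending the invocation. A sketch under the same assumptions (params defined as in the question):

module.exports.handler = async (event, context) => {
  // Returning the chain makes Lambda wait for it to resolve or reject.
  return s3.upload(params).promise()
    .then((data) => ({
      statusCode: 200,
      body: JSON.stringify(data),
      isBase64Encoded: false
    }))
    .catch((error) => {
      console.log(error);
      return {
        statusCode: 500,
        body: JSON.stringify({ message: 'Upload failed' }),
        isBase64Encoded: false
      };
    });
};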

Lambda function s3.getObject returns "Internal server error"

This code works just fine locally using Node.js: images download from S3 and write to file.
However, in Lambda (using Node.js 8.10) I'm getting "Internal Server Error" when testing the function, with this in the logs:
"Execution failed due to configuration error: Malformed Lambda proxy response"
I am using the Lambda proxy response in the callback, but clearly some AWS SDK error with S3 is not getting caught.
I do have a role set up with S3 full access that the Lambda has access to.
What am I missing with my first Lambda function? I have followed docs and tutorials correctly, and it is not working.
const async = require('async')
const aws = require('aws-sdk')
const fs = require('fs')
const exec = require('child_process').exec

const bucket = 'mybucket'
const s3Src = 'bucket_prefix'
const s3Dst = 'new_prefix'
const local = `${__dirname}/local/`

aws.config.region = 'us-west-2'
const s3 = new aws.S3()

exports.handler = async (event, context, callback) => {
  const outputImage = 'hello_world.png'
  const rack = JSON.parse(event.body)
  const images = my.images

  async.waterfall([
    function download(next) {
      let downloaded = 0
      let errors = false
      let errorMessages = []
      for (let i = 0; i < images.length; i++) {
        let key = `${s3Src}/${images[i].prefix}/${images[i].image}`,
          localImage = `${local}${images[i].image}`
        getBucketObject(bucket, key, localImage).then(() => {
          ++downloaded
          if (downloaded === images.length) { // js is non blocking, need to check if all images have been downloaded. If so, then go to next function
            if (errors) {
              next(errorMessages.join(' '))
            } else {
              next(null)
            }
          }
        }).catch(error => {
          errorMessages.push(`${error} - ${localImage}`)
          ++downloaded
          errors = true
        })
      }

      function getBucketObject(bucket, key, dest) {
        return new Promise((resolve, reject) => {
          let ws = fs.createWriteStream(dest)
          ws.once('error', (err) => {
            return reject(err)
          })
          ws.once('finish', () => {
            return resolve(dest)
          })
          let s3Stream = s3.getObject({
            Bucket: bucket,
            Key: key
          }).createReadStream()
          s3Stream.pause() // Under load this will prevent first few bytes from being lost
          s3Stream.on('error', (err) => {
            return reject(err)
          })
          s3Stream.pipe(ws)
          s3Stream.resume()
        })
      }
    }
  ], err => {
    if (err) {
      let response = {
        "statusCode": 400,
        "headers": {
          "my_header": "my_value"
        },
        "body": JSON.stringify(err),
        "isBase64Encoded": false
      }
      callback(null, response)
    } else {
      let response = {
        "statusCode": 200,
        "headers": {
          "my_header": "my_value"
        },
        "body": JSON.stringify(`<img src="${local}${outputImage}" />`),
        "isBase64Encoded": false
      }
      callback(null, response)
    }
  })
}
A response should always be sent to the callback function. Your code sends a response only on error. That's why the Lambda executor thinks your code fails.
BTW - shouldn't your functions in async.waterfall be separated with a comma, as two tasks?
Locally I've been running Node.js 10.10, while Lambda is currently at 8.10; I'm sure that is a big part of it. In the end I had to remove the async keyword from the handler, and I moved the getBucketObject function out of the waterfall. Once I made those adjustments it started working. Another issue was that the downloaded images needed to go into the "/tmp" directory.
const aws = require('aws-sdk')
const async = require('async')
const fs = require('fs')

const bucket = 'mybucket'
const s3Src = 'mys3src'
const local = '/tmp/'

aws.config.region = 'us-west-2'
const s3 = new aws.S3()

exports.handler = (event, context, callback) => {
  const outputImage = 'hello_world.png'
  async.waterfall([
    function download(next) {
      let downloaded = 0,
        errorMessages = []
      for (let i = 0; i < event['images'].length; i++) {
        let key = `${s3Src}/${event['images'][i]['prefix']}/${event['images'][i]['image']}`,
          localImage = `${local}${event['images'][i]['image']}`
        getBucketObject(bucket, key, localImage).then(() => {
          downloaded++
          if (downloaded === event['images'].length) {
            if (errorMessages.length > 0) {
              next(errorMessages.join(' '))
            } else {
              console.log('All downloaded')
              next(null)
            }
          }
        }).catch(error => {
          downloaded++
          errorMessages.push(`${error} - ${localImage}`)
          if (downloaded === event['images'].length) {
            next(errorMessages.join(' '))
          }
        })
      }
    }
  ], err => {
    if (err) {
      console.error(err)
      callback(null, {
        "statusCode": 400,
        "body": JSON.stringify(err),
        "isBase64Encoded": false
      })
    } else {
      console.log('event image created!')
      callback(null, {
        "statusCode": 200,
        "body": JSON.stringify(`<img src="${local}${outputImage}" />`),
        "isBase64Encoded": false
      })
    }
  })
}

function getBucketObject(bucket, key, dest) {
  return new Promise((resolve, reject) => {
    let ws = fs.createWriteStream(dest)
    ws.once('error', (err) => {
      return reject(err)
    })
    ws.once('finish', () => {
      return resolve(dest)
    })
    let s3Stream = s3.getObject({
      Bucket: bucket,
      Key: key
    }).createReadStream()
    s3Stream.pause() // Under load this will prevent first few bytes from being lost
    s3Stream.on('error', (err) => {
      return reject(err)
    })
    s3Stream.pipe(ws)
    s3Stream.resume()
  })
}
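Since the 8.10 runtime already supports async/await, the async.waterfall scaffolding could also be dropped entirely. A sketch of the same download step, reusing getBucketObject and the constants from above:

exports.handler = async (event) => {
  const outputImage = 'hello_world.png'
  try {
    // Download all images in parallel and wait for every write stream to finish.
    await Promise.all(event['images'].map((img) => {
      const key = `${s3Src}/${img['prefix']}/${img['image']}`
      return getBucketObject(bucket, key, `${local}${img['image']}`)
    }))
    return {
      "statusCode": 200,
      "body": JSON.stringify(`<img src="${local}${outputImage}" />`),
      "isBase64Encoded": false
    }
  } catch (err) {
    console.error(err)
    return {
      "statusCode": 400,
      "body": JSON.stringify(String(err)),
      "isBase64Encoded": false
    }
  }
}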