I need to be able to write some custom text on an image I have, which is stored in S3. What's the best way to do this in the AWS ecosystem?
If possible, I would like to avoid having to start up a server and run PHP or Node on it; I would just want to use Lambda functions or something similar.
Use ImageMagick via Node.js on Lambda. Lots of people have done this; here is a good example: https://tech.mybuilder.com/memes-as-a-service-using-lambda-serverless-and-imagemagick/
The Lambda function looks like this:
'use strict';

const gm = require('gm').subClass({ imageMagick: true });
const fs = require('fs');

const { IMAGES_DIR, TEXT_SIZE, TEXT_PADDING } = process.env;

const parseText = text => (text || '').toUpperCase();
const getImages = () => fs.readdirSync(IMAGES_DIR);
const parseImage = image => getImages().find(file => file.indexOf(image) === 0);
const random = arr => arr[Math.floor(Math.random() * arr.length)];
const randomImage = () => random(getImages());

module.exports.meme = (event, context, callback) => {
    const input = event.queryStringParameters || {};
    const top = parseText(input.top);
    const bottom = parseText(input.bottom);
    const image = parseImage(input.image) || randomImage();

    const meme = gm(`${IMAGES_DIR}${image}`);

    meme.size(function (err, size) {
        if (err) return callback(err);

        const { height } = size;

        meme
            .font('./impact.ttf', TEXT_SIZE)
            .fill('white')
            .stroke('black', 2)
            .drawText(0, -(height / 2 - TEXT_PADDING), top, 'center')
            .drawText(0, height / 2 - TEXT_PADDING, bottom, 'center')
            .toBuffer(function (err, buffer) {
                if (err) return callback(err);

                // API Gateway needs isBase64Encoded to return binary payloads.
                callback(null, {
                    statusCode: 200,
                    headers: { 'Content-Type': 'image/jpeg' },
                    body: buffer.toString('base64'),
                    isBase64Encoded: true,
                });
            });
    });
};
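If you want to sanity-check the handler before deploying, a minimal local harness could look like this (a sketch, assuming ImageMagick and the gm package are installed locally; the env values and the ./handler module path are placeholders, not part of the original example):

process.env.IMAGES_DIR = './images/';
process.env.TEXT_SIZE = '50';
process.env.TEXT_PADDING = '40';

const { meme } = require('./handler'); // hypothetical module path

meme({ queryStringParameters: { top: 'hello', bottom: 'world' } }, {}, (err, response) => {
    if (err) throw err;
    // Decode the base64 body so you can inspect the result on disk.
    require('fs').writeFileSync('out.jpg', Buffer.from(response.body, 'base64'));
});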
I'm trying to send a request with an image to a lambda function through API gateway.
I'm using this piece of code to parse the form-data object received by my lambda function. I then upload the image to S3, but when downloading and opening the image from S3, I see that it's corrupt.
I have tried the following npm packages:
parse-multipart
parse-multipart-data
Both do not work, because I get an empty parts array. The piece of code I use does get results in the array, but the buffers seem to be corrupt.
The problem seems to be in this line of code:
Buffer.from(item.slice(item.search(/Content-Type:\s.+/g) + item.match(/Content-Type:\s.+/g)[0].length + 4, -4), 'binary')
Does anyone have a solution for me?
You can use busboy to parse the multipart form data before uploading to S3 as shown below:
// This code is written in ES modules (ESM) syntax, not CommonJS.
// So make sure you add "type": "module" to your package.json.
import { S3Client, PutObjectCommand } from '@aws-sdk/client-s3';
import Busboy from 'busboy';

// Initialize the client outside the function handler to take advantage of execution environment reuse.
const s3Client = new S3Client({ region: process.env.AWS_REGION });

// Function handler.
export async function handler(event) {
    const {
        ContentType: contentType = '',
        File: file = '',
    } = await FORM.parse(event['body'], event['headers']);

    try {
        // Adds an object to a bucket. We must have `WRITE` permissions on a bucket to add an object to it.
        await s3Client.send(new PutObjectCommand({
            Body: file,
            Bucket: 'BUCKET_NAME',
            ContentType: contentType,
            Key: 'SOME_KEY',
        }));

        return {
            isBase64Encoded: false,
            statusCode: 200,
            body: JSON.stringify({
                message: 'Everything is gonna be alright.',
            }),
        };
    } catch (e) {
        return {
            isBase64Encoded: false,
            statusCode: 404,
            body: JSON.stringify(e),
        };
    }
}

const FORM = {
    parse(body, headers) {
        return new Promise((resolve, reject) => {
            const data = {};
            const buffer = Buffer.from(body, 'base64');

            const bb = Busboy({
                headers: Object.keys(headers).reduce((newHeaders, key) => {
                    // busboy expects lower-case headers.
                    newHeaders[key.toLowerCase()] = headers[key];
                    return newHeaders;
                }, {}),
                limits: {
                    fileSize: 10485760, // Set as desired.
                    files: 1,
                },
            });

            bb.on('file', (name, stream, info) => {
                const chunks = [];
                stream.on('data', (chunk) => {
                    if (name === 'File') {
                        chunks.push(chunk);
                    }
                }).on('limit', () => {
                    reject(new Error('File size limit has been reached.'));
                }).on('close', () => {
                    if (name === 'File') {
                        data[name] = Buffer.concat(chunks);
                        data['ContentType'] = info.mimeType;
                    }
                });
            });

            bb.on('error', (err) => {
                reject(err);
            });
            bb.on('close', () => {
                resolve(data);
            });
            bb.end(buffer);
        });
    },
};
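To verify the parser locally before wiring up API Gateway, you can feed it a hand-built multipart body (a sketch appended to the module above; the boundary and the four payload bytes are made up, and it assumes Node 14.8+ with "type": "module" so top-level await works). Note that API Gateway must pass the body through base64-encoded, e.g. by configuring binary media types on a REST API:

const boundary = '----testboundary'; // illustrative boundary
const body = Buffer.concat([
    Buffer.from(`--${boundary}\r\nContent-Disposition: form-data; name="File"; filename="a.png"\r\nContent-Type: image/png\r\n\r\n`),
    Buffer.from([0x89, 0x50, 0x4e, 0x47]), // fake "file" payload
    Buffer.from(`\r\n--${boundary}--\r\n`),
]).toString('base64');

const data = await FORM.parse(body, {
    'Content-Type': `multipart/form-data; boundary=${boundary}`,
});
console.log(data.ContentType, data.File.length); // image/png 4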
I am trying to concatenate multiple videos of different extensions with the fluent-ffmpeg npm package, but I am not getting any output. I've shared my code below.
const express = require("express");
const multer = require("multer");
const fluent_ffmpeg = require("fluent-ffmpeg");
const mergedVideo = fluent_ffmpeg();
const app = express();
const fs = require("fs");
const path = require("path");
const ffmpeg = require('@ffmpeg-installer/ffmpeg').path;
const ffprobe = require('@ffprobe-installer/ffprobe').path;

mergedVideo.setFfmpegPath(ffmpeg);
mergedVideo.setFfprobePath(ffprobe);

const PORT = process.env.PORT || 3000;

var dir = "public";
var subDirectory = "public/uploads";

app.use(express.json());
app.use(express.urlencoded({ extended: false }));
app.use(express.static(path.join(__dirname, "public")));

if (!fs.existsSync(dir)) {
    fs.mkdirSync(dir);
    fs.mkdirSync(subDirectory);
}

const storage = multer.diskStorage({
    destination: function (req, file, cb) {
        cb(null, "public/uploads");
    },
    filename: function (req, file, cb) {
        cb(
            null,
            file.fieldname + "-" + Date.now() + path.extname(file.originalname)
        );
    }
});

var upload = multer({ storage: storage });

app.post('/merge', upload.any(), (req, res) => {
    let file_list = [];
    let str_command = "";
    req.files.forEach((element) => {
        //file_list.push("-i "+"./"+element.path);
        file_list.push("./" + element.path);
    });
    for (let i = 0; i < file_list.length; i++) {
        str_command += `[${i}:v][${i}:a]`;
    }
    filter_option = `${str_command} concat=n=${file_list.length}:v=1:a=1 [outv] [outa]`;
    output_option = ['-map [outv]', '-map [outa]'];
    let chainedInputs = file_list.reduce((result, inputItem) => result.addInput(inputItem), mergedVideo);
    chainedInputs
        .complexFilter([filter_option])
        .outputOption(output_option)
        .on('error', function (err) {
            console.log("Error:", err.message);
        })
        .on('start', function () {
            console.log('Merging...');
        })
        .on('progress', function (progress) {
            // Progress goes here
            console.log(progress);
        })
        .on('end', function () {
            console.log(output_file + ' merged.');
        })
        .save('output.mp4');
});

app.get("/", (req, res) => {
    res.sendFile(__dirname + "/index.html");
});

app.listen(PORT, () => {
    console.log(`App is listening on Port ${PORT}`);
});
The error I am getting in the terminal:
App is listening on Port 3000
Merging...
Error: ffmpeg exited with code 1: Error reinitializing filters!
Failed to inject frame into filter network: Invalid argument
Error while processing the decoded data for stream #2:0
Conversion failed!
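For what it's worth, "Error reinitializing filters!" followed by "Failed to inject frame into filter network: Invalid argument" usually means the streams feeding the concat filter don't all share the same resolution, sample aspect ratio, and timing. A hedged sketch of one common fix, as a drop-in replacement for the filter-building loop above (the 1280x720 target is an arbitrary assumption; pick whatever suits your sources):

let norm_command = "";
let concat_inputs = "";
for (let i = 0; i < file_list.length; i++) {
    // Scale each video to a common size, pad to fill, and reset the sample
    // aspect ratio so concat sees identical stream parameters.
    norm_command += `[${i}:v]scale=1280:720:force_original_aspect_ratio=decrease,` +
        `pad=1280:720:(ow-iw)/2:(oh-ih)/2,setsar=1[v${i}];`;
    concat_inputs += `[v${i}][${i}:a]`;
}
filter_option = `${norm_command}${concat_inputs}concat=n=${file_list.length}:v=1:a=1[outv][outa]`;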
I'm using cities stored in a DynamoDB table to make calls to an external weather API to determine whether it will rain in that city on that day. It then calls a separate Lambda that uses that info to message subscribers of an SNS topic that it will rain.
const AWS = require('aws-sdk');
AWS.config.update({ region: 'eu-west-2' });
const lambda = new AWS.Lambda();
const axios = require('axios');

const tableName = process.env.CITY_TABLE;
const docClient = new AWS.DynamoDB.DocumentClient();
const publishMessageLambda = process.env.PUBLISH_MESSAGE_LAMBDA_NAME;
const weatherApiKey = process.env.WEATHER_API_KEY;

exports.handler = async (event) => {
    const scanParams = {
        TableName: tableName,
        AttributesToGet: ['city'],
    };
    try {
        let citiesArr = await docClient.scan(scanParams).promise();
        citiesArr.Items.forEach(async (cityObj) => {
            // console.log('cityObj', cityObj);
            // console.log('city', cityObj.city);
            // console.log('weatherApiKey:', weatherApiKey);
            let weatherReport = await axios({
                method: 'get',
                url: 'http://api.weatherapi.com/v1/forecast.json',
                params: {
                    key: weatherApiKey,
                    q: cityObj.city,
                    days: 1,
                },
            });
            // console.log(weatherReport);
            let city = cityObj.city;
            let dailyChanceOfRain = Number(
                weatherReport.data.forecast.forecastday[0].day.daily_chance_of_rain
            );
            let totalPrecip =
                weatherReport.data.forecast.forecastday[0].day.totalprecip_mm;
            let lambdaParams = {
                FunctionName: publishMessageLambda,
                InvocationType: 'RequestResponse',
                Payload: JSON.stringify({
                    body: { dailyChanceOfRain, totalPrecip, city },
                }),
            };
            console.log('dailyChanceOfRain: ', dailyChanceOfRain);
            console.log('totalPrecip: ', totalPrecip);
            if (dailyChanceOfRain > 50 && totalPrecip > 3) {
                let data = await lambda.invoke(lambdaParams).promise();
            }
            // console.log(
            //   'weatherReport: ',
            //   weatherReport.data.forecast.forecastday[0].day
            // );
        });
    } catch (error) {
        console.log(error);
    }
};
Running the function locally produces the desired logs on the command line. However, when running the function on Lambda, the logs involving the external HTTP request don't show on the cold start; they only show up once the function has been invoked multiple times, and often not together.
Any advice would be much appreciated.
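One likely explanation, offered as a sketch rather than a verified fix: Array.prototype.forEach does not await async callbacks, so the handler's returned promise resolves before any axios call completes; Lambda then freezes the execution environment, and the pending work and logs only surface on later invocations. Awaiting the whole batch keeps the handler alive. This sketch reuses the module-level setup above (docClient, axios, lambda, and the env-derived constants):

exports.handler = async (event) => {
    const scanParams = { TableName: tableName, AttributesToGet: ['city'] };
    const citiesArr = await docClient.scan(scanParams).promise();

    // Map every city to a promise and wait for all of them before returning.
    await Promise.all(citiesArr.Items.map(async ({ city }) => {
        const weatherReport = await axios({
            method: 'get',
            url: 'http://api.weatherapi.com/v1/forecast.json',
            params: { key: weatherApiKey, q: city, days: 1 },
        });

        const day = weatherReport.data.forecast.forecastday[0].day;
        const dailyChanceOfRain = Number(day.daily_chance_of_rain);
        const totalPrecip = day.totalprecip_mm;
        console.log({ city, dailyChanceOfRain, totalPrecip });

        if (dailyChanceOfRain > 50 && totalPrecip > 3) {
            await lambda.invoke({
                FunctionName: publishMessageLambda,
                InvocationType: 'RequestResponse',
                Payload: JSON.stringify({ body: { dailyChanceOfRain, totalPrecip, city } }),
            }).promise();
        }
    }));
};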
I've created a lambda and a CloudFormation template which grants the lambda access to the Parameter Store and Secrets Manager. When I test the lambda, I have the following functions outside of the exports.handler function:
function getParameterFromStore(param) {
    let promise = new Promise(function (resolve, reject) {
        console.log('++ ' + param.Path);
        servmgr.getParametersByPath(param, function (err, data) {
            if (err) {
                reject(console.log('Error getting parameter: ' + err, err.stack));
            } else {
                resolve(data);
            }
        });
    });
    let parameterResult = promise.then(function (result) {
        console.log('---- result: ' + JSON.stringify(result));
        return result;
    });
    return parameterResult;
};
servmgr is instantiated as var servmgr = new AWS.SSM();
When I call this function from the exports.handler function, I do so as:
myFirstParam = { Path : '/myPath/Service/servicesEndpoint'};
let endpointResult = getParameterFromStore(myFirstParam);
In the lambda, I have the function that retrieves the parameter defined outside of the exports.handler function, but wrapped in a promise.
When I run/test this lambda the object returned is always undefined... I get Parameters[] back but no values.
2019-02-20T21:42:41.340Z 2684fe88-d552-4560-a477-6761f2de6717 ++ /myPath/Service/serviceEndpoint
2019-02-20T21:42:41.452Z 2684fe88-d552-4560-a477-6761f2de6717 ---- result: {"Parameters":[]}
How do you get parameter values returned back to a lambda at run time?
update
Based upon the suggestion/answer from Thales, I've simplified the lambda to just this:
const getParameterFromStoreAsync = (param) => {
    return new Promise((resolve, reject) => {
        servmgr.getParametersByPath(param, (err, data) => {
            if (err) {
                reject(console.log('Error getting parameter: ' + err, err.stack));
            }
            return resolve(data);
        });
    });
};
exports.handler = async (event, ctx, callback) => {
    console.log('INFO[lambda]: Event: [' + JSON.stringify(event, null, 2) + ']');
    console.log('this is the event' + JSON.stringify(event));
    sfdcEndPointParam = { Path: '/PartnerBanking/Service/SfdcEndpoint' };
    let myendpoint = await getParameterFromStoreAsync(sfdcEndPointParam);
    console.log('### endpoint path: ' + JSON.stringify(myendpoint));
    done = () => {};
    callback(null, done());
};
I am still seeing an empty array being returned in my tests:
### endpoint path: {"Parameters":[]}
I've also moved the function into the callback as
exports.handler = (event, ctx, callback) => {
    done = async () => {
        console.log('this is the event' + JSON.stringify(event));
        sfdcEndPointParam = { Path: '/PartnerBanking/Service/SfdcEndpoint' };
        let myendpoint = await getParameterFromStoreAsync(sfdcEndPointParam);
        console.log('### endpoint path: ' + JSON.stringify(myendpoint));
    };
    callback(null, done());
};
Same result ... empty array. Any additional things to try?
This is because your getParameterFromStore returns before your then() code is executed, thus parameterResult is undefined. If you don't want to change your code too much, I would return the Promise you create, like this:
function getParameterFromStore(param) {
    return new Promise(function (resolve, reject) {
        console.log('++ ' + param.Path);
        servmgr.getParametersByPath(param, function (err, data) {
            if (err) {
                reject(console.log('Error getting parameter: ' + err, err.stack));
            } else {
                resolve(data);
            }
        });
    });
};
And finally, on your function's client, you can get the result like this:
const myFirstParam = { Path : '/myPath/Service/servicesEndpoint'}
getParameterFromStore(myFirstParam).then(console.log)
When coding in Node.js, however, I highly recommend you use async/await instead, so you'll be able to escape Promise hell (chaining Promise after Promise in order to achieve something "synchronously").
When using async/await, you can design your code as though it were synchronous. Here's a refactored version of your example, using async/await as well as arrow functions:
const getParameterFromStore = param => {
    return new Promise((resolve, reject) => {
        console.log('++ ' + param.Path);
        servmgr.getParametersByPath(param, (err, data) => {
            if (err) {
                console.log('Error getting parameter: ' + err, err.stack)
                return reject(err);
            }
            return resolve(data);
        });
    })
}

exports.handler = async (event) => {
    const endpointResult = await getParameterFromStore(event.someAttributeFromTheEventThatYouWantToUse)
    console.log(endpointResult)
};
EDIT: After the OP fixed the first issue, I created a working example on my own. It turned out that the way the OP was invoking the API was incorrect.
Here's the full working example:
'use strict';

const AWS = require('aws-sdk')

AWS.config.update({
    region: 'us-east-1'
})

const parameterStore = new AWS.SSM()

const getParam = param => {
    return new Promise((res, rej) => {
        parameterStore.getParameter({
            Name: param
        }, (err, data) => {
            if (err) {
                return rej(err)
            }
            return res(data)
        })
    })
}

module.exports.get = async (event, context) => {
    const param = await getParam('MyTestParameter')
    console.log(param);
    return {
        statusCode: 200,
        body: JSON.stringify(param)
    };
};
Mind the Name attribute, which must be provided as part of the API call to SSM's getParameter method.
This attribute is described in the official docs.
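One more note, based on my reading of the SSM API rather than anything confirmed in this thread: getParametersByPath returns the parameters stored *under* a path prefix, not a parameter whose full name equals the path, which would explain the empty Parameters array in the question. If you wanted to keep the by-path call, it would need a shape like this (a sketch reusing the parameterStore client above):

// Query by hierarchy prefix instead of the full parameter name.
parameterStore.getParametersByPath({
    Path: '/PartnerBanking/Service', // prefix; parameters live *under* this path
    Recursive: true,                 // include deeper levels, if any
}, (err, data) => {
    if (err) return console.error(err);
    // Should now include /PartnerBanking/Service/SfdcEndpoint.
    console.log(data.Parameters);
});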
I have run this myself and confirmed in the CloudWatch Logs output that the value was returned successfully.
Hope this helps!
If your lambda is deployed in a VPC, make sure that a Security Group is attached to it and that outbound traffic is allowed; it will then be able to access Parameter Store automatically.
https://aws.amazon.com/premiumsupport/knowledge-center/lambda-vpc-parameter-store/
A simpler solution would be:
const getParameterFromStore = (params) => servmgr.getParametersByPath(params).promise();
const myFirstParam = { Path : '/myPath/Service'};
getParameterFromStore(myFirstParam).then(console.log);
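And if you prefer async/await, the same helper can be awaited inside an async handler (a minimal sketch):

exports.handler = async () => {
    const { Parameters } = await getParameterFromStore({ Path: '/myPath/Service' });
    console.log(Parameters);
};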
As these examples show, the SDK itself provides utility functionality that you can use, depending on your needs, in an async or synchronous fashion.
Hope it helps.
This code works just fine locally using Node.js: images download from S3 and write to file.
However, in Lambda (using Node.js 8.10) I'm getting "Internal Server Error" when testing the function, with this in the logs:
"Execution failed due to configuration error: Malformed Lambda proxy response"
I am using the Lambda proxy response in the callback, but clearly some AWS SDK error with S3 is not getting caught.
I do have a role set up with S3 full access that the Lambda has access to.
What am I missing with my first Lambda function? I've followed the docs and tutorials correctly, and it is still not working.
const async = require('async')
const aws = require('aws-sdk')
const fs = require('fs')
const exec = require('child_process').exec

const bucket = 'mybucket'
const s3Src = 'bucket_prefix'
const s3Dst = 'new_prefix'
const local = `${__dirname}/local/`

aws.config.region = 'us-west-2'
const s3 = new aws.S3()

exports.handler = async (event, context, callback) => {
    const outputImage = 'hello_world.png'
    const rack = JSON.parse(event.body)
    const images = my.images

    async.waterfall([
        function download(next) {
            let downloaded = 0
            let errors = false
            let errorMessages = []
            for (let i = 0; i < images.length; i++) {
                let key = `${s3Src}/${images[i].prefix}/${images[i].image}`,
                    localImage = `${local}${images[i].image}`
                getBucketObject(bucket, key, localImage).then(() => {
                    ++downloaded
                    if (downloaded === images.length) { // js is non blocking, need to check if all images have been downloaded. If so, then go to next function
                        if (errors) {
                            next(errorMessages.join(' '))
                        } else {
                            next(null)
                        }
                    }
                }).catch(error => {
                    errorMessages.push(`${error} - ${localImage}`)
                    ++downloaded
                    errors = true
                })
            }
            function getBucketObject(bucket, key, dest) {
                return new Promise((resolve, reject) => {
                    let ws = fs.createWriteStream(dest)
                    ws.once('error', (err) => {
                        return reject(err)
                    })
                    ws.once('finish', () => {
                        return resolve(dest)
                    })
                    let s3Stream = s3.getObject({
                        Bucket: bucket,
                        Key: key
                    }).createReadStream()
                    s3Stream.pause() // Under load this will prevent first few bytes from being lost
                    s3Stream.on('error', (err) => {
                        return reject(err)
                    })
                    s3Stream.pipe(ws)
                    s3Stream.resume()
                })
            }
        }
    ], err => {
        if (err) {
            let response = {
                "statusCode": 400,
                "headers": {
                    "my_header": "my_value"
                },
                "body": JSON.stringify(err),
                "isBase64Encoded": false
            }
            callback(null, response)
        } else {
            let response = {
                "statusCode": 200,
                "headers": {
                    "my_header": "my_value"
                },
                "body": JSON.stringify(`<img src="${local}${outputImage}" />`),
                "isBase64Encoded": false
            }
            callback(null, response)
        }
    })
}
A response should always be sent to the callback function. Your code sends a response only on error; that's why the Lambda executor thinks your code fails.
BTW, shouldn't your functions in async.waterfall be separated with a comma, as two tasks?
Locally I've been running Node.js 10.10, while Lambda is currently at 8.10; I'm sure that is a big part of it. In the end I had to remove the async keyword and move the getBucketObject function out of the waterfall. Once I made those adjustments it started working. Another issue was that the downloaded images needed to go into the /tmp directory.
const aws = require('aws-sdk')
const async = require('async')
const fs = require('fs')

const bucket = 'mybucket'
const s3Src = 'mys3src'
const local = '/tmp/'

aws.config.region = 'us-west-2'
const s3 = new aws.S3()

exports.handler = (event, context, callback) => {
    const outputImage = 'hello_world.png'

    async.waterfall([
        function download(next) {
            let downloaded = 0,
                errorMessages = []
            for (let i = 0; i < event['images'].length; i++) {
                let key = `${s3Src}/${event['images'][i]['prefix']}/${event['images'][i]['image']}`,
                    localImage = `${local}${event['images'][i]['image']}`
                getBucketObject(bucket, key, localImage).then(() => {
                    downloaded++
                    if (downloaded === event['images'].length) {
                        if (errorMessages.length > 0) {
                            next(errorMessages.join(' '))
                        } else {
                            console.log('All downloaded')
                            next(null)
                        }
                    }
                }).catch(error => {
                    downloaded++
                    errorMessages.push(`${error} - ${localImage}`)
                    if (downloaded === event['images'].length) {
                        next(errorMessages.join(' '))
                    }
                })
            }
        }
    ], err => {
        if (err) {
            console.error(err)
            callback(null, {
                "statusCode": 400,
                "body": JSON.stringify(err),
                "isBase64Encoded": false
            })
        } else {
            console.log('event image created!')
            callback(null, {
                "statusCode": 200,
                "body": JSON.stringify(`<img src="${local}${outputImage}" />`),
                "isBase64Encoded": false
            })
        }
    })
}

function getBucketObject(bucket, key, dest) {
    return new Promise((resolve, reject) => {
        let ws = fs.createWriteStream(dest)
        ws.once('error', (err) => {
            return reject(err)
        })
        ws.once('finish', () => {
            return resolve(dest)
        })
        let s3Stream = s3.getObject({
            Bucket: bucket,
            Key: key
        }).createReadStream()
        s3Stream.pause() // Under load this will prevent first few bytes from being lost
        s3Stream.on('error', (err) => {
            return reject(err)
        })
        s3Stream.pipe(ws)
        s3Stream.resume()
    })
}
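For reference, a test event matching what this handler expects might look like the following; the field names are inferred from the code above and the values are placeholders:

{
    "images": [
        { "prefix": "some_prefix", "image": "photo1.png" },
        { "prefix": "some_prefix", "image": "photo2.png" }
    ]
}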