AWS Lambda Get Image and Upload to S3

I am working in an AWS Lambda function. I am successfully making an API call to the NASA APOD API and getting back the values. I want to take the URL for the image, download that image, and then upload it into S3. I am getting an error when I try to access the "test.jpg" image: "Error: EACCES: permission denied, open 'test.jpg'". If I move the s3Bucket.putObject outside the http.request, data comes back null. I know I am missing something simple. Thoughts?
function GetAPOD(intent, session, callback) {
    var nasa_api_key = 'demo-key'
      , nasa_api_path = '/planetary/apod?api_key=' + nasa_api_key;
    var options = {
        host: 'api.nasa.gov',
        port: 443,
        path: nasa_api_path,
        method: 'GET'
    };
    var req = https.request(options, function (res) {
        res.setEncoding('utf-8');
        var responseString = '';
        res.on('data', function (data) {
            responseString += data;
        });
        res.on('end', function () {
            console.log('API Response: ' + responseString);
            var responseObject = JSON.parse(responseString)
              , image_date = responseObject['date']
              , image_title = responseObject['title']
              , image_url = responseObject['url']
              , image_hdurl = responseObject['hdurl']
              , image_desc = responseObject['explanation'];
            var s3Bucket = new AWS.S3({ params: { Bucket: 'nasa-apod' } });
            var fs = require('fs');
            var file = fs.createWriteStream("test.jpg");
            var request = http.get(image_url, function (response) {
                response.pipe(file);
                var data = { Key: "test.jpg", Body: file };
                s3Bucket.putObject(data, function (err, data) {
                    if (err) {
                        console.log('Error uploading data: ', err);
                    } else {
                        console.log('successfully uploaded the image!');
                    }
                });
            });
        });
    });
    req.on('error', function (e) {
        console.error('HTTP error: ' + e.message);
    });
    //req.write();
    req.end();
}

You need to be writing the file to /tmp. That's the only directory in the Lambda environment that you will have write access to.
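A minimal sketch of that change against the code above (same names as in the question; the only real additions are the /tmp path and waiting for the download to finish before uploading):

var fs = require('fs');
// /tmp is the only writable path in the Lambda filesystem.
var file = fs.createWriteStream('/tmp/test.jpg');
var request = http.get(image_url, function (response) {
    response.pipe(file);
    // Wait until the file is fully written; calling putObject while the
    // stream is still being written is why the body came back empty before.
    file.on('finish', function () {
        var data = { Key: 'test.jpg', Body: fs.readFileSync('/tmp/test.jpg') };
        s3Bucket.putObject(data, function (err, output) {
            if (err) {
                console.log('Error uploading data: ', err);
            } else {
                console.log('successfully uploaded the image!');
            }
        });
    });
});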

I got it!! Thank you Mark B for the help. I was able to get the data from the stream without saving it locally and then write it to the bucket. I did have to change my IAM role to allow s3:PutObject.
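For reference, a minimal sketch of an IAM policy statement granting that permission (the bucket name is taken from the code below; the exact policy the author used is not shown):

{
    "Version": "2012-10-17",
    "Statement": [{
        "Effect": "Allow",
        "Action": "s3:PutObject",
        "Resource": "arn:aws:s3:::nasa-apod/*"
    }]
}

Here is the updated function: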
function GetAPOD(intent, session, callback) {
    var nasa_api_key = 'demo-key'
      , nasa_api_path = '/planetary/apod?api_key=' + nasa_api_key;
    var options = {
        host: 'api.nasa.gov',
        port: 443,
        path: nasa_api_path,
        method: 'GET'
    };
    var req = https.request(options, function (res) {
        res.setEncoding('utf-8');
        var responseString = '';
        res.on('data', function (data) {
            responseString += data;
        });
        res.on('end', function () {
            // console.log('API Response: ' + responseString);
            var responseObject = JSON.parse(responseString)
              , image_date = responseObject['date']
              , image_title = responseObject['title']
              , image_url = responseObject['url']
              , image_hdurl = responseObject['hdurl']
              , image_desc = responseObject['explanation'];
            var image_name = image_date + '.jpg';
            var s3Bucket = new AWS.S3({ params: { Bucket: 'nasa-apod' } });
            var request = http.get(image_url, function (response) {
                // The image arrives in one or more chunks, so collect them all
                // and concatenate, rather than keeping only the last chunk.
                var chunks = [];
                response.on('data', function (data) {
                    chunks.push(data);
                });
                response.on('end', function () {
                    var param_data = {
                        Key: image_name,
                        Body: Buffer.concat(chunks), // the SDK derives Content-Length from the Buffer
                        ContentType: "image/jpeg"
                    };
                    s3Bucket.putObject(param_data, function (err, output_data) {
                        if (err) {
                            console.log('Error uploading data to S3: ' + err);
                        }
                    });
                });
            });
            request.end();
        });
    });
    req.on('error', function (e) {
        console.error('HTTP error: ' + e.message);
    });
    req.end();
}
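As an aside, the aws-sdk's upload() accepts a stream of unknown length (putObject does not), so the response could also be piped to S3 without buffering it in memory first; a sketch under that assumption, reusing the names above:

var s3Bucket = new AWS.S3({ params: { Bucket: 'nasa-apod' } });
http.get(image_url, function (response) {
    // upload() manages the transfer itself, so the HTTP response stream
    // can be handed over directly as the object body.
    s3Bucket.upload({ Key: image_name, Body: response, ContentType: 'image/jpeg' },
        function (err, data) {
            if (err) console.log('Error uploading data to S3: ' + err);
        });
});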

Related

Signing GET HTTP Requests to Amazon Elasticsearch Service

I need to call "Signing GET HTTP Requests to Amazon Elasticsearch Service" using lambda function.
I have already tried http package and it's working fine in http request
http.get(`http://search-"my_ES_service_name"-xxxxxxxxxxx-6fa27gkk4v3dugykj46tzsipbu.xx-xxxx-x.es.amazonaws.com/${event['index']}/doc/_search/?q=${event['keyParam']}`,
    function (res) {
        var body = '';
        res.on('data', function (d) {
            body += d;
        });
        res.on('end', function () {
            context.succeed(JSON.parse(body.replace(/\n|\r/g, ""))); // Remove any newline/linebreak chars
        });
    }).on('error', function (e) {
        console.log("Error: " + e.message);
        context.done(null, 'FAILURE');
    });
Here is my signed version:

var AWS = require('aws-sdk');
exports.handler = function (event, context) {
    var region = 'xx-xxxx-x';
    var domain = 'http://search-"my_ES_service_name"-xxxxxxxxxxx-6fa27gkk4v3dugykj46tzsipbu.xx-xxxx-x.es.amazonaws.com';
    var index = event['index'];
    var type = `_doc/_search`;
    var endpoint = new AWS.Endpoint(domain);
    var request = new AWS.HttpRequest(endpoint, region);
    request.method = 'GET';
    request.path += index + '/' + type + '?q=_doc_key_here:_doc_key_value';
    request.headers['host'] = domain;
    // e.g. the generated URL looks like:
    // http://search-"my_ES_service_name"-xxxxxxxxxxx-6fa27gkk4v3dugykj46tzsipbu.xx-xxxx-x.es.amazonaws.com/node-test/doc/_search/?q=user_name:johndoe
    var credentials = new AWS.EnvironmentCredentials('AWS');
    var signer = new AWS.Signers.V4(request, 'es');
    signer.addAuthorization(credentials, new Date());
    var client = new AWS.HttpClient();
    client.handleRequest(request, null, function (response) {
        console.log("response: ", response.statusCode);
        var responseBody = '';
        response.on('data', function (chunk) {
            responseBody += chunk;
        });
        response.on('end', function (chunk) {
            console.log('Response body: ' + responseBody);
            context.succeed(responseBody);
        });
    }, function (error) {
        console.log('Error: ' + error);
        context.done(error);
    });
};
When I try to call the signed GET request using the above function, it throws the following error:
response: 400 Bad Request
Only one thing was missing here: I added encodeURI() to the request path and it works fine for me.

var index = event['index'];
var type = `_doc/_search?q=_doc_key_here:_doc_key_value`;
request.method = 'GET';
request.path += index + '/' + encodeURI(type);

I hope it will help others. Thanks!
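For illustration, encodeURI() percent-encodes characters that are illegal in a URI while leaving structural characters such as /, ? and : intact (the values here are made up):

encodeURI('node-test/_doc/_search?q=user_name:john doe');
// => 'node-test/_doc/_search?q=user_name:john%20doe'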

How to POST data for evaluation in middleware in loopback?

I want to use a custom API to evaluate data which is POSTed by applications, but remote methods are not accepted in middleware in LoopBack.
module.exports = function () {
    const http = require('https');
    var request = require('request');
    var { Lib } = require('Lib');
    var lib = new Lib;

    verification.checkID = function (ID, cb) {
        cb(null, 'ID is :' + ID);
    };

    verification.remoteMethod('greet', {
        accepts: {
            arg: 'ID',
            type: 'string'
        },
        returns: {
            arg: 'OK',
            type: 'string'
        }
    });
};
Add the HTTP verb to the remote method definition:

module.exports = function () {
    const http = require('https');
    var request = require('request');
    var { Lib } = require('Lib');
    var lib = new Lib;

    verification.checkID = function (ID, cb) {
        cb(null, 'ID is :' + ID);
    };

    verification.remoteMethod('greet', {
        'http': { // add the verb here
            'path': '/greet',
            'verb': 'post'
        },
        accepts: {
            arg: 'ID',
            type: 'string'
        },
        returns: {
            arg: 'OK',
            type: 'string'
        }
    });
};
Update
module.exports = function (server) {
    // Install a `/` route that returns server status
    var router = server.loopback.Router();
    router.get('/', server.loopback.status());
    router.get('/ping', function (req, res) { // your middleware function; call next() here if later handlers should run
        res.send('pong');
    });
    server.use(router);
};
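For a pass-through middleware (as opposed to a terminal route like /ping above), the handler must call next() so later handlers still run; a minimal sketch:

server.use(function verification(req, res, next) {
    // Inspect or validate the incoming request here...
    next(); // hand control to the next middleware/route instead of ending the response
});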
What exactly "evaluate" should do is something I am not getting; please check this link too: Intercepting error handling with loopback.
Regarding the question How to make a simple API for post method?, I found my solution in the following way:
module.exports = function (server) {
    const https = require('https');
    var request = require('request');

    return function verification(req, res, next) {
        res.setHeader('Access-Control-Allow-Origin', '*');
        res.setHeader('Access-Control-Allow-Methods', 'GET, POST, PUT, DELETE');
        res.setHeader('Access-Control-Allow-Headers', 'Content-Type');
        res.setHeader('Access-Control-Allow-Credentials', true);
        var response;
        var body = '';
        // When a chunk of data arrives.
        req.on('data', function (chunk) {
            // Append it.
            body += chunk;
        });
        // When finished with data.
        req.on('end', function () {
            // Show what just arrived if POST.
            if (req.method === 'POST') {
                console.log(body);
            }
            // Which method?
            switch (req.method) {
                case 'GET':
                    // Verify url and respond with appropriate data
                    // (handleGet is assumed to be defined elsewhere).
                    handleGet(req, res);
                    // Response has already been sent.
                    response = '';
                    break;
                case 'POST':
                    // Verify JSON request and respond with stringified JSON response.
                    response = handlePost(body);
                    break;
                default:
                    response = JSON.stringify({ 'error': 'Not A POST' });
                    break;
            }
            // Send the response if not empty.
            if (response.length !== 0) {
                res.write(response);
                res.end();
            }
            // Paranoid clear of the 'body'. Seems to work without
            // this, but I don't trust it...
            body = '';
        });
        // If error.
        req.on('error', function (err) {
            res.write(JSON.stringify({ 'error': err.message }));
            res.end();
        });
    };

    function handlePost(body) {
        var response = '';
        var obj = JSON.parse(body);
        // Error if no 'fcn' property.
        if (typeof obj['fcn'] === 'undefined') {
            return JSON.stringify({ 'error': 'Request method missing' });
        }
        // Which function?
        switch (obj['fcn']) {
            // verification() requires 3 arguments.
            case 'verification':
                // Error if no arguments.
                if ((typeof obj['arg'] === 'undefined') || (obj['arg'].length !== 3)) {
                    response = JSON.stringify({ 'error': 'Arguments missing' });
                    break;
                }
                // Return with response from method.
                response = verification(obj['arg']);
                break;
            default:
                response = JSON.stringify({ 'error': 'Unknown function' });
                break;
        }
        return response;
    }

    function verification(arg) {
        var n1 = Number(arg[0]);
        var n2 = Number(arg[1]);
        var n3 = Number(arg[2]);
        var result;
        // Add them up.
        result = n1 + n2 + n3;
        // Return with JSON string.
        return JSON.stringify({ 'result': result });
    }
};
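One way to wire such a middleware into a LoopBack 3 application, assuming the file above is saved as server/middleware/verification.js, is an entry in server/middleware.json (the phase here is illustrative; note that with this registration the factory's server argument actually receives the middleware options, which the code above does not use):

{
    "routes:before": {
        "./middleware/verification": {}
    }
}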

AWS S3 - How to stop images from force downloading instead of displaying

I am currently trying to upload images from my website to AWS S3. I have the functionality working where it uploads the image to the server, but when I try to view the images they download instead of displaying. I read there is a way to set the file type so this would not happen, but I am not sure how to do that. Any help would be great.
router.post('/heroes/createNewHeroes', function (req, res) {
    var formidable = require('formidable'),
        http = require('http'),
        util = require('util');
    var form = new formidable.IncomingForm();
    form.parse(req, function (err, fields, files) {
        console.log(fields);
        console.log(files);
        // Load the AWS SDK for Node.js
        var AWS = require('aws-sdk');
        var shortid = require('shortid');
        var fs = require('fs');
        var fileStream = fs.createReadStream(files.asset.path);
        var newFilename = shortid.generate() + "_" + files.asset.name;
        // Set your region for future requests.
        AWS.config.region = 'us-west-2';
        AWS.config.accessKeyId = 'access Key';
        AWS.config.secretAccessKey = 'secret Key';
        console.log(newFilename);
        fileStream.on('error', function (err) {
            if (err) { throw err; }
        });
        fileStream.on('open', function () {
            var s3bucket = new AWS.S3({ params: { Bucket: ' ' } });
            s3bucket.createBucket(function () {
                var params = { Key: newFilename, Body: fileStream };
                s3bucket.upload(params, function (err, data) {
                    if (err) {
                        console.log("Error uploading data: ", err);
                    } else {
                        console.log("Successfully uploaded data");
                        projectX.createHeroes(
                            ['plantTypes', 'asset', 'cost', 'energy', 'isSunProducer', 'isShooter', 'isExploding', 'sunFrequency', 'shootingFrequency', 'damage'],
                            [fields.plantTypes, newFilename, fields.cost, fields.energy, fields.isSunProducer, fields.isShooter, fields.isExploding, fields.sunFrequency, fields.shootingFrequency, fields.damage],
                            function (data) {
                                res.redirect('/heroes');
                            });
                    }
                });
            });
        });
    });
});
Pass a ContentType when you upload so S3 serves the object with a type the browser will render inline:

var params = {Key: newFilename, ContentType: 'image/png', Body: fileStream};

http://docs.aws.amazon.com/AWSJavaScriptSDK/latest/AWS/S3.html#upload-property
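If the uploads are not always PNGs, one option is to derive the type from what formidable reports; a sketch (files.asset.type is the client-supplied MIME type in formidable v1, so treat it as untrusted input):

var params = {
    Key: newFilename,
    ContentType: files.asset.type || 'application/octet-stream', // e.g. 'image/png'
    Body: fileStream
};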
Just put "contentType: multerS3.AUTO_CONTENT_TYPE " . It will work .
Ex:
var upload = multer({
    storage: multerS3({
        s3: s3,
        bucket: 'some-bucket',
        contentType: multerS3.AUTO_CONTENT_TYPE,
        key: function (req, file, cb) {
            cb(null, Date.now().toString())
        }
    })
})
Visit this link for more details: https://github.com/badunk/multer-s3
This helped me:
storage: multerS3({
    s3: s3,
    bucket: "bucketname",
    acl: "public-read",
    contentType: multerS3.AUTO_CONTENT_TYPE,
    key: function (req, file, cb) {
        console.log("req.file", file);
        cb(null, `${Date.now()}-${file.originalname}`);
    }
})

Unable to create a resized image using AWS Lambda

I have created an AWS Lambda function to resize any image that is put in my S3 bucket. I have created an event on every object creation in the S3 bucket to trigger the Lambda function. My handler code for the Lambda function is as follows:
var async = require('async'),
    gm = require('gm').subClass({
        imageMagick: true
    }) // Enable ImageMagick integration.
    , request = require('request').defaults({
        encoding: null
    });
var knox = require('knox');
var client = knox.createClient({
    key: 'myKey',
    secret: 'mySecretKey',
    bucket: 'MyBucketName'
});
// Hardcoded values used to build the image URL
var s3value = '.s3.amazonaws.com/';
var https = 'https://';

exports.handler = function (event, context) {
    var srcBucket = event.Records[0].s3.bucket.name;
    var srcKey = event.Records[0].s3.object.key;
    var imageUrl = https + srcBucket + s3value + srcKey; // My HTTP URL for the image stored in the S3 bucket.
    request(imageUrl, function (err, res, res1) {
        if (err) {
            console.log(err);
        } else {
            gm(res1).resize(120)
                .toBuffer('jpg', function (err, buffer) {
                    if (err) {
                        console.log(err);
                    } else {
                        var ImageName = "/" + imageUrl.substr(0, imageUrl.indexOf(".")) + "-1x" + imageUrl.substr(imageUrl.indexOf("."), imageUrl.length); // Renaming the present image
                        client.putBuffer(buffer, ImageName, function (err, response1) {
                            if (err) {
                                console.log(err);
                            } else {
                                console.log(response1.socket._httpMessage.url);
                            }
                        });
                    }
                });
        }
    });
    context.done();
};
My problem is that when I store an image in S3, the resized image is not created in my S3 bucket, and I cannot understand why. Can somebody help me with it?
It looks like you are calling context.done() outside the callback function for request(). This will end the Lambda function before any image resizing completes. You should only call context.done() when all processing has completed or failed.
exports.handler = function (event, context) {
    var srcBucket = event.Records[0].s3.bucket.name;
    var srcKey = event.Records[0].s3.object.key;
    var imageUrl = https + srcBucket + s3value + srcKey; // My HTTP URL for the image stored in the S3 bucket.
    request(imageUrl, function (err, res, res1) {
        if (err) {
            console.log(err);
            context.fail(err);
        } else {
            gm(res1).resize(120)
                .toBuffer('jpg', function (err, buffer) {
                    if (err) {
                        console.log(err);
                        context.fail(err);
                    } else {
                        var ImageName = "/" + imageUrl.substr(0, imageUrl.indexOf(".")) + "-1x" + imageUrl.substr(imageUrl.indexOf("."), imageUrl.length); // Renaming the present image
                        client.putBuffer(buffer, ImageName, function (err, response1) {
                            if (err) {
                                console.log(err);
                                context.fail(err);
                            } else {
                                console.log(response1.socket._httpMessage.url);
                                context.succeed("It worked");
                            }
                        });
                    }
                });
        }
    });
    // Don't call context.done() here, the callback hasn't run yet
    //context.done();
};
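On the Node.js 4.3+ Lambda runtimes the same ending is usually expressed with the handler's callback parameter rather than the context methods; a sketch of the equivalent shape:

exports.handler = function (event, context, callback) {
    // ... same request/resize/putBuffer chain as above, then:
    // on the innermost success path:
    //     callback(null, 'It worked');   // replaces context.succeed(...)
    // on every error path:
    //     callback(err);                 // replaces context.fail(err)
};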

"Missing credentials in config" Returned From Amazon S3 JavaScript SDK Server

I've been following the boilerplate code at http://aws.amazon.com/developers/getting-started/browser/ to get CORS uploading to work with my S3 account.
I've created a Facebook App, changed the CORS configuration XML for my S3 bucket, and filled in the appropriate variables in the JavaScript code. But when I try to upload a file through my webpage, I get the error "Missing credentials in config".
Can someone point me in the right direction to debugging this?
My JS:
var appId = '999943416325248';
var roleArn = 'arn:aws:iam::458182047307:role/s3-test';
var bucketName = 'my-bucket';
var fbUserId;

var bucket = new AWS.S3({
    params: {
        Bucket: bucketName
    }
});

var fileChooser = document.getElementById('video-file-input');
var button = document.getElementById('submit-button');
var results = document.getElementById('results');

button.addEventListener('click', function () {
    var file = fileChooser.files[0];
    if (file) {
        results.innerHTML = '';
        // Object key will be facebook-USERID#/FILE_NAME
        var objKey = 'facebook-' + fbUserId + '/' + file.name;
        var params = {
            Key: objKey,
            ContentType: file.type,
            Body: file,
            ACL: 'public-read'
        };
        bucket.putObject(params, function (err, data) {
            if (err) {
                results.innerHTML = 'ERROR: ' + err;
            } else {
                listObjs();
            }
        });
    } else {
        results.innerHTML = 'Nothing to upload.';
    }
}, false);

function listObjs() {
    var prefix = 'facebook-' + fbUserId;
    bucket.listObjects({
        Prefix: prefix
    }, function (err, data) {
        if (err) {
            results.innerHTML = 'ERROR: ' + err;
        } else {
            var objKeys = "";
            data.Contents.forEach(function (obj) {
                objKeys += obj.Key + "<br>";
            });
            results.innerHTML = objKeys;
        }
    });
}

/*!
 * Login to your application using Facebook.
 * Uses the Facebook SDK for JavaScript available here:
 * https://developers.facebook.com/docs/javascript/gettingstarted/
 */
window.fbAsyncInit = function () {
    FB.init({
        appId: appId
    });
    FB.login(function (response) {
        bucket.config.credentials = new AWS.WebIdentityCredentials({
            ProviderId: 'graph.facebook.com',
            RoleArn: roleArn,
            WebIdentityToken: response.authResponse.accessToken
        });
        fbUserId = response.authResponse.userID;
        button.style.display = 'block';
    });
};

// Load the Facebook SDK asynchronously
(function (d, s, id) {
    var js, fjs = d.getElementsByTagName(s)[0];
    if (d.getElementById(id)) {
        return;
    }
    js = d.createElement(s);
    js.id = id;
    js.src = "//connect.facebook.net/en_US/all.js";
    fjs.parentNode.insertBefore(js, fjs);
}(document, 'script', 'facebook-jssdk'));
Turns out Chrome was blocking pop-ups on my domain, so the Facebook authentication could never complete. Once I allowed pop-ups from the domain, everything worked as expected.
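A small guard in the FB.login callback makes that failure mode visible instead of silent; a sketch (response.authResponse is null when login fails or the pop-up is blocked):

FB.login(function (response) {
    if (!response.authResponse) {
        // Pop-up blocked or login cancelled: no token was obtained, so the
        // SDK will keep reporting missing credentials on the next request.
        results.innerHTML = 'Facebook login failed - check the pop-up blocker.';
        return;
    }
    bucket.config.credentials = new AWS.WebIdentityCredentials({
        ProviderId: 'graph.facebook.com',
        RoleArn: roleArn,
        WebIdentityToken: response.authResponse.accessToken
    });
    fbUserId = response.authResponse.userID;
    button.style.display = 'block';
});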