I am trying to upload a PNG image from my website to S3.
I am sending a multipart/form-data request to a Lambda function, then parsing it with Busboy.
The upload to S3 succeeds and the object shows a Content-Type of image/png, but if I download the file and try to view it, the file is invalid.
What could cause that? I don't see where I'm going wrong here.
Code:
var AWS = require('aws-sdk');
var BluebirdPromise = require("bluebird");
var Busboy = require('busboy');
// async()/await() helpers (the call style below matches the 'asyncawait' package)
var async = require('asyncawait/async');
var await = require('asyncawait/await');
var s3 = BluebirdPromise.promisifyAll(new AWS.S3());
var str = require('string-to-stream');

const SavePFP = async((user_id, req) => {
    var busboy = new Busboy({headers: req.headers});
    let res = await(new Promise((resolve) => {
        busboy.on('file', function (fieldname, file, filename, encoding, mimetype) {
            console.log('File [' + fieldname + ']: filename: ' + filename);
            file.on('data', function (data) {
                console.log('File [' + fieldname + '] got ' + data.length + ' bytes');
                resolve({
                    filename,
                    encoding,
                    mimetype,
                    data
                });
            });
            file.on('end', function () {
                console.log('File [' + fieldname + '] Finished');
            });
        });
        str(req.rawBody).pipe(busboy);
    }));
    let {data, encoding, mimetype} = res;
    var params = {
        Bucket: '...',
        Key: user_id,
        Body: data,
        ACL: 'public-read',
        ContentEncoding: encoding,
        ContentType: mimetype
    };
    console.log("Uploading to AWS S3...");
    let response = await(s3.upload(params).promise());
    console.log("[SavePFP]: " + JSON.stringify(response, null, 2));
    if (response && response.Location) {
        return response.Location;
    }
});

module.exports = {
    SavePFP
};
Thanks in advance!
I just avoided solving this altogether by converting the file to base64 and uploading it from there.
Thanks
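For anyone hitting the same thing: a likely cause of the invalid file is that the promise above resolves on the first 'data' event, so only the first chunk of the stream ever reaches S3 and any PNG larger than one chunk arrives truncated. (If the request passes through API Gateway, binary bodies can also be mangled unless binary media types are enabled, which would explain why the base64 workaround succeeded.) A minimal sketch of the chunk fix, keeping the variable names from the code above:

let res = await(new Promise((resolve) => {
    busboy.on('file', function (fieldname, file, filename, encoding, mimetype) {
        var chunks = [];
        file.on('data', function (data) {
            chunks.push(data); // buffer every chunk instead of resolving on the first one
        });
        file.on('end', function () {
            resolve({filename, encoding, mimetype, data: Buffer.concat(chunks)});
        });
    });
    str(req.rawBody).pipe(busboy);
}));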
Related
I need to call "Signing GET HTTP Requests to Amazon Elasticsearch Service" from a Lambda function.
I have already tried the http package, and a plain HTTP request works fine:
http.get(`http://search-"my_ES_service_name"-xxxxxxxxxxx-6fa27gkk4v3dugykj46tzsipbu.xx-xxxx-x.es.amazonaws.com/${event['index']}/doc/_search/?q=${event['keyParam']}`,
    function(res) {
        var body = '';
        res.on('data', function(d) {
            body += d;
        });
        res.on('end', function() {
            context.succeed(JSON.parse(body.replace(/\n|\r/g, ""))); // Remove any newline/linebreak chars
        });
    }).on('error', function(e) {
        console.log("Error: " + e.message);
        context.done(null, 'FAILURE');
    });
var AWS = require('aws-sdk');

exports.handler = function(event, context) {
    var region = 'xx-xxxx-x';
    var domain = 'http://search-"my_ES_service_name"-xxxxxxxxxxx-6fa27gkk4v3dugykj46tzsipbu.xx-xxxx-x.es.amazonaws.com';
    var index = event['index'];
    var type = `_doc/_search`;
    var endpoint = new AWS.Endpoint(domain);
    var request = new AWS.HttpRequest(endpoint, region);
    request.method = 'GET';
    request.path += index + '/' + type + '?q=_doc_key_here:_doc_key_value';
    request.headers['host'] = domain;
    // e.g. the generated URL looks like: http://search-"my_ES_service_name"-xxxxxxxxxxx-6fa27gkk4v3dugykj46tzsipbu.xx-xxxx-x.es.amazonaws.com/node-test/doc/_search/?q=user_name:johndoe
    var credentials = new AWS.EnvironmentCredentials('AWS');
    var signer = new AWS.Signers.V4(request, 'es');
    signer.addAuthorization(credentials, new Date());
    var client = new AWS.HttpClient();
    client.handleRequest(request, null, function(response) {
        console.log("response: ", response.statusCode);
        var responseBody = '';
        response.on('data', function (chunk) {
            responseBody += chunk;
        });
        response.on('end', function (chunk) {
            console.log('Response body: ' + responseBody);
            context.succeed(responseBody);
        });
    }, function(error) {
        console.log('Error: ' + error);
        context.done(error);
    });
}
When I try to make the signed GET request using the function above, it throws the following error:
response: 400 Bad Request
Only one thing was missing here: I added encodeURI() to the request path, and it works fine for me.
var index = event['index'];
var type = `_doc/_search?q=_doc_key_here:_doc_key_value`;
request.method = 'GET';
request.path += index + '/' + encodeURI(type);
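One plausible reading of the 400: the HTTP request line and the SigV4 canonical request both expect a URI-encoded path, so a raw space or non-ASCII character in the query value is enough to make the server reject the request. For illustration, with a hypothetical query value containing a space:

// encodeURI leaves ':', '?' and '=' alone but escapes spaces and non-ASCII characters
encodeURI('_doc/_search?q=title:amazon web services');
// => '_doc/_search?q=title:amazon%20web%20services'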
I hope this helps others. Thanks!
I am trying to upload video files to an AWS S3 bucket using the JavaScript AWS SDK, but I am not able to upload them.
JS code:
function uploadFile(dir) {
    var files = document.getElementById('photoupload').files;
    if (!files.length) {
        return alert('Please choose a file to upload first.');
    }
    var file = files[0];
    var fileName = file.name;
    var albumPhotosKey = encodeURIComponent(dir) + '//';
    var photoKey = albumPhotosKey + fileName;
    s3.putObject({
        Key: photoKey,
        Body: file,
        ContentType: file.type,
        ACL: 'public-read'
    }, function (err, data) {
        if (err) {
            // alert() takes a single argument, so concatenate the message
            return alert('There was an error uploading your file: ' + err.message);
        }
        alert('Successfully uploaded file.');
        // viewAlbum(albumName);
    });
}
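One detail that stands out in the snippet: albumPhotosKey ends with '//', so objects get keys like dir//video.mp4, which introduces an empty path segment (and an empty "folder" level in the S3 console). A single slash is probably what was intended:

var albumPhotosKey = encodeURIComponent(dir) + '/'; // single '/' avoids an empty path segment in the key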
I want to use the AWS S3 JS SDK to upload multiple files from a browser. I can get one file just fine, but can't get multiple. When I select multiple files, only the last file gets uploaded. Here's the code:
//head
<script src="https://sdk.amazonaws.com/js/aws-sdk-2.213.1.min.js"></script>
<script type="text/javascript" src="https://code.jquery.com/jquery-3.1.1.min.js"></script>
//body
<input type="file" id="file-chooser" multiple />
<button id="upload-button">Upload to S3</button>
<div id="results"></div>
<script type="text/javascript">
    AWS.config.region = 'us-east-1';
    AWS.config.credentials = new AWS.CognitoIdentityCredentials({
        IdentityPoolId: '###'
    });
    AWS.config.credentials.get(function(err) {
        if (err) alert(err);
        console.log(AWS.config.credentials);
    });
    var bucketName = 'c2networkingfiles'; // Enter your bucket name
    var bucket = new AWS.S3({
        params: {
            Bucket: bucketName
        }
    });
    var fileChooser = document.getElementById('file-chooser');
    var button = document.getElementById('upload-button');
    var results = document.getElementById('results');
    button.addEventListener('click', function() {
        var fileArr = fileChooser.files;
        if (fileArr[0]) {
            for (var k = 0; k < fileArr.length; k++) {
                var file = fileArr[k];
                var fileName = file.name;
                // test to see if file already exists
                var objKey2 = 'testing/' + file.name;
                var objE = new AWS.S3();
                var params2 = {
                    Bucket: bucketName,
                    Key: objKey2
                };
                objE.headObject(params2, function(err, data) {
                    if (data) {
                        results.innerHTML = 'File is present. Uploaded on ' + data.LastModified;
                        //console.log(data);
                    } else {
                        //results.innerHTML = '';
                        var objKey = 'testing/' + file.name;
                        var params = {
                            Key: objKey,
                            ContentType: file.type,
                            Body: file,
                            ACL: 'public-read'
                        };
                        bucket.putObject(params, function(err, data) {
                            if (err) {
                                results.innerHTML = 'ERROR: ' + err;
                            } else {
                                //listObjs();
                                results.append('SUCCESS! ' + fileName + ' uploaded. <br />');
                            }
                        });
                    }
                });
            }
        } else {
            results.append('Nothing to upload.');
        }
    }, false);
</script>
The original code example didn't have the loop, and I'm wondering if this isn't working because there's no mechanism to wait until the first file has finished before the loop starts the next upload.
If this is the answer, is there a way to check the upload status and wait until the first file is complete before the loop is allowed to continue?
If this isn't the answer, what else could be happening?
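For what it's worth, this looks less like a missing wait and more like a closure issue: var is function-scoped, so by the time each asynchronous headObject callback runs, the loop has already finished and file, fileName, and objKey all point at the last file. A minimal sketch of the fix, assuming ES6 is acceptable (block-scoped let gives each iteration its own bindings; the surrounding variables are the ones from the code above):

for (let k = 0; k < fileArr.length; k++) {
    // 'let' gives each iteration its own 'file'/'fileName'/'objKey',
    // so the async callbacks below capture the right one
    let file = fileArr[k];
    let fileName = file.name;
    let objKey = 'testing/' + file.name;
    objE.headObject({Bucket: bucketName, Key: objKey}, function(err, data) {
        if (data) {
            results.innerHTML = 'File is present. Uploaded on ' + data.LastModified;
        } else {
            bucket.putObject({Key: objKey, ContentType: file.type, Body: file, ACL: 'public-read'},
                function(err, data) {
                    results.append(err ? 'ERROR: ' + err : 'SUCCESS! ' + fileName + ' uploaded. <br />');
                });
        }
    });
}

The uploads still run in parallel, which S3 handles fine; waiting for each one to finish isn't required for correctness.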
I am currently trying to load images from my website to AWS S3. I have the upload to the server working, but when I try to view the images, they download instead of displaying. I read there is a way to set the file type so this would not happen, but I am not sure how to do that. Any help would be great.
router.post('/heroes/createNewHeroes', function(req, res) {
    var formidable = require('formidable'),
        http = require('http'),
        util = require('util');
    var form = new formidable.IncomingForm();
    form.parse(req, function(err, fields, files) {
        console.log(fields);
        console.log(files);
        // Load the AWS SDK for Node.js
        var AWS = require('aws-sdk');
        var shortid = require('shortid');
        var fs = require('fs');
        var fileStream = fs.createReadStream(files.asset.path);
        var newFilename = shortid.generate() + "_" + files.asset.name;
        // Set your region for future requests.
        AWS.config.region = 'us-west-2';
        AWS.config.accessKeyId = 'access Key';
        AWS.config.secretAccessKey = 'secret Key';
        console.log(newFilename);
        fileStream.on('error', function (err) {
            if (err) { throw err; }
        });
        fileStream.on('open', function () {
            var s3bucket = new AWS.S3({params: {Bucket: ' '}});
            s3bucket.createBucket(function() {
                var params = {Key: newFilename, Body: fileStream};
                s3bucket.upload(params, function(err, data) {
                    if (err) {
                        console.log("Error uploading data: ", err);
                    } else {
                        console.log("Successfully uploaded data");
                        projectX.createHeroes(['plantTypes', 'asset', 'cost', 'energy', 'isSunProducer', 'isShooter', 'isExploding', 'sunFrequency', 'shootingFrequency', 'damage'], [fields.plantTypes, newFilename, fields.cost, fields.energy, fields.isSunProducer, fields.isShooter, fields.isExploding, fields.sunFrequency, fields.shootingFrequency, fields.damage], function(data) {
                            res.redirect('/heroes');
                        });
                    }
                });
            });
        });
    });
});
Set ContentType explicitly in your upload params so S3 stores the correct MIME type and browsers render the image inline instead of downloading it:
var params = {Key: newFilename, ContentType: 'image/png', Body: fileStream};
http://docs.aws.amazon.com/AWSJavaScriptSDK/latest/AWS/S3.html#upload-property
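If the uploads aren't always PNGs, formidable reports the client-supplied MIME type on the parsed file object, so (assuming the formidable v1 API used above) something like this may be preferable to hardcoding the type:

// files.asset.type is the MIME type formidable parsed from the multipart upload
var params = {Key: newFilename, ContentType: files.asset.type, Body: fileStream};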
Just set contentType: multerS3.AUTO_CONTENT_TYPE. It will work.
Example:
var upload = multer({
    storage: multerS3({
        s3: s3,
        bucket: 'some-bucket',
        contentType: multerS3.AUTO_CONTENT_TYPE,
        key: function (req, file, cb) {
            cb(null, Date.now().toString())
        }
    })
})
Visit this link for more details https://github.com/badunk/multer-s3
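For context, here is a minimal sketch of how that upload middleware is typically wired into an Express route (the route and field names here are illustrative, not from the original post):

const express = require('express');
const app = express();

// 'photo' must match the name attribute of the file input in the form
app.post('/upload', upload.single('photo'), function (req, res) {
    // multer-s3 attaches the resulting S3 URL to req.file.location
    res.json({url: req.file.location});
});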
This helped me:
storage: multerS3({
    s3: s3,
    bucket: "bucketname",
    acl: "public-read",
    contentType: multerS3.AUTO_CONTENT_TYPE,
    key: function(req, file, cb) {
        console.log("req.file", file);
        cb(null, `${Date.now()}-${file.originalname}`);
    }
})
I am working in an AWS Lambda function. I am successfully making an API call to the NASA APOD and getting back the values. I want to take the URL for the image, download that image, and then upload it to S3. I am getting an error when I try to access the "test.jpg" image: "Error: EACCES: permission denied, open 'test.jpg'". If I move the s3Bucket.putObject outside the http.request, data is null. I know I am missing something simple. Thoughts?
function GetAPOD(intent, session, callback) {
    var nasa_api_key = 'demo-key'
      , nasa_api_path = '/planetary/apod?api_key=' + nasa_api_key;
    var options = {
        host: 'api.nasa.gov',
        port: 443,
        path: nasa_api_path,
        method: 'GET'
    };
    var req = https.request(options, function (res) {
        res.setEncoding('utf-8');
        var responseString = '';
        res.on('data', function (data) {
            responseString += data;
        });
        res.on('end', function () {
            console.log('API Response: ' + responseString);
            var responseObject = JSON.parse(responseString)
              , image_date = responseObject['date']
              , image_title = responseObject['title']
              , image_url = responseObject['url']
              , image_hdurl = responseObject['hdurl']
              , image_desc = responseObject['explanation'];
            var s3Bucket = new AWS.S3({ params: {Bucket: 'nasa-apod'} });
            var fs = require('fs');
            var file = fs.createWriteStream("test.jpg");
            var request = http.get(image_url, function(response) {
                response.pipe(file);
                var data = {Key: "test.jpg", Body: file};
                s3Bucket.putObject(data, function(err, data) {
                    if (err) {
                        console.log('Error uploading data: ', data);
                    } else {
                        console.log('successfully uploaded the image!');
                    }
                });
            });
        });
    });
    req.on('error', function (e) {
        console.error('HTTP error: ' + e.message);
    });
    //req.write();
    req.end();
}
You need to be writing the file to /tmp. That's the only directory in the Lambda environment that you will have write access to.
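A minimal sketch of that change against the code above (note it also waits for the download to finish before reading the file back, since the original handed putObject the write stream mid-download):

// /tmp is the only writable directory in the Lambda sandbox
var file = fs.createWriteStream("/tmp/test.jpg");
var request = http.get(image_url, function(response) {
    response.pipe(file);
    file.on('finish', function() {
        // only read the file back once the download has fully completed
        var data = {Key: "test.jpg", Body: fs.createReadStream("/tmp/test.jpg")};
        s3Bucket.putObject(data, function(err) {
            if (err) console.log('Error uploading data: ', err);
        });
    });
});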
I got it! Thank you Mark B for the help. I was able to get the data from the stream without saving it locally and then write it to the bucket. I did have to change my IAM role to allow s3:PutObject.
function GetAPOD(intent, session, callback) {
    var nasa_api_key = 'demo-key'
      , nasa_api_path = '/planetary/apod?api_key=' + nasa_api_key;
    var options = {
        host: 'api.nasa.gov',
        port: 443,
        path: nasa_api_path,
        method: 'GET'
    };
    var req = https.request(options, function (res) {
        res.setEncoding('utf-8');
        var responseString = '';
        res.on('data', function (data) {
            responseString += data;
        });
        res.on('end', function () {
            // console.log('API Response: ' + responseString);
            var responseObject = JSON.parse(responseString)
              , image_date = responseObject['date']
              , image_title = responseObject['title']
              , image_url = responseObject['url']
              , image_hdurl = responseObject['hdurl']
              , image_desc = responseObject['explanation'];
            var image_name = image_date + '.jpg';
            var s3Bucket = new AWS.S3({ params: {Bucket: 'nasa-apod'} });
            var request = http.get(image_url, function(response) {
                var chunks = [];
                response.on('data', function (data) {
                    // collect every chunk; a plain assignment would keep only the last one
                    chunks.push(data);
                });
                response.on('end', function () {
                    var image_body = Buffer.concat(chunks);
                    var param_data = {Key: image_name, Body: image_body, ContentType: "image/jpeg", ContentLength: image_body.length};
                    s3Bucket.putObject(param_data, function(err, output_data) {
                        if (err) {
                            console.log('Error uploading data to S3: ' + err);
                        }
                    });
                });
            });
        });
    });
    req.on('error', function (e) {
        console.error('HTTP error: ' + e.message);
    });
    req.end();
}
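As a design note, buffering the chunks works, but s3.upload() (unlike putObject) accepts a readable stream as the Body, so the response could also be piped straight through without holding the whole image in memory. A sketch, assuming the same s3Bucket and image_name variables as above:

// s3.upload() manages the transfer itself, so a stream Body needs no ContentLength
var request = http.get(image_url, function(response) {
    s3Bucket.upload({Key: image_name, Body: response, ContentType: 'image/jpeg'},
        function(err, data) {
            if (err) console.log('Error uploading data to S3: ' + err);
        });
});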