I am trying to upload video files to an AWS S3 bucket using the JavaScript AWS SDK, but the upload never succeeds.
JS code:
function uploadFile(dir) {
    var files = document.getElementById('photoupload').files;
    if (!files.length) {
        return alert('Please choose a file to upload first.');
    }
    var file = files[0];
    var fileName = file.name;
    // Build the S3 object key from the directory prefix and the file name
    var albumPhotosKey = encodeURIComponent(dir) + '//';
    var photoKey = albumPhotosKey + fileName;
    s3.putObject({
        Key: photoKey,
        Body: file,
        ContentType: file.type,
        ACL: 'public-read'
    }, function (err, data) {
        if (err) {
            return alert('There was an error uploading your file: ' + err.message);
        }
        alert('Successfully uploaded file.');
        // viewAlbum(albumName);
    });
}
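For large video files it may also help to use the SDK's managed upload (s3.upload), which performs multipart uploads and exposes progress events, making failures easier to diagnose. A minimal sketch, assuming s3, photoKey, and file are the same objects as above and the credentials allow the upload:

// Sketch: managed upload with progress logging, useful for large video files.
// Assumes `s3`, `photoKey`, and `file` are the same objects as in the code above.
s3.upload({
    Key: photoKey,
    Body: file,
    ContentType: file.type,
    ACL: 'public-read'
}).on('httpUploadProgress', function (evt) {
    console.log('Uploaded ' + evt.loaded + ' of ' + evt.total + ' bytes');
}).send(function (err, data) {
    if (err) {
        console.error(err);                              // surface the real failure reason
        return alert('Upload failed: ' + err.message);
    }
    alert('Successfully uploaded ' + data.Key);
});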
I am trying to upload a file to S3 using Lambda behind API Gateway, with the code below. The file is sent to S3, as shown in the images 'test_using_postman.png' and 's3_file.png', but when I try to open it, I get a message that the file is corrupted, as shown in 'open_file_with_problem.png'. The images are below.
Does anyone know what is happening?
Thank you!
Images:
Test Using Postman: https://drive.google.com/open?id=1eenEnvuMQU28iI_Ltqzpw9OlCvIcY5Fg
S3 File: https://drive.google.com/open?id=1b1_CmIhzfc8mQj_rwCK6Xy30gzoP6HcK
Open File with problem: https://drive.google.com/open?id=1o54rLB9wWF1KxdUOkq3xAGVET7UWoqgf
Node.js code:
const crypto = require('crypto');
var AWS = require('aws-sdk');
AWS.config.update({region: 'us-east-1'});

module.exports.arquivo_upload = (event, context, callback) => {
    let BUCKET_NAME = 'XXXXX';
    let fileContent = event.body;
    let filePath = 'upload/';
    // Generate a unique file name from a hash of the prefix plus the current timestamp
    let fileName = crypto.createHash('md5').update('niby_' + Date.now()).digest('hex');
    var s3 = new AWS.S3({apiVersion: '2006-03-01'});
    var uploadParams = {
        Bucket: BUCKET_NAME,
        Key: filePath + fileName + '.png',
        Body: fileContent,
        ContentType: "image/png"
    };
    s3.upload(uploadParams, function (err, data) {
        if (err) {
            console.error(err);
            callback(null, {
                statusCode: 400,
                body: JSON.stringify(err),
            });
        } else if (data) {
            //TODO: Call other api to save file name
            console.info(data.Location);
            callback(null, {
                statusCode: 200,
                body: JSON.stringify(data.Location),
            });
        }
    });
}
I resolved this problem! I now send the file as base64 to API Gateway and set the parameter ContentEncoding: 'base64' in the Lambda function.
var uploadParams = {
    Bucket: config.s3.bucket_name,
    Key: config.s3.file_path + fileName + obj.extension,
    Body: buf,
    ContentEncoding: 'base64',
    ContentType: obj.content_type,
    ACL: "public-read"
};
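The snippet above assumes buf already holds the decoded bytes. A minimal sketch of how buf might be produced from the base64 request body (the JSON field name and the data-URL prefix handling are assumptions about the client payload):

// Sketch: turn a base64-encoded request body into a Buffer for the S3 Body.
let payload = JSON.parse(event.body);                                      // hypothetical JSON wrapper around the file
let base64Data = payload.file.replace(/^data:\w+\/[\w.+-]+;base64,/, '');  // strip a data-URL prefix if present
let buf = Buffer.from(base64Data, 'base64');                               // raw bytes sent to S3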
I want to use the AWS S3 JS SDK to upload multiple files from a browser. I can upload one file just fine, but I can't get multiple files to work. When I select multiple files, only the last one gets uploaded. Here's the code:
//head
<script src="https://sdk.amazonaws.com/js/aws-sdk-2.213.1.min.js"></script>
<script type="text/javascript" src="https://code.jquery.com/jquery-3.1.1.min.js"></script>

//body
<input type="file" id="file-chooser" multiple />
<button id="upload-button">Upload to S3</button>
<div id="results"></div>
<script type="text/javascript">
    AWS.config.region = 'us-east-1';
    AWS.config.credentials = new AWS.CognitoIdentityCredentials({
        IdentityPoolId: '###'
    });
    AWS.config.credentials.get(function(err) {
        if (err) alert(err);
        console.log(AWS.config.credentials);
    });

    var bucketName = 'c2networkingfiles'; // Enter your bucket name
    var bucket = new AWS.S3({
        params: {
            Bucket: bucketName
        }
    });

    var fileChooser = document.getElementById('file-chooser');
    var button = document.getElementById('upload-button');
    var results = document.getElementById('results');

    button.addEventListener('click', function() {
        var fileArr = fileChooser.files;
        if (fileArr[0]) {
            for (k = 0; k < fileArr.length; k++) {
                var file = fileArr[k];
                var fileName = file.name;

                // test to see if file already exists
                var objKey2 = 'testing/' + file.name;
                var objE = new AWS.S3();
                var params2 = {
                    Bucket: bucketName,
                    Key: objKey2
                };
                objE.headObject(params2, function(err, data) {
                    if (data) {
                        results.innerHTML = 'File is present. Uploaded on ' + data.LastModified;
                        //console.log(data);
                    } else {
                        //results.innerHTML = '';
                        var objKey = 'testing/' + file.name;
                        var params = {
                            Key: objKey,
                            ContentType: file.type,
                            Body: file,
                            ACL: 'public-read'
                        };
                        bucket.putObject(params, function(err, data) {
                            if (err) {
                                results.innerHTML = 'ERROR: ' + err;
                            } else {
                                //listObjs();
                                results.append('SUCCESS! ' + fileName + ' uploaded. <br />');
                            }
                        });
                    }
                });
            }
        } else {
            results.append('Nothing to upload.');
        }
    }, false);
</script>
The original code example didn't have the loop, and I'm wondering if this isn't working because there is no mechanism to wait until the first file has finished before the loop starts the next upload.
If that is the problem, is there a way to check the upload status and wait until the first file is complete before the loop is allowed to continue?
If that isn't the problem, what else could be happening?
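One likely culprit in code like this, rather than missing waits, is that the var declarations inside the loop are function-scoped, so every asynchronous headObject/putObject callback ends up seeing the values from the last iteration. A minimal sketch of the loop with block-scoped bindings, everything else assumed unchanged:

// Sketch: use block-scoped bindings so each async callback captures its own file.
for (let k = 0; k < fileArr.length; k++) {
    const file = fileArr[k];
    const fileName = file.name;
    const objKey = 'testing/' + fileName;

    bucket.headObject({ Bucket: bucketName, Key: objKey }, function(err, data) {
        if (data) {
            results.innerHTML = 'File is present. Uploaded on ' + data.LastModified;
            return;
        }
        bucket.putObject({
            Key: objKey,
            ContentType: file.type,
            Body: file,
            ACL: 'public-read'
        }, function(err, data) {
            if (err) {
                results.innerHTML = 'ERROR: ' + err;
            } else {
                results.append('SUCCESS! ' + fileName + ' uploaded. ');
            }
        });
    });
}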
I am trying to upload an image (PNG) from my website to S3.
I am sending a multipart/form-data request to a Lambda function, then parsing it with Busboy.
It uploads to S3 fine and shows as image/png, but if I download the file and try to view it, the file is invalid.
What could cause that? I don't see where I'm going wrong here.
Code:
var AWS = require('aws-sdk');
var BluebirdPromise = require("bluebird");
var Busboy = require('busboy');
// async()/await() below appear to come from the 'asyncawait' package (assumption based on the call style)
var async = require('asyncawait/async');
var await = require('asyncawait/await');
var s3 = BluebirdPromise.promisifyAll(new AWS.S3());
var str = require('string-to-stream');

const SavePFP = async((user_id, req) => {
    var busboy = new Busboy({headers: req.headers});
    let res = await(new Promise((resolve) => {
        busboy.on('file', function (fieldname, file, filename, encoding, mimetype) {
            console.log('File [' + fieldname + ']: filename: ' + filename);
            file.on('data', function (data) {
                console.log('File [' + fieldname + '] got ' + data.length + ' bytes');
                resolve({
                    filename,
                    encoding,
                    mimetype,
                    data
                });
            });
            file.on('end', function () {
                console.log('File [' + fieldname + '] Finished');
            });
        });
        str(req.rawBody).pipe(busboy);
    }));
    let {data, encoding, mimetype} = res;
    var params = {
        Bucket: '...',
        Key: user_id,
        Body: data,
        ACL: 'public-read',
        ContentEncoding: encoding,
        ContentType: mimetype
    };
    console.log("Uploading to AWS S3...");
    let response = await(s3.upload(params).promise());
    console.log("[SavePFP]: " + JSON.stringify(response, null, 2));
    if (response && response.Location) {
        return response.Location;
    }
});

module.exports = {
    SavePFP
};
Thanks in advance!
I just avoided solving this altogether by converting the file to base64 and uploading it from there.
Thanks
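For anyone who wants to keep the multipart/Busboy route, one plausible cause of the corruption above is that Busboy's file stream emits 'data' once per chunk while the handler resolves on the very first chunk. A sketch that buffers the whole file before resolving, with the surrounding handler assumed unchanged:

// Sketch: accumulate every chunk Busboy emits before resolving the promise.
busboy.on('file', function (fieldname, file, filename, encoding, mimetype) {
    var chunks = [];
    file.on('data', function (chunk) {
        chunks.push(chunk);                      // collect each chunk as it arrives
    });
    file.on('end', function () {
        resolve({
            filename,
            encoding,
            mimetype,
            data: Buffer.concat(chunks)          // the complete file body for S3
        });
    });
});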
I am currently trying to upload images from my website to AWS S3. The upload to the server works, but when I try to view the images they download instead of displaying in the browser. I read that there is a way to set the file type so this would not happen, but I am not sure how to do that. Any help would be great.
router.post('/heroes/createNewHeroes', function(req, res) {
    var formidable = require('formidable'),
        http = require('http'),
        util = require('util');
    var form = new formidable.IncomingForm();
    form.parse(req, function(err, fields, files) {
        console.log(fields);
        console.log(files);
        // Load the AWS SDK for Node.js
        var AWS = require('aws-sdk');
        var shortid = require('shortid');
        var fs = require('fs');
        var fileStream = fs.createReadStream(files.asset.path);
        var newFilename = shortid.generate() + "_" + files.asset.name;
        // Set your region for future requests.
        AWS.config.region = 'us-west-2';
        AWS.config.accessKeyId = 'access Key';
        AWS.config.secretAccessKey = 'secret Key';
        console.log(newFilename);
        fileStream.on('error', function (err) {
            if (err) { throw err; }
        });
        fileStream.on('open', function () {
            var s3bucket = new AWS.S3({params: {Bucket: ' '}});
            s3bucket.createBucket(function() {
                var params = {Key: newFilename, Body: fileStream};
                s3bucket.upload(params, function(err, data) {
                    if (err) {
                        console.log("Error uploading data: ", err);
                    } else {
                        console.log("Successfully uploaded data");
                        projectX.createHeroes(['plantTypes', 'asset', 'cost', 'energy', 'isSunProducer', 'isShooter', 'isExploding', 'sunFrequency', 'shootingFrequency', 'damage'], [fields.plantTypes, newFilename, fields.cost, fields.energy, fields.isSunProducer, fields.isShooter, fields.isExploding, fields.sunFrequency, fields.shootingFrequency, fields.damage], function(data) {
                            res.redirect('/heroes')
                        });
                    }
                });
            });
        });
    });
});
Set ContentType in the upload params so S3 serves the file with the right MIME type and the browser displays it instead of downloading it:
var params = {Key: newFilename, ContentType: 'image/png', Body: fileStream};
http://docs.aws.amazon.com/AWSJavaScriptSDK/latest/AWS/S3.html#upload-property
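If the uploads are not always PNGs, a possible refinement is to derive the content type from the uploaded file's name; a sketch assuming the mime-types package is installed:

// Sketch: pick the ContentType from the file extension instead of hardcoding it.
var mime = require('mime-types');

var params = {
    Key: newFilename,
    Body: fileStream,
    // fall back to a generic binary type when the extension is unknown
    ContentType: mime.lookup(files.asset.name) || 'application/octet-stream'
};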
Just set contentType: multerS3.AUTO_CONTENT_TYPE and it will work.
Example:
var upload = multer({
    storage: multerS3({
        s3: s3,
        bucket: 'some-bucket',
        contentType: multerS3.AUTO_CONTENT_TYPE,
        key: function (req, file, cb) {
            cb(null, Date.now().toString())
        }
    })
})
Visit this link for more details: https://github.com/badunk/multer-s3
This helped me:
storage: multerS3({
    s3: s3,
    bucket: "bucketname",
    acl: "public-read",
    contentType: multerS3.AUTO_CONTENT_TYPE,
    key: function(req, file, cb) {
        console.log("req.file", file);
        cb(null, `${Date.now()}-${file.originalname}`);
    }
})
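Neither snippet shows the surrounding setup. A minimal sketch of how the pieces might fit together in an Express app, where the bucket name, form field name, and route are placeholders:

// Sketch: wiring multer + multer-s3 into an Express route (names are placeholders).
var express = require('express');
var AWS = require('aws-sdk');
var multer = require('multer');
var multerS3 = require('multer-s3');

var app = express();
var s3 = new AWS.S3();

var upload = multer({
    storage: multerS3({
        s3: s3,
        bucket: 'some-bucket',
        contentType: multerS3.AUTO_CONTENT_TYPE,    // detect the MIME type from the file
        key: function (req, file, cb) {
            cb(null, Date.now() + '-' + file.originalname);
        }
    })
});

// 'photo' is the form field name; adjust it to match the upload form
app.post('/upload', upload.single('photo'), function (req, res) {
    res.send('Uploaded to ' + req.file.location);    // multer-s3 adds .location to req.file
});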
I have created an AWS Lambda function to resize any image that is put in my S3 bucket, with an event on every object creation in the bucket to trigger the function. My handler code for Lambda is as follows:
var async = require('async'),
    gm = require('gm').subClass({
        imageMagick: true
    }), // Enable ImageMagick integration.
    request = require('request').defaults({
        encoding: null
    });
var knox = require('knox');
var client = knox.createClient({
    key: 'myKey',
    secret: 'mySecretKey',
    bucket: 'MyBucketName'
});

// Hardcoded values used to build the image URL
var s3value = '.s3.amazonaws.com/';
var https = 'https://';

exports.handler = function (event, context) {
    var srcBucket = event.Records[0].s3.bucket.name;
    var srcKey = event.Records[0].s3.object.key;
    var imageUrl = https + srcBucket + s3value + srcKey; // HTTP URL for the image stored in the S3 bucket
    request(imageUrl, function (err, res, res1) {
        if (err) {
            console.log(err);
        } else {
            gm(res1).resize(120)
                .toBuffer('jpg', function (err, buffer) {
                    if (err) {
                        console.log(err);
                    } else {
                        var ImageName = "/" + imageUrl.substr(0, imageUrl.indexOf(".")) + "-1x" + imageUrl.substr(imageUrl.indexOf("."), imageUrl.length); // Rename the present image
                        client.putBuffer(buffer, ImageName, function (err, response1) {
                            if (err) {
                                console.log(err);
                            } else {
                                console.log(response1.socket._httpMessage.url);
                            }
                        });
                    }
                });
        }
    });
    context.done();
}
My problem is that when I store an image in S3, the resized image is not created in my S3 bucket. I am unable to understand why. Can somebody help me with it?
It looks like you are calling context.done() outside the callback function for request(). This will end the Lambda function before any image resizing completes. You should only call context.done() when all processing has completed or failed.
exports.handler = function (event, context) {
    var srcBucket = event.Records[0].s3.bucket.name;
    var srcKey = event.Records[0].s3.object.key;
    var imageUrl = https + srcBucket + s3value + srcKey; // HTTP URL for the image stored in the S3 bucket
    request(imageUrl, function (err, res, res1) {
        if (err) {
            console.log(err);
            context.fail(err);
        } else {
            gm(res1).resize(120)
                .toBuffer('jpg', function (err, buffer) {
                    if (err) {
                        console.log(err);
                        context.fail(err);
                    } else {
                        var ImageName = "/" + imageUrl.substr(0, imageUrl.indexOf(".")) + "-1x" + imageUrl.substr(imageUrl.indexOf("."), imageUrl.length); // Rename the present image
                        client.putBuffer(buffer, ImageName, function (err, response1) {
                            if (err) {
                                console.log(err);
                                context.fail(err);
                            } else {
                                console.log(response1.socket._httpMessage.url);
                                context.succeed("It worked");
                            }
                        });
                    }
                });
        }
    });
    // Don't call context.done() here, the callback hasn't run yet
    //context.done();
}
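As a side note, newer Node.js Lambda runtimes usually express the same flow with the handler callback (or an async handler) instead of context.succeed/context.fail; a minimal sketch of that shape, with the resize-and-upload step left as a hypothetical helper:

// Sketch: callback-style handler on newer Node.js runtimes (resize logic elided).
exports.handler = function (event, context, callback) {
    var srcBucket = event.Records[0].s3.bucket.name;
    var srcKey = event.Records[0].s3.object.key;

    // resizeAndUpload is a hypothetical helper wrapping the gm/putBuffer steps above
    resizeAndUpload(srcBucket, srcKey, function (err, result) {
        if (err) {
            callback(err);              // marks the invocation as failed
        } else {
            callback(null, result);     // marks the invocation as succeeded
        }
    });
    // No context.done() here either -- the callback ends the invocation.
};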