AWS S3 JS SDK to upload multiple files from a browser

I want to use the AWS S3 JS SDK to upload multiple files from a browser. I can upload one file just fine, but I can't upload multiple. When I select multiple files, only the last file is uploaded. Here's the code:
//head
<script src="https://sdk.amazonaws.com/js/aws-sdk-2.213.1.min.js"></script>
<script type="text/javascript" src="https://code.jquery.com/jquery-3.1.1.min.js"></script>

//body
<input type="file" id="file-chooser" multiple />
<button id="upload-button">Upload to S3</button>
<div id="results"></div>

<script type="text/javascript">
    AWS.config.region = 'us-east-1';
    AWS.config.credentials = new AWS.CognitoIdentityCredentials({
        IdentityPoolId: '###'
    });
    AWS.config.credentials.get(function(err) {
        if (err) alert(err);
        console.log(AWS.config.credentials);
    });

    var bucketName = 'c2networkingfiles'; // Enter your bucket name
    var bucket = new AWS.S3({
        params: {
            Bucket: bucketName
        }
    });

    var fileChooser = document.getElementById('file-chooser');
    var button = document.getElementById('upload-button');
    var results = document.getElementById('results');

    button.addEventListener('click', function() {
        var fileArr = fileChooser.files;
        if (fileArr[0]) {
            for (var k = 0; k < fileArr.length; k++) {
                var file = fileArr[k];
                var fileName = file.name;
                // test to see if the file already exists
                var objKey2 = 'testing/' + file.name;
                var objE = new AWS.S3();
                var params2 = {
                    Bucket: bucketName,
                    Key: objKey2
                };
                objE.headObject(params2, function(err, data) {
                    if (data) {
                        results.innerHTML = 'File is present. Uploaded on ' + data.LastModified;
                        //console.log(data);
                    } else {
                        //results.innerHTML = '';
                        var objKey = 'testing/' + file.name;
                        var params = {
                            Key: objKey,
                            ContentType: file.type,
                            Body: file,
                            ACL: 'public-read'
                        };
                        bucket.putObject(params, function(err, data) {
                            if (err) {
                                results.innerHTML = 'ERROR: ' + err;
                            } else {
                                //listObjs();
                                results.append('SUCCESS! ' + fileName + ' uploaded. <br />');
                            }
                        });
                    }
                });
            }
        } else {
            results.append('Nothing to upload.');
        }
    }, false);
</script>
The original code example didn't have the loop, and I'm wondering if this isn't working because there is no mechanism to wait for the first file to finish before the loop starts the next upload.
If that is the answer, is there a way to check the upload status and wait until the first file is complete before the loop is allowed to continue?
If that isn't the answer, what else could be happening?
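One pattern that produces exactly this symptom is the function-scoped var file: every headObject/putObject callback fires after the loop has already finished, so each callback sees the last file. A minimal sketch of scoping each file per iteration; the uploadOne helper name is hypothetical, and it assumes the same bucket client and testing/ prefix as above:

    // Hypothetical helper: each call gets its own `file` binding, so the
    // async callbacks capture the right file instead of the loop's last value.
    function uploadOne(file) {
        var params = {
            Key: 'testing/' + file.name,
            ContentType: file.type,
            Body: file,
            ACL: 'public-read'
        };
        bucket.putObject(params, function(err, data) {
            if (err) {
                results.innerHTML += 'ERROR: ' + err + '<br />';
            } else {
                results.innerHTML += 'SUCCESS! ' + file.name + ' uploaded.<br />';
            }
        });
    }

    for (var k = 0; k < fileChooser.files.length; k++) {
        uploadOne(fileChooser.files[k]);
    }

The uploads still run in parallel here; S3 accepts concurrent puts, so strict sequencing is usually unnecessary.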

Related

Cognito User Pools is returning null user object

Folks, I am getting a similar error where getCurrentUser is returning null. I think I partially know the reason.
<script type="text/javascript" src="https://sdk.amazonaws.com/js/aws-sdk-2.7.16.min.js" ></script>
<script type="text/javascript" src="/web/amazon-cognito-identity.js"></script>
<script type="text/javascript" src="/web/aws-cognito-sdk.js"></script>
import * as AWSCognito from 'amazon-cognito-identity';
const region = 'us-east-1';
const docClient = createDynamoDbClient();
function createDynamoDbClient() {
AWS.config.update({
region: "us-east-1", endpoint: 'https://dynamodb.us-east-1.amazonaws.com'});
userAuth();
return new AWS.DynamoDB();
}
function userAuth() {
var data = {
UserPoolId: '***********',
ClientId: '************'
};
var userPool = new AWSCognito.CognitoIdentityServiceProvider.CognitoUserPool(data);
var cognitoUser = userPool.getCurrentUser();
try {
if (cognitoUser != null)
{
cognitoUser.getSession(function(err, session)
{
if (err) {
console.log(err);
return;
}
AWS.config.region = 'us-east-1';
AWS.config.credentials = new AWS.CognitoIdentityCredentials(
{
IdentityPoolId : 'us-east-1:********',
Logins : {
"cognito-idp.us-east-1.amazonaws.com/us-east-1_*******": session.getIdToken().getJwtToken()
}
});
AWS.config.credentials.get(function(err) {
if (!err)
{
console.log("In try block 4");
var id = AWS.config.credentials.identityId;
console.log('Cognito Identity ID '+ id);
var docClient = new AWS.DynamoDB.DocumentClient(
{
region: AWS.config.region
}
);
var params =
{
TableName: 'VideoInfo',
Item:{
keyName:kN,
technicalMetadata:tM
}
};
docClient.put(
params,
function(err, data) {
if (err){
console.error(err);
}else
{
console.log(data);
}
}
);
}
});
});
}
else {
console.log(cognitoUser);
return;
}
} catch (e) {
console.log(e);
return;
}
}
Essentially, I am getting the errors below, which cause the whole issue:
TypeError: root.AWSCognito is undefined
ReferenceError: AmazonCognitoIdentity is not defined
Possibly this means I am not getting the AWSCognito object itself, which I would use to get the correct UserPool, so I end up with a null user.
I've spent a lot of time on this today and would really appreciate any help!
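For reference, newer browser builds of amazon-cognito-identity-js expose a single global named AmazonCognitoIdentity, while the older two-file pattern (aws-cognito-sdk.js plus amazon-cognito-identity.js) exposed AWSCognito.CognitoIdentityServiceProvider; the two errors above suggest the loaded files match neither expectation. A minimal sketch against the newer global, with placeholder pool and client IDs:

    // Assumes a current browser build of amazon-cognito-identity-js is loaded
    // via a <script> tag, which defines the AmazonCognitoIdentity global.
    var poolData = {
        UserPoolId: 'us-east-1_XXXXXXXXX',        // placeholder
        ClientId: 'XXXXXXXXXXXXXXXXXXXXXXXXXX'    // placeholder
    };
    var userPool = new AmazonCognitoIdentity.CognitoUserPool(poolData);
    // Returns null until a user has signed in from this browser.
    var cognitoUser = userPool.getCurrentUser();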

Not uploading video files to AWS S3 bucket using JavaScript

I am trying to upload video files to an AWS S3 bucket using the JavaScript AWS SDK, but the upload never succeeds.
JS code:
function uploadFile(dir) {
    var files = document.getElementById('photoupload').files;
    if (!files.length) {
        return alert('Please choose a file to upload first.');
    }
    var file = files[0];
    var fileName = file.name;
    var albumPhotosKey = encodeURIComponent(dir) + '//';
    var photoKey = albumPhotosKey + fileName;
    s3.putObject({
        Key: photoKey,
        Body: file,
        ContentType: file.type,
        ACL: 'public-read'
    }, function (err, data) {
        if (err) {
            return alert('There was an error uploading your file: ' + err.message);
        }
        alert('Successfully uploaded file.');
        // viewAlbum(albumName);
    });
}
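One thing worth checking with video files is size: putObject sends the whole body in a single request, while the SDK's managed s3.upload() splits large bodies into multipart uploads and retries failed parts. A sketch assuming the same s3 client, photoKey, and file as above:

    // Managed upload: multipart and retry handling for large bodies.
    s3.upload({
        Key: photoKey,
        Body: file,
        ContentType: file.type,
        ACL: 'public-read'
    }, function (err, data) {
        if (err) {
            return alert('Upload failed: ' + err.message);
        }
        alert('Successfully uploaded to ' + data.Location);
    });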

AWS S3 - How to stop images from force downloading instead of displaying

I am currently trying to load images from my website to AWS S3. The upload to the server works, but when I try to view the images they download instead of displaying in the browser. I read there is a way to set the file's content type so this would not happen, but I am not sure how to do that. Any help would be great.
router.post('/heroes/createNewHeroes', function(req, res) {
    var formidable = require('formidable'),
        http = require('http'),
        util = require('util');
    var form = new formidable.IncomingForm();
    form.parse(req, function(err, fields, files) {
        console.log(fields);
        console.log(files);
        // Load the AWS SDK for Node.js
        var AWS = require('aws-sdk');
        var shortid = require('shortid');
        var fs = require('fs');
        var fileStream = fs.createReadStream(files.asset.path);
        var newFilename = shortid.generate() + "_" + files.asset.name;
        // Set your region for future requests.
        AWS.config.region = 'us-west-2';
        AWS.config.accessKeyId = 'access Key';
        AWS.config.secretAccessKey = 'secret Key';
        console.log(newFilename);
        fileStream.on('error', function (err) {
            if (err) { throw err; }
        });
        fileStream.on('open', function () {
            var s3bucket = new AWS.S3({params: {Bucket: ' '}});
            s3bucket.createBucket(function() {
                var params = {Key: newFilename, Body: fileStream};
                s3bucket.upload(params, function(err, data) {
                    if (err) {
                        console.log("Error uploading data: ", err);
                    } else {
                        console.log("Successfully uploaded data");
                        projectX.createHeroes(['plantTypes', 'asset', 'cost', 'energy', 'isSunProducer', 'isShooter', 'isExploding', 'sunFrequency', 'shootingFrequency', 'damage'], [fields.plantTypes, newFilename, fields.cost, fields.energy, fields.isSunProducer, fields.isShooter, fields.isExploding, fields.sunFrequency, fields.shootingFrequency, fields.damage], function(data) {
                            res.redirect('/heroes');
                        });
                    }
                });
            });
        });
    });
});
Set the ContentType explicitly in the params you pass to upload:
    var params = {Key: newFilename, ContentType: 'image/png', Body: fileStream};
See the upload documentation: http://docs.aws.amazon.com/AWSJavaScriptSDK/latest/AWS/S3.html#upload-property
Just put "contentType: multerS3.AUTO_CONTENT_TYPE " . It will work .
Ex:
var upload = multer({
    storage: multerS3({
        s3: s3,
        bucket: 'some-bucket',
        contentType: multerS3.AUTO_CONTENT_TYPE,
        key: function (req, file, cb) {
            cb(null, Date.now().toString())
        }
    })
})
Visit this link for more details: https://github.com/badunk/multer-s3
This helped me:
storage: multerS3({
    s3: s3,
    bucket: "bucketname",
    acl: "public-read",
    contentType: multerS3.AUTO_CONTENT_TYPE,
    key: function(req, file, cb) {
        console.log("req.file", file);
        cb(null, `${Date.now()}-${file.originalname}`);
    }
})

AWS Lambda Get Image and Upload to S3

I am working in an AWS Lambda function. I am successfully making an API call to the NASA APOD and getting back the values. I want to take the URL for the image, download the image, and then upload it to S3. I am getting an error when I try to access the "test.jpg" image: "Error: EACCES: permission denied, open 'test.jpg'". If I move the s3Bucket.putObject outside the http.request, data is null. I know I am missing something simple. Thoughts?
// Assumes the surrounding handler already has:
// var AWS = require('aws-sdk'), https = require('https'), http = require('http');
function GetAPOD(intent, session, callback) {
    var nasa_api_key = 'demo-key',
        nasa_api_path = '/planetary/apod?api_key=' + nasa_api_key;
    var options = {
        host: 'api.nasa.gov',
        port: 443,
        path: nasa_api_path,
        method: 'GET'
    };
    var req = https.request(options, function (res) {
        res.setEncoding('utf-8');
        var responseString = '';
        res.on('data', function (data) {
            responseString += data;
        });
        res.on('end', function () {
            console.log('API Response: ' + responseString);
            var responseObject = JSON.parse(responseString),
                image_date = responseObject['date'],
                image_title = responseObject['title'],
                image_url = responseObject['url'],
                image_hdurl = responseObject['hdurl'],
                image_desc = responseObject['explanation'];
            var s3Bucket = new AWS.S3({ params: {Bucket: 'nasa-apod'} });
            var fs = require('fs');
            var file = fs.createWriteStream("test.jpg");
            var request = http.get(image_url, function(response) {
                response.pipe(file);
                var data = {Key: "test.jpg", Body: file};
                s3Bucket.putObject(data, function(err, data) {
                    if (err) {
                        console.log('Error uploading data: ', data);
                    } else {
                        console.log('successfully uploaded the image!');
                    }
                });
            });
        });
    });
    req.on('error', function (e) {
        console.error('HTTP error: ' + e.message);
    });
    //req.write();
    req.end();
}
You need to write the file to /tmp. That's the only directory in the Lambda environment that you have write access to.
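A minimal sketch of that change against the code above; only the path differs:

    // /tmp is the only writable directory in the Lambda sandbox.
    var file = fs.createWriteStream('/tmp/test.jpg');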
I got it! Thank you Mark B for the help. I was able to get the data from the stream without saving it locally and then write it to the bucket. I did have to change my IAM role to allow putObject for S3.
function GetAPOD(intent, session, callback) {
    var nasa_api_key = 'demo-key',
        nasa_api_path = '/planetary/apod?api_key=' + nasa_api_key;
    var options = {
        host: 'api.nasa.gov',
        port: 443,
        path: nasa_api_path,
        method: 'GET'
    };
    var req = https.request(options, function (res) {
        res.setEncoding('utf-8');
        var responseString = '';
        res.on('data', function (data) {
            responseString += data;
        });
        res.on('end', function () {
            // console.log('API Response: ' + responseString);
            var responseObject = JSON.parse(responseString),
                image_date = responseObject['date'],
                image_title = responseObject['title'],
                image_url = responseObject['url'],
                image_hdurl = responseObject['hdurl'],
                image_desc = responseObject['explanation'];
            var image_name = image_date + '.jpg';
            var s3Bucket = new AWS.S3({ params: {Bucket: 'nasa-apod'} });
            var request = http.get(image_url, function(response) {
                // Accumulate every chunk; keeping only the last chunk would
                // drop all but the final piece of a multi-chunk body.
                var chunks = [];
                response.on('data', function (data) {
                    chunks.push(data);
                });
                response.on('end', function () {
                    var image_body = Buffer.concat(chunks);
                    var param_data = {
                        Key: image_name,
                        Body: image_body,
                        ContentType: "image/jpeg",
                        ContentLength: image_body.length
                    };
                    s3Bucket.putObject(param_data, function(err, output_data) {
                        if (err) {
                            console.log('Error uploading data to S3: ' + err);
                        }
                    });
                });
            });
        });
    });
    req.on('error', function (e) {
        console.error('HTTP error: ' + e.message);
    });
    req.end();
}

"Missing credentials in config" Returned From Amazon S3 JavaScript SDK Server

I've been following the boilerplate code at http://aws.amazon.com/developers/getting-started/browser/ to get CORS uploading working with my S3 account.
I've created a Facebook app, changed the CORS configuration XML for my S3 bucket, and filled in the appropriate variables in the JavaScript code. But when I try to upload a file through my webpage, I get the response Error: Missing credentials in config.
Can someone point me in the right direction for debugging this?
My JS:
var appId = '999943416325248';
var roleArn = 'arn:aws:iam::458182047307:role/s3-test';
var bucketName = 'my-bucket';
var fbUserId;
var bucket = new AWS.S3({
    params: {
        Bucket: bucketName
    }
});

var fileChooser = document.getElementById('video-file-input');
var button = document.getElementById('submit-button');
var results = document.getElementById('results');

button.addEventListener('click', function () {
    var file = fileChooser.files[0];
    if (file) {
        results.innerHTML = '';
        // Object key will be facebook-USERID#/FILE_NAME
        var objKey = 'facebook-' + fbUserId + '/' + file.name;
        var params = {
            Key: objKey,
            ContentType: file.type,
            Body: file,
            ACL: 'public-read'
        };
        bucket.putObject(params, function (err, data) {
            if (err) {
                results.innerHTML = 'ERROR: ' + err;
            } else {
                listObjs();
            }
        });
    } else {
        results.innerHTML = 'Nothing to upload.';
    }
}, false);

function listObjs() {
    var prefix = 'facebook-' + fbUserId;
    bucket.listObjects({
        Prefix: prefix
    }, function (err, data) {
        if (err) {
            results.innerHTML = 'ERROR: ' + err;
        } else {
            var objKeys = "";
            data.Contents.forEach(function (obj) {
                objKeys += obj.Key + "<br>";
            });
            results.innerHTML = objKeys;
        }
    });
}

/*!
 * Login to your application using Facebook.
 * Uses the Facebook SDK for JavaScript available here:
 * https://developers.facebook.com/docs/javascript/gettingstarted/
 */
window.fbAsyncInit = function () {
    FB.init({
        appId: appId
    });
    FB.login(function (response) {
        bucket.config.credentials = new AWS.WebIdentityCredentials({
            ProviderId: 'graph.facebook.com',
            RoleArn: roleArn,
            WebIdentityToken: response.authResponse.accessToken
        });
        fbUserId = response.authResponse.userID;
        button.style.display = 'block';
    });
};

// Load the Facebook SDK asynchronously
(function (d, s, id) {
    var js, fjs = d.getElementsByTagName(s)[0];
    if (d.getElementById(id)) {
        return;
    }
    js = d.createElement(s);
    js.id = id;
    js.src = "//connect.facebook.net/en_US/all.js";
    fjs.parentNode.insertBefore(js, fjs);
}(document, 'script', 'facebook-jssdk'));
Turns out Chrome was blocking pop-ups on my domain, so Facebook authentication couldn't proceed. Once I allowed pop-ups from the domain, everything worked as expected.