I wrote this Lambda function to create a new S3 bucket. I get a success response, but no bucket is created:
// Creating new s3 bucket
var AWS = require('aws-sdk');

AWS.config.update({
    region: '',
    accessKeyId: '',
    secretAccessKey: ''
});

var s3 = new AWS.S3();
var bucketName = 'sample_bucket';
var keyName = 'sample_text.txt';

exports.handler = function uploadToS3(event, context, callback) {
    s3.createBucket({ Bucket: bucketName }, function () {
        var params = {
            Bucket: bucketName,
            Key: keyName,
            Body: 'Hello World!'
        };
        s3.putObject(params, function (err, data) {
            if (err)
                console.log(err);
            else
                console.log("Successfully uploaded data to " + bucketName);
        });
    });
    callback(null, { result: 'SUCCESS' });
};
Is there a problem in the code, or do I have to grant some special access to the IAM user? I provided programmatic access to the IAM user.
Thanks!
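Two likely causes, offered as a sketch rather than a definitive answer: callback(null, { result: 'SUCCESS' }) fires before createBucket and putObject ever complete (the same async issue covered in the await answer further down in this thread), and 'sample_bucket' is not a valid bucket name, since S3 bucket names may not contain underscores. A minimal corrected sketch (the bucket name below is a placeholder):

var AWS = require('aws-sdk');
var s3 = new AWS.S3();
var bucketName = 'sample-bucket-12345'; // hyphens only; bucket names must be globally unique
var keyName = 'sample_text.txt';

exports.handler = async (event) => {
    // Await both calls so the handler does not return before they finish.
    // Note: createBucket errors if the bucket already exists in your account.
    await s3.createBucket({ Bucket: bucketName }).promise();
    await s3.putObject({
        Bucket: bucketName,
        Key: keyName,
        Body: 'Hello World!'
    }).promise();
    return { result: 'SUCCESS' };
};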
I am trying to upload a file to S3 from Lambda via API Gateway, using the code below. The file is sent to S3, as shown in the images 'test_using_postman.png' and 's3_file.png', but when I try to open it I get a message that the file is corrupted, as shown in 'open_file_with_problem.png'. The images are below.
Does anyone know what happened?
Thank you!
Images:
Test Using Postman: https://drive.google.com/open?id=1eenEnvuMQU28iI_Ltqzpw9OlCvIcY5Fg
S3 File: https://drive.google.com/open?id=1b1_CmIhzfc8mQj_rwCK6Xy30gzoP6HcK
Open File with problem: https://drive.google.com/open?id=1o54rLB9wWF1KxdUOkq3xAGVET7UWoqgf
Node.js code:
const crypto = require('crypto');
var AWS = require('aws-sdk');

AWS.config.update({ region: 'us-east-1' });

module.exports.arquivo_upload = (event, context, callback) => {
    let BUCKET_NAME = 'XXXXX';
    let fileContent = event.body;
    let filePath = 'upload/';
    let fileName = crypto.createHash('md5').update('niby_' + Date.now()).digest("hex");

    let s3 = new AWS.S3({ apiVersion: '2006-03-01' });

    var uploadParams = {
        Bucket: BUCKET_NAME,
        Key: filePath + fileName + '.png',
        Body: fileContent,
        ContentType: "image/png"
    };

    s3.upload(uploadParams, function (err, data) {
        if (err) {
            console.error(err);
            callback(null, {
                statusCode: 400,
                body: JSON.stringify(err),
            });
        }
        if (data) {
            //TODO: Call other api to save file name
            console.info(data.Location);
            callback(null, {
                statusCode: 200,
                body: JSON.stringify(data.Location),
            });
        }
    });
}
I resolved this problem! I now send the file to API Gateway as base64 and set the parameter ContentEncoding: 'base64' in the Lambda function:
var uploadParams = {
    Bucket: config.s3.bucket_name,
    Key: config.s3.file_path + fileName + obj.extension,
    Body: buf,
    ContentEncoding: 'base64',
    ContentType: obj.content_type,
    ACL: "public-read"
};
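For context, buf, obj, and config come from the answerer's surrounding code, which is not shown. A minimal sketch of the decode step, under the assumption that the handler receives the base64 string (possibly with a data-URI prefix) in event.body:

// Assumed decode step before building uploadParams:
let body = event.body.replace(/^data:image\/\w+;base64,/, ''); // strip optional data-URI prefix
let buf = Buffer.from(body, 'base64');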
var AWS = require('aws-sdk');
var s3 = new AWS.S3();

exports.handler = async (event) => {
    var bucketName = 'arn:aws:s3:::alessio77';
    var keyName = 'prova.txt';
    var content = 'This is a sample text file';
    var params = { 'Bucket': bucketName, 'Key': keyName, 'Body': content };
    s3.putObject(params, function (err, data) {
        console.log('entrato')
        if (err)
            console.log(err)
        else
            console.log("Successfully saved object to " + bucketName + "/" + keyName);
    });
};
This code neither writes a file nor gives me an error.
This is the log:
START RequestId: 7c93b1b9-73c1-4f18-9824-095bcbe292bf Version: $LATEST
END RequestId: 7c93b1b9-73c1-4f18-9824-095bcbe292bf
REPORT RequestId: 7c93b1b9-73c1-4f18-9824-095bcbe292bf Duration: 706.18 ms Billed Duration: 800 ms Memory Size: 128 MB Max Memory Used: 90 MB
s3.putObject is asynchronous and you need to wait for it. Almost all AWS API calls return an AWS.Request, which can produce a promise via .promise(). Here is a solution using await:
var AWS = require('aws-sdk');
var s3 = new AWS.S3();

exports.handler = async (event) => {
    // Bucket takes the bucket name, not its ARN.
    var bucketName = 'alessio77';
    var keyName = 'prova.txt';
    var content = 'This is a sample text file';
    var params = { 'Bucket': bucketName, 'Key': keyName, 'Body': content };
    try {
        console.log('entrato')
        const data = await s3.putObject(params).promise();
        console.log("Successfully saved object to " + bucketName + "/" + keyName);
    } catch (err) {
        console.log(err)
    }
};
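As a side note: because the handler is declared async, whatever it returns becomes the Lambda response, so after the await you could also return a result object instead of only logging.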
I have an AWS Lambda function that I am using to transcode video placed into an S3 bucket. I need the transcoded file to be encrypted. The function works fine for straight transcoding, but when I attempt to add the encryption portion it fails silently: there are no errors in the logs, but no file is output either. Any help would be appreciated. Below is the code in question.
'use strict';

var AWS = require('aws-sdk');

var s3 = new AWS.S3({
    apiVersion: '2012-09-25'
});
var eltr = new AWS.ElasticTranscoder({
    apiVersion: '2012-09-25',
    region: 'us-east-1'
});

exports.handler = function (event, context) {
    console.log('Executing Elastic Transcoder Orchestrator');
    var bucket = event.Records[0].s3.bucket.name;
    var key = event.Records[0].s3.object.key;
    var pipelineId = 'PIPLINEID';
    var split = key.split('.');
    var nameandfolder = split[0];
    var split2 = nameandfolder.split('/');
    var name = split2[1];
    console.log(bucket);
    if (bucket !== 'project') {
        context.fail('Incorrect Video Input Bucket');
        return;
    }
    var params = {
        PipelineId: pipelineId,
        OutputKeyPrefix: 'Transcode_Output/',
        Input: {
            Key: key,
            FrameRate: 'auto',
            Resolution: 'auto',
            AspectRatio: 'auto',
            Interlaced: 'auto',
            Container: 'auto'
        },
        Outputs: [{
            Key: name + '.mp4',
            PresetId: 'PRESETID',
            Encryption: {
                Mode: 's3-aws-kms',
            },
        }]
    };
    console.log('Starting Job');
    eltr.createJob(params, function (err, data) {
        if (err) {
            console.log(err);
        } else {
            console.log(data);
        }
        context.succeed('Job well done');
    });
};
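A hedged diagnosis: createJob only submits the job, so it can succeed while the transcode itself later fails, and with Mode 's3-aws-kms' the pipeline's IAM role must also be allowed to use the KMS key; permission failures then show up on the job, not in the createJob response. A minimal sketch for inspecting the job after submission (in practice you would poll or use the pipeline's SNS notifications):

eltr.createJob(params, function (err, data) {
    if (err) {
        console.log(err);
        return context.fail(err);
    }
    // Look up the submitted job; transcode-time failures surface as
    // Status 'Error' on the job, not as a createJob error.
    eltr.readJob({ Id: data.Job.Id }, function (err2, jobData) {
        if (err2) console.log(err2);
        else console.log('Job ' + data.Job.Id + ' status: ' + jobData.Job.Status);
        context.succeed('Job submitted');
    });
});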
These are my first steps in AWS in general and Cognito specifically, so please bear with me; I'm a bit confused by all the concepts, and the documentation is not very easy to follow.
I set up 3 Lambda functions: one that creates a user, one that confirms a user, and a last one that is supposed to authenticate the user.
The first 2 work fine; my user is created and confirmed. Now I'm stuck with the 3rd one, which is supposed to return a token to be used in APIG, where I've set up a simple endpoint with my Cognito authorizer.
Every token I get back returns Unauthorized when tested in the APIG/Authorizers/Cognito Pool Authorizers section.
My 'sign in' code is the following:
const AWS = require('aws-sdk');

exports.handler = (event, context, callback) => {
    AWS.config.apiVersions = {
        cognitoidentityserviceprovider: '2016-04-18'
    };
    AWS.config.region = 'us-east-1'; // Region
    /*AWS.config.credentials = new AWS.CognitoIdentityCredentials({
        IdentityPoolId: 'MY_IDENTITY_POOL_ID',
    });*/
    var identityId = null;
    var params = {
        IdentityPoolId: 'MY_IDENTITY_POOL_ID',
        IdentityId: identityId,
        Logins: {
            'login.auth.MYPROJECT': 'MY_USERNAME'
        },
        TokenDuration: 86400
    };
    var cognito = new AWS.CognitoIdentity({
        region: AWS.config.region
    });
    cognito.getOpenIdTokenForDeveloperIdentity(params, function (err, data) {
        if (err) {
            return callback(err);
        }
        else {
            /*AWS.config.credentials = new AWS.CognitoIdentityCredentials({
                IdentityPoolId: params.IdentityPoolId
            });*/
            AWS.config.credentials.get(function () {
                // Credentials will be available when this function is called.
                var accessKeyId = AWS.config.credentials.accessKeyId;
                var secretAccessKey = AWS.config.credentials.secretAccessKey;
                var sessionToken = AWS.config.credentials.sessionToken;
                callback(null, {
                    identityId: data.IdentityId,
                    token: data.Token,
                    accessKeyId: accessKeyId,
                    secretAccessKey: secretAccessKey,
                    sessionToken: sessionToken
                });
            });
        }
    });
}
Both token and sessionToken return Unauthorized. Can someone tell me what is missing here?
Much appreciated.
EDIT 2016-11-15
The 'register' lambda code:
const AWS = require('aws-sdk');

exports.handler = (event, context, callback) => {
    AWS.config.region = 'us-east-1'; // Region
    AWS.config.credentials = new AWS.CognitoIdentityCredentials({
        IdentityPoolId: 'MY_IDENTITY_POOL_ID',
    });
    var poolData = {
        UserPoolId: 'MY_USER_POOL_ID',
        ClientId: 'MY_CLIENT_ID'
    };
    var userPool = new AWS.CognitoIdentityServiceProvider(poolData);
    var email = "myemail+" + Math.floor(Math.random() * (100 - 1) + 1) + "@example.com";
    var params = {
        ClientId: 'MY_CLIENT_ID',
        Password: '1234567890',
        Username: 'testaccount' + Math.floor(Math.random() * (100 - 1) + 1),
        UserAttributes: [
            {
                Name: 'email',
                Value: email
            }
        ]
    };
    userPool.signUp(params, function (err, result) {
        if (err) {
            console.log(err)
            return;
        }
        callback(null, {
            "message": "Hello from Lambda",
            "data": result
        });
    });
};
My 'activate' lambda code is the following:
const AWS = require('aws-sdk');

exports.handler = (event, context, callback) => {
    AWS.config.region = 'us-east-1'; // Region
    AWS.config.credentials = new AWS.CognitoIdentityCredentials({
        IdentityPoolId: 'MY_IDENTITY_POOL_ID',
    });
    var poolData = {
        UserPoolId: 'MY_USER_POOL_ID',
        ClientId: 'MY_CLIENT_ID'
    };
    var userPool = new AWS.CognitoIdentityServiceProvider(poolData);
    var email = "email_address@example.com";
    var params = {
        ClientId: 'MY_CLIENT_ID',
        Username: 'test_username',
        ForceAliasCreation: false,
        ConfirmationCode: '927000'
    };
    userPool.confirmSignUp(params, function (err, result) {
        if (err) {
            console.log(err)
            return;
        }
        callback(null, {
            "message": "Hello from Lambda",
            "data": result
        });
    });
};
In APIG, I created a Cognito User Pool Authorizer, selected my user pool, gave it a name, and set the identity token source to 'method.request.header.Authorization'.
In my APIG resource, under the Method Request, I've set Authorization to my Cognito User Pool Authorizer. Additionally, API Key Required is set to true; I have a couple of keys I was testing with, and they caused no issues.
I hope this covers everything.
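A hedged observation about the likely mismatch: getOpenIdTokenForDeveloperIdentity issues a Cognito Federated Identities (identity pool) token, while a Cognito User Pool Authorizer expects the ID token that the user pool itself issues at sign-in. Assuming the app client allows the ADMIN_NO_SRP_AUTH flow, a minimal sketch of a sign-in that returns such a token (pool, client, and credential values are placeholders):

const AWS = require('aws-sdk');
const idp = new AWS.CognitoIdentityServiceProvider({ region: 'us-east-1' });

exports.handler = (event, context, callback) => {
    var params = {
        AuthFlow: 'ADMIN_NO_SRP_AUTH', // must be enabled on the app client
        UserPoolId: 'MY_USER_POOL_ID',
        ClientId: 'MY_CLIENT_ID',
        AuthParameters: {
            USERNAME: 'MY_USERNAME',
            PASSWORD: 'MY_PASSWORD'
        }
    };
    idp.adminInitiateAuth(params, function (err, data) {
        if (err) return callback(err);
        // The IdToken is what the User Pool Authorizer validates
        // when sent in the Authorization header.
        callback(null, { idToken: data.AuthenticationResult.IdToken });
    });
};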
I am currently trying to load images from my website to AWS S3. I have the upload working, but when I try to view the images they download instead of displaying in the browser. I read there is a way to set the file type so this would not happen, but I am not sure how to do that. Any help would be great.
router.post('/heroes/createNewHeroes', function (req, res) {
    var formidable = require('formidable'),
        http = require('http'),
        util = require('util');
    var form = new formidable.IncomingForm();
    form.parse(req, function (err, fields, files) {
        console.log(fields);
        console.log(files);
        // Load the AWS SDK for Node.js
        var AWS = require('aws-sdk');
        var shortid = require('shortid');
        var fs = require('fs');
        var fileStream = fs.createReadStream(files.asset.path);
        var newFilename = shortid.generate() + "_" + files.asset.name;
        // Set your region for future requests.
        AWS.config.region = 'us-west-2';
        AWS.config.accessKeyId = 'access Key';
        AWS.config.secretAccessKey = 'secret Key';
        console.log(newFilename);
        fileStream.on('error', function (err) {
            if (err) { throw err; }
        });
        fileStream.on('open', function () {
            var s3bucket = new AWS.S3({ params: { Bucket: ' ' } });
            s3bucket.createBucket(function () {
                var params = { Key: newFilename, Body: fileStream };
                s3bucket.upload(params, function (err, data) {
                    if (err) {
                        console.log("Error uploading data: ", err);
                    } else {
                        console.log("Successfully uploaded data");
                        projectX.createHeroes(['plantTypes', 'asset', 'cost', 'energy', 'isSunProducer', 'isShooter', 'isExploding', 'sunFrequency', 'shootingFrequency', 'damage'], [fields.plantTypes, newFilename, fields.cost, fields.energy, fields.isSunProducer, fields.isShooter, fields.isExploding, fields.sunFrequency, fields.shootingFrequency, fields.damage], function (data) {
                            res.redirect('/heroes')
                        });
                    }
                });
            });
        });
    });
});
Set the ContentType in your upload params so the browser renders the file instead of downloading it:
var params = { Key: newFilename, ContentType: 'image/png', Body: fileStream };
http://docs.aws.amazon.com/AWSJavaScriptSDK/latest/AWS/S3.html#upload-property
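If the form can receive formats other than PNG, a hedged variant is to forward the mime type formidable reports for the upload; in formidable v1 that is files.asset.type (the property name is an assumption about the version in use):

var params = { Key: newFilename, ContentType: files.asset.type, Body: fileStream };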
Just put contentType: multerS3.AUTO_CONTENT_TYPE. It will work.
Ex:
var upload = multer({
    storage: multerS3({
        s3: s3,
        bucket: 'some-bucket',
        contentType: multerS3.AUTO_CONTENT_TYPE,
        key: function (req, file, cb) {
            cb(null, Date.now().toString())
        }
    })
})
Visit this link for more details: https://github.com/badunk/multer-s3
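For completeness, a minimal hedged setup around that snippet, since s3 and multerS3 are not defined in it (app is assumed to be an Express application; the route path and bucket name are placeholders):

var AWS = require('aws-sdk');
var multer = require('multer');
var multerS3 = require('multer-s3');

var s3 = new AWS.S3();
var upload = multer({
    storage: multerS3({
        s3: s3,
        bucket: 'some-bucket',
        contentType: multerS3.AUTO_CONTENT_TYPE, // detects the mime type per file
        key: function (req, file, cb) {
            cb(null, Date.now().toString());
        }
    })
});

// Usage on an Express route: the uploaded file ends up in req.file.
app.post('/upload', upload.single('asset'), function (req, res) {
    res.json({ location: req.file.location });
});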
This helped me:
storage: multerS3({
    s3: s3,
    bucket: "bucketname",
    acl: "public-read",
    contentType: multerS3.AUTO_CONTENT_TYPE,
    key: function (req, file, cb) {
        console.log("req.file", file);
        cb(null, `${Date.now()}-${file.originalname}`);
    }
})