AWS Lambda: write a file on S3 (JavaScript)

var AWS = require('aws-sdk');
var s3 = new AWS.S3();
exports.handler = async (event) => {
    var bucketName = 'arn:aws:s3:::alessio77';
    var keyName = 'prova.txt';
    var content = 'This is a sample text file';
    var params = { 'Bucket': bucketName, 'Key': keyName, 'Body': content };
    s3.putObject(params, function (err, data) {
        console.log('entrato');
        if (err)
            console.log(err);
        else
            console.log("Successfully saved object to " + bucketName + "/" + keyName);
    });
};
This code neither writes a file nor gives me an error. This is the log:
START RequestId: 7c93b1b9-73c1-4f18-9824-095bcbe292bf Version: $LATEST
END RequestId: 7c93b1b9-73c1-4f18-9824-095bcbe292bf
REPORT RequestId: 7c93b1b9-73c1-4f18-9824-095bcbe292bf Duration: 706.18 ms Billed Duration: 800 ms Memory Size: 128 MB Max Memory Used: 90 MB

s3.putObject is asynchronous, and you need to wait for it to finish before the handler returns. Almost all AWS API calls return an AWS.Request, which can be converted to a promise with .promise(). Here is a solution using await:
var AWS = require('aws-sdk');
var s3 = new AWS.S3();
exports.handler = async (event) => {
    // Bucket takes the bucket name, not the full ARN
    var bucketName = 'alessio77';
    var keyName = 'prova.txt';
    var content = 'This is a sample text file';
    var params = { 'Bucket': bucketName, 'Key': keyName, 'Body': content };
    try {
        console.log('entrato');
        const data = await s3.putObject(params).promise();
        console.log("Successfully saved object to " + bucketName + "/" + keyName);
    } catch (err) {
        console.log(err);
    }
};
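
For newer Node.js runtimes, here is a minimal sketch of the same write using the AWS SDK for JavaScript v3 (@aws-sdk/client-s3), which is promise-based throughout; the bucket and key below are the ones from the question:
const { S3Client, PutObjectCommand } = require('@aws-sdk/client-s3');
const client = new S3Client({});
exports.handler = async (event) => {
    // v3 commands return promises directly, so await works without .promise()
    await client.send(new PutObjectCommand({
        Bucket: 'alessio77',
        Key: 'prova.txt',
        Body: 'This is a sample text file'
    }));
    return 'Successfully saved object to alessio77/prova.txt';
};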

Related

Lambda function not calling S3 bucket upload

My Lambda function does not call the S3 upload function where it is supposed to send back a URL that will be saved to the DynamoDB table. I can't seem to pinpoint what's wrong here. I have tried calling just the upload-to-S3 part of the Lambda without the rest of the code, and it works fine.
const AWS = require('aws-sdk');
const s3 = new AWS.S3();
const BUCKET_NAME = 'BUCKET_NAME';
const dynamo = new AWS.DynamoDB.DocumentClient();
// headers is used in the response below, so it must be defined;
// a JSON content type is a reasonable default
const headers = { 'Content-Type': 'application/json' };
exports.handler = async (event, context) => {
    let body;
    let statusCode = 200;
    const uploadFileToS3 = async (fileBinary) => {
        try {
            const base64File = fileBinary;
            const decodedFile = Buffer.from(
                base64File.replace(/^data:image\/\w+;base64,/, ''),
                'base64'
            );
            const params = {
                Bucket: BUCKET_NAME,
                Key: `images/${new Date().toISOString()}.jpeg`,
                Body: decodedFile,
                ContentType: 'image/jpeg',
            };
            const uploadResult = await s3.upload(params).promise();
            console.log(uploadResult);
            return uploadResult;
        } catch (e) {
            console.error(e);
        }
    };
    try {
        switch (event.routeKey) {
            case 'PUT /items':
                let requestJSON = JSON.parse(event.body);
                const fileURL = await uploadFileToS3(requestJSON.itemPicture);
                await dynamo
                    .put({
                        TableName: 'TABLE_NAME',
                        Item: {
                            itemId: requestJSON.itemId,
                            userId: requestJSON.userId,
                            itemTitle: requestJSON.itemTitle,
                            itemDesc: requestJSON.itemDesc,
                            itemLocation: requestJSON.itemLocation,
                            itemPrice: requestJSON.itemPrice,
                            itemPicture: fileURL,
                        },
                    })
                    .promise();
                body = `Put item ${requestJSON.itemId}`;
                break;
            default:
                throw new Error(`Unsupported route: ${event.routeKey}`);
        }
    } catch (err) {
        statusCode = 400;
        body = err.message;
    } finally {
        body = JSON.stringify(body);
    }
    return {
        statusCode,
        body,
        headers,
    };
};
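
One thing worth noting: the try/catch inside uploadFileToS3 swallows failures, logging them and returning undefined, so a failed upload never reaches the outer catch and the item is written without a valid picture URL. A minimal sketch of the helper rewritten to rethrow, and to return the object URL rather than the whole upload result (in SDK v2, s3.upload resolves to an object whose Location field holds the URL):
const uploadFileToS3 = async (fileBinary) => {
    const decodedFile = Buffer.from(
        fileBinary.replace(/^data:image\/\w+;base64,/, ''),
        'base64'
    );
    const params = {
        Bucket: BUCKET_NAME,
        Key: `images/${new Date().toISOString()}.jpeg`,
        Body: decodedFile,
        ContentType: 'image/jpeg',
    };
    // Let errors propagate; the handler's try/catch turns them into a 400.
    // Location is the URL of the uploaded object.
    const uploadResult = await s3.upload(params).promise();
    return uploadResult.Location;
};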

Having Issues with external http requests on AWS Lambda

I'm using cities stored in a DynamoDB table to make calls to an external weather API to determine whether it will rain in that city on that day. It then calls a separate Lambda that uses that info to message subscribers of an SNS topic that it will rain.
const AWS = require('aws-sdk');
AWS.config.update({ region: 'eu-west-2' });
const lambda = new AWS.Lambda();
const axios = require('axios');
const tableName = process.env.CITY_TABLE;
const docClient = new AWS.DynamoDB.DocumentClient();
const publishMessageLambda = process.env.PUBLISH_MESSAGE_LAMBDA_NAME;
const weatherApiKey = process.env.WEATHER_API_KEY;
exports.handler = async (event) => {
    const scanParams = {
        TableName: tableName,
        AttributesToGet: ['city'],
    };
    try {
        let citiesArr = await docClient.scan(scanParams).promise();
        citiesArr.Items.forEach(async (cityObj) => {
            let weatherReport = await axios({
                method: 'get',
                url: 'http://api.weatherapi.com/v1/forecast.json',
                params: {
                    key: weatherApiKey,
                    q: cityObj.city,
                    days: 1,
                },
            });
            let city = cityObj.city;
            let dailyChanceOfRain = Number(
                weatherReport.data.forecast.forecastday[0].day.daily_chance_of_rain
            );
            let totalPrecip =
                weatherReport.data.forecast.forecastday[0].day.totalprecip_mm;
            let lambdaParams = {
                FunctionName: publishMessageLambda,
                InvocationType: 'RequestResponse',
                Payload: JSON.stringify({
                    body: { dailyChanceOfRain, totalPrecip, city },
                }),
            };
            console.log('dailyChanceOfRain: ', dailyChanceOfRain);
            console.log('totalPrecip: ', totalPrecip);
            if (dailyChanceOfRain > 50 && totalPrecip > 3) {
                let data = await lambda.invoke(lambdaParams).promise();
            }
        });
    } catch (error) {
        console.log(error);
    }
};
Running the function locally produces the desired logs on the command line. When running on Lambda, however, the logs involving the external HTTP request don't show up on a cold start; they only appear once the function has been invoked multiple times, and often not together.
Any advice would be much appreciated.
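
The symptom described is the classic sign of unawaited async work: Array.prototype.forEach does not await an async callback, so the handler's promise resolves (and Lambda freezes the execution environment) before the axios calls finish; the stray logs on later invocations are that frozen work resuming. A minimal sketch of the loop rewritten with map and Promise.all so the handler genuinely waits, assuming the same handler body as above:
try {
    const citiesArr = await docClient.scan(scanParams).promise();
    // map + Promise.all makes the handler wait for every request and invoke
    await Promise.all(
        citiesArr.Items.map(async (cityObj) => {
            const weatherReport = await axios({
                method: 'get',
                url: 'http://api.weatherapi.com/v1/forecast.json',
                params: { key: weatherApiKey, q: cityObj.city, days: 1 },
            });
            const day = weatherReport.data.forecast.forecastday[0].day;
            const dailyChanceOfRain = Number(day.daily_chance_of_rain);
            const totalPrecip = day.totalprecip_mm;
            if (dailyChanceOfRain > 50 && totalPrecip > 3) {
                await lambda.invoke({
                    FunctionName: publishMessageLambda,
                    InvocationType: 'RequestResponse',
                    Payload: JSON.stringify({
                        body: { dailyChanceOfRain, totalPrecip, city: cityObj.city },
                    }),
                }).promise();
            }
        })
    );
} catch (error) {
    console.log(error);
}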

Creating s3 bucket with lambda function

I wrote this Lambda function to create a new S3 bucket. I get a success response, but no bucket is created:
// Creating new s3 bucket
var AWS = require('aws-sdk');
AWS.config.update({
    region: '',
    accessKeyId: '',
    secretAccessKey: ''
});
var s3 = new AWS.S3();
var bucketName = 'sample_bucket';
var keyName = 'sample_text.txt';
exports.handler = function uploadToS3(event, context, callback) {
    s3.createBucket({ Bucket: bucketName }, function() {
        var params = {
            Bucket: bucketName,
            Key: keyName,
            Body: 'Hello World!'
        };
        s3.putObject(params, function(err, data) {
            if (err)
                console.log(err);
            else
                console.log("Successfully uploaded data to " + bucketName);
        });
    });
    callback(null, { result: 'SUCCESS' });
};
Is there any problem in the code, or do I have to grant some special access to the IAM user? I provided programmatic access to the IAM user.
Thanks!
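
Two things stand out here. First, callback(null, { result: 'SUCCESS' }) runs before createBucket and putObject have completed, so the function reports success and can be frozen before either request finishes (the createBucket callback also ignores its error argument). Second, 'sample_bucket' is not a valid bucket name: S3 bucket names may only contain lowercase letters, numbers, dots, and hyphens. Inside Lambda you also don't need hardcoded keys; the execution role supplies credentials, and it needs s3:CreateBucket and s3:PutObject permissions. A minimal promise-based sketch, assuming a valid name such as 'sample-bucket':
var AWS = require('aws-sdk');
var s3 = new AWS.S3();
var bucketName = 'sample-bucket'; // underscores are not allowed in bucket names
var keyName = 'sample_text.txt';
exports.handler = async (event) => {
    // Await both calls so the function doesn't return early
    await s3.createBucket({ Bucket: bucketName }).promise();
    await s3.putObject({
        Bucket: bucketName,
        Key: keyName,
        Body: 'Hello World!'
    }).promise();
    return { result: 'SUCCESS' };
};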

AWS S3 - How to stop images from force downloading instead of displaying

I am currently trying to load images from my website into AWS S3. The upload itself works, but when I try to view the images they download instead of displaying in the browser. I read there is a way to set the file type so this would not happen, but I am not sure how to do that. Any help would be great.
router.post('/heroes/createNewHeroes', function(req, res) {
    var formidable = require('formidable'),
        http = require('http'),
        util = require('util');
    var form = new formidable.IncomingForm();
    form.parse(req, function(err, fields, files) {
        console.log(fields);
        console.log(files);
        // Load the AWS SDK for Node.js
        var AWS = require('aws-sdk');
        var shortid = require('shortid');
        var fs = require('fs');
        var fileStream = fs.createReadStream(files.asset.path);
        var newFilename = shortid.generate() + "_" + files.asset.name;
        // Set your region for future requests.
        AWS.config.region = 'us-west-2';
        AWS.config.accessKeyId = 'access Key';
        AWS.config.secretAccessKey = 'secret Key';
        console.log(newFilename);
        fileStream.on('error', function (err) {
            if (err) { throw err; }
        });
        fileStream.on('open', function () {
            var s3bucket = new AWS.S3({ params: { Bucket: ' ' } });
            s3bucket.createBucket(function() {
                var params = { Key: newFilename, Body: fileStream };
                s3bucket.upload(params, function(err, data) {
                    if (err) {
                        console.log("Error uploading data: ", err);
                    } else {
                        console.log("Successfully uploaded data");
                        projectX.createHeroes(
                            ['plantTypes', 'asset', 'cost', 'energy', 'isSunProducer', 'isShooter', 'isExploding', 'sunFrequency', 'shootingFrequency', 'damage'],
                            [fields.plantTypes, newFilename, fields.cost, fields.energy, fields.isSunProducer, fields.isShooter, fields.isExploding, fields.sunFrequency, fields.shootingFrequency, fields.damage],
                            function(data) {
                                res.redirect('/heroes');
                            }
                        );
                    }
                });
            });
        });
    });
});
S3 serves an object back with whatever Content-Type it was stored with, and it defaults to binary/octet-stream when none is given, which browsers download rather than display. Set ContentType in the upload params:
var params = { Key: newFilename, ContentType: 'image/png', Body: fileStream };
http://docs.aws.amazon.com/AWSJavaScriptSDK/latest/AWS/S3.html#upload-property
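
Since the route accepts arbitrary uploads, hardcoding image/png only works for PNGs. A small sketch, assuming formidable v1, where the parsed file object exposes the client-reported MIME type as files.asset.type:
// Use the MIME type formidable parsed from the multipart upload,
// falling back to S3's generic default when it's missing
var params = {
    Key: newFilename,
    Body: fileStream,
    ContentType: files.asset.type || 'binary/octet-stream'
};
s3bucket.upload(params, function(err, data) {
    if (err) { console.log("Error uploading data: ", err); }
});
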
Just set contentType: multerS3.AUTO_CONTENT_TYPE and it will work. Example:
var upload = multer({
    storage: multerS3({
        s3: s3,
        bucket: 'some-bucket',
        contentType: multerS3.AUTO_CONTENT_TYPE,
        key: function (req, file, cb) {
            cb(null, Date.now().toString())
        }
    })
})
Visit https://github.com/badunk/multer-s3 for more details.
This helped me:
storage: multerS3({
    s3: s3,
    bucket: "bucketname",
    acl: "public-read",
    contentType: multerS3.AUTO_CONTENT_TYPE,
    key: function(req, file, cb) {
        console.log("req.file", file);
        cb(null, `${Date.now()}-${file.originalname}`);
    }
})

AWS Lambda Get Image and Upload to S3

I am working in an AWS Lambda function. I am successfully making an API call to the NASA APOD and getting back the values. I want to take the URL for the image, download that image, and then upload it into S3. I am getting an error when I try to access the "test.jpg" image: "Error: EACCES: permission denied, open 'test.jpg'". If I move the S3bucket.putObject outside the http.request, data is null. I know I am missing something simple. Thoughts?
function GetAPOD(intent, session, callback) {
    var nasa_api_key = 'demo-key'
        , nasa_api_path = '/planetary/apod?api_key=' + nasa_api_key;
    var options = {
        host: 'api.nasa.gov',
        port: 443,
        path: nasa_api_path,
        method: 'GET'
    };
    var req = https.request(options, function (res) {
        res.setEncoding('utf-8');
        var responseString = '';
        res.on('data', function (data) {
            responseString += data;
        });
        res.on('end', function () {
            console.log('API Response: ' + responseString);
            var responseObject = JSON.parse(responseString)
                , image_date = responseObject['date']
                , image_title = responseObject['title']
                , image_url = responseObject['url']
                , image_hdurl = responseObject['hdurl']
                , image_desc = responseObject['explanation'];
            var s3Bucket = new AWS.S3({ params: { Bucket: 'nasa-apod' } });
            var fs = require('fs');
            var file = fs.createWriteStream("test.jpg");
            var request = http.get(image_url, function(response) {
                response.pipe(file);
                var data = { Key: "test.jpg", Body: file };
                s3Bucket.putObject(data, function(err, data) {
                    if (err) {
                        console.log('Error uploading data: ', data);
                    } else {
                        console.log('succesfully uploaded the image!');
                    }
                });
            });
        });
    });
    req.on('error', function (e) {
        console.error('HTTP error: ' + e.message);
    });
    //req.write();
    req.end();
}
You need to be writing the file to /tmp. That's the only directory in the Lambda environment that you will have write access to.
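That is, the write stream in the question would become something like:
// /tmp is the only writable path in the Lambda filesystem
var file = fs.createWriteStream('/tmp/test.jpg');
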
I got it!! Thank you Mark B for the help. I was able to get the data from the stream without saving it locally and then write it to the bucket. I did have to change my IAM role to allow putObject on S3.
function GetAPOD(intent, session, callback) {
    var nasa_api_key = 'demo-key'
        , nasa_api_path = '/planetary/apod?api_key=' + nasa_api_key;
    var options = {
        host: 'api.nasa.gov',
        port: 443,
        path: nasa_api_path,
        method: 'GET'
    };
    var req = https.request(options, function (res) {
        res.setEncoding('utf-8');
        var responseString = '';
        res.on('data', function (data) {
            responseString += data;
        });
        res.on('end', function () {
            var responseObject = JSON.parse(responseString)
                , image_date = responseObject['date']
                , image_title = responseObject['title']
                , image_url = responseObject['url']
                , image_hdurl = responseObject['hdurl']
                , image_desc = responseObject['explanation'];
            var image_name = image_date + '.jpg';
            var s3Bucket = new AWS.S3({ params: { Bucket: 'nasa-apod' } });
            var request = http.get(image_url, function(response) {
                // Collect every chunk; keeping only the last 'data' event
                // would truncate any image delivered in multiple chunks
                var image_chunks = [];
                response.on('data', function (chunk) {
                    image_chunks.push(chunk);
                });
                response.on('end', function () {
                    var param_data = {
                        Key: image_name,
                        Body: Buffer.concat(image_chunks),
                        ContentType: "image/jpeg",
                        ContentLength: response.headers['content-length']
                    };
                    s3Bucket.putObject(param_data, function(err, output_data) {
                        if (err) {
                            console.log('Error uploading data to S3: ' + err);
                        }
                    });
                });
            });
            request.end();
        });
    });
    req.on('error', function (e) {
        console.error('HTTP error: ' + e.message);
    });
    req.end();
}
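
An alternative worth knowing: s3.upload (unlike putObject) accepts a stream of unknown length, so the HTTP response can be piped straight to S3 without buffering the whole image in memory. A minimal sketch, assuming the same s3Bucket and image_name as above:
var request = http.get(image_url, function (response) {
    // s3.upload streams the body in multipart chunks,
    // so no ContentLength is needed up front
    s3Bucket.upload({
        Key: image_name,
        Body: response,
        ContentType: 'image/jpeg'
    }, function (err, data) {
        if (err) {
            console.log('Error uploading data to S3: ' + err);
        }
    });
});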