How can I get the body of a JSON file uploaded to S3 and print it from my Lambda function?
I used the following code, but it only gives me the Content-Type. Any suggestions, please?
// console.log('Loading function');
const aws = require('aws-sdk');
const s3 = new aws.S3({ apiVersion: '2006-03-01' });

exports.handler = async (event, context) => {
    //console.log('Received event:', JSON.stringify(event, null, 2));

    // Get the object from the event and show its content type
    const bucket = event.Records[0].s3.bucket.name;
    const key = decodeURIComponent(event.Records[0].s3.object.key.replace(/\+/g, ' '));
    const params = {
        Bucket: bucket,
        Key: key,
    };

    try {
        const { ContentType } = await s3.getObject(params).promise();
        console.log('CONTENT TYPE:', ContentType);
        console.log('Body: ', );
        console.log("response: " + "I want to print the response body here when a JSON file uploaded");
        return ContentType;
    } catch (err) {
        console.log(err);
        const message = `Error getting object ${key} from bucket ${bucket}. Error : ` + err;
        console.log(message);
        throw new Error(message);
    }
};
The return value of getObject contains a Body field:
const { ContentType, Body } = await s3.getObject(params).promise();
console.log('CONTENT TYPE:', ContentType);
console.log('Body: ', Body);
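Since the uploaded object is JSON, here is a minimal sketch of printing it (assuming the file is UTF-8 encoded; in SDK v2 Body is a Buffer):

const { ContentType, Body } = await s3.getObject(params).promise();
console.log('CONTENT TYPE:', ContentType);
const bodyText = Body.toString('utf-8');   // convert the Buffer to a string
console.log('Body:', bodyText);
const parsed = JSON.parse(bodyText);       // parse it if you need individual fields
console.log('Parsed JSON:', parsed);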
Related
const dotenv = require('dotenv');
const S3 = require('aws-sdk/clients/s3');
const { getSignedUrl } = require("@aws-sdk/s3-request-presigner");
const { S3Client, GetObjectCommand } = require("@aws-sdk/client-s3");

dotenv.config();

const bucketName = process.env.AWS_BUCKET_NAME;
const region = process.env.AWS_BUCKET_REGION;
const accessKeyId = process.env.AWS_ACCESS_KEY;
const secretAccessKey = process.env.AWS_SECRET_KEY;

const s3 = new S3({
    region,
    accessKeyId,
    secretAccessKey
});
router.get("/:id", async (req, res) => {
    try {
        const post = await Post.findById(req.params.id);
        const getObjectParams = {
            Bucket: bucketName,
            Key: post.photo,
        };
        const command = new GetObjectCommand(getObjectParams);
        const url = await getSignedUrl(s3, command, { expiresIn: 3600 });
        post.imageUrl = url;
        res.status(200).json(post);
    } catch (err) {
        console.error('errorrr', err);
        res.status(500).json(err);
    }
});
Here is my code. I've console-logged post, getObjectParams, and command, and everything is there, but when I console.log url nothing is logged; instead the catch block logs errorrr Cannot read properties of undefined (reading 'clone').
What is the issue here?
I think the issue is with the getSignedUrl function, but I'm not sure what it is.
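For reference, a minimal sketch of the presigner call using only the v3 client (getSignedUrl from @aws-sdk/s3-request-presigner expects a v3 S3Client as its first argument; the bucket and key names below are taken from the snippet above, and credentials are assumed to come from the environment):

const { S3Client, GetObjectCommand } = require("@aws-sdk/client-s3");
const { getSignedUrl } = require("@aws-sdk/s3-request-presigner");

// v3 client; the default credential provider chain picks up environment credentials
const s3Client = new S3Client({ region: process.env.AWS_BUCKET_REGION });

async function presignPhoto(key) {
    const command = new GetObjectCommand({
        Bucket: process.env.AWS_BUCKET_NAME,
        Key: key,
    });
    return getSignedUrl(s3Client, command, { expiresIn: 3600 });
}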
Cypress.Commands.add('verifyfilesinS3', (filename) => {
    const AWS = require('aws-sdk');
    var fileExists;
    AWS.config.update({
        accessKeyId: "",
        accessSecretKey: "",
        region: ""
    });
    const s3 = new AWS.S3();
    const params = {
        Bucket: "",
        Key: ""
    };
    const result = s3.headObject(params, (err, data) => { // Check if the file exists
        if (err) {
            if (err.code === 'NotFound') {
                console.log(`File "${params.Key}" does not exist in the "${params.Bucket}" bucket.`);
                fileExists = 0;
            } else {
                console.error(err);
                fileExists = err;
            }
        } else {
            fileExists = 2;
            console.log(`File "${params.Key}" exists in the "${params.Bucket}" bucket.`);
        }
    });
    return fileExists;
});
I am first trying to hardcode the values directly and test, but it still fails with the error "CredentialsError: Missing credentials in config, if using AWS_CONFIG_FILE, set AWS_SDK_LOAD_CONFIG=1".
When I run the same code on its own in Node.js, it works as expected.
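For comparison, a minimal sketch of the same existence check using the SDK's promise interface, so that the result is actually available when the command resolves (the bucket and key arguments here are placeholders):

Cypress.Commands.add('verifyFileInS3', (bucket, key) => {
    const AWS = require('aws-sdk');
    const s3 = new AWS.S3();
    // headObject(...).promise() lets Cypress wait for the check to finish
    return cy.wrap(
        s3.headObject({ Bucket: bucket, Key: key }).promise()
            .then(() => true)                               // object exists
            .catch((err) => {
                if (err.code === 'NotFound') return false;  // object missing
                throw err;                                  // some other failure
            })
    );
});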
On button click, I call a GraphQL API that is connected to a Lambda function, and the function pulls data from a DynamoDB table. The query does not produce any error, but it doesn't give me any results either. I have also checked the CloudWatch logs and I don't see any traces of the function being called. I'm not sure what careless mistake I am making here.
Here is my API call:
void findUser() async {
  try {
    String graphQLDocument = '''query getUserById(\$userId: ID!) {
      getUserById(userId: \$id) {
        id
        name
      }
    }''';
    var operation = Amplify.API.query(
        request: GraphQLRequest<String>(
            document: graphQLDocument,
            variables: {'id': 'USER-14160000000'}));
    var response = await operation.response;
    var data = response.data;
    print('Query result: ' + data);
  } on ApiException catch (e) {
    print('Query failed: $e');
  }
}
Here is my Lambda function:
const getUserById = require('./user-queries/getUserById');

exports.handler = async (event) => {
    var userId = event.arguments.userId;
    var name = event.arguments.name;
    var avatarUrl = event.arguments.avatarUrl;
    //console.log('Received Event - ', JSON.stringify(event,3));
    console.log(userId);

    switch(event.info.fieldName) {
        case "getUserById":
            return getUserById(userId);
    }
};
const AWS = require('aws-sdk');
const docClient = new AWS.DynamoDB.DocumentClient({region: 'ca-central-1'});

async function getUserById(userId) {
    const params = {
        TableName: "Bol-Table",
        KeyConditionExpression: 'pk = :hashKey and sk = :sortKey',
        ExpressionAttributeValues: {
            ':hashKey': userId,
            ':sortKey': 'USER'
        }
    };
    try {
        const Item = await docClient.query(params).promise();
        console.log(Item);
        return {
            id: Item.Items[0].pk,
            name: Item.Items[0].details.displayName,
            avatarUrl: Item.Items[0].details.avatarUrl,
            createdAt: Item.Items[0].details.createdAt,
            updatedAt: Item.Items[0].details.updatedAt
        };
    } catch(err) {
        console.log("BOL Error: ", err);
    }
}

module.exports = getUserById;
Upon button click I get this
Moving my comment to an answer:
Can you try changing your graphQLDocument to
String graphQLDocument = '''query getUserById(\$id: ID!) {
  getUserById(userId: \$id) {
    id
    name
  }
}''';
Your variable is declared as $userId but used as $id. Try calling it $id in both places, matching your variables object.
Your Flutter code is working fine, but the Lambda on the AWS side is returning a blank string "", so there is nothing to print.
I have an S3 bucket laid out as below:
myBucket
a/
b/
c/
where myBucket is the S3 bucket and a/, b/ and c/ are the key folders (prefixes) inside that bucket.
I upload images into a/. The S3 event notification triggers SQS, which in turn triggers a Lambda function that removes the image background and uploads the result into the b/ folder.
The problem is that if, for example, I upload a folder with around 26 images to S3, only 22 or 23 of them trigger the Lambda, and only those images get processed.
For some reason S3 is not triggering for all of the images, or is this something I should configure in my Lambda function?
Here is my function code
// Note: sendmessage, arn and destBucket are defined elsewhere in the original file;
// the requires below are shown for completeness.
const aws = require('aws-sdk');
const request = require('request');
const s3 = new aws.S3();

exports.handler = async(event, context, callback) => {
    try {
        console.log(event.Records[0]);
        var json = JSON.parse(event.Records[0]['body']);
        console.log('json: ' + json);
        json = JSON.parse(json['Message']);
        json = json['Records'][0]['s3'];
        console.log(json);
        var srcBucket = json['bucket']['name'];
        console.log('srcBucket: ' + srcBucket);
        var srcKey = decodeURIComponent(json['object']['key'].replace(/\+/g, ' '));
        console.log('srcKey: ' + srcKey);
        var str = (srcKey.split('/').pop()).split('.')[0];
        console.log('str: ' + str);
        if (str != '') {
            var folderPath = srcKey.substr(srcKey.indexOf('/') + 1).split('.')[0];
            folderPath = folderPath.substring(0, folderPath.lastIndexOf('/'));
            console.log('folderPath: ' + folderPath);
            const params1 = { Bucket: srcBucket, Key: srcKey };
            var origimage = await s3.getObject(params1).promise();
            var destObject = await origimage.Body;
            var destKey = 'removebg/' + folderPath + '/' + str + '.jpg';
            var options = {
                'method': 'POST',
                'url': 'https://api.remove.bg/v1.0/removebg',
                'headers': {
                    'X-Api-Key': 'xxxxxxxxxxx'
                },
                formData: {
                    'image_file': destObject,
                    'size': 'auto'
                },
                encoding: null
            };
            request(options, function(error, response, body) {
                if (error) {
                    console.log(error);
                    sendmessage(error, 'Error removing image background', arn, srcBucket + '/' + srcKey, destBucket + destKey);
                }
                var params = { Bucket: destBucket, Key: destKey, Body: body };
                s3.upload(params, function(err, data) {
                    if (err) {
                        console.log('Error uploading data: ', err);
                        sendmessage(err, 'Error uploading transparent image to s3', arn, srcBucket + '/' + srcKey, destBucket + destKey);
                    }
                    else { console.log('Successfully uploaded data to ' + destBucket); }
                });
            });
        }
    }
    catch (e) {
        console.log(e);
    }
    callback(null, 'All done!');
};
Please let me know. Thanks in advance.
I think your problem lies on these lines:
console.log(event.Records[0]);
var json = JSON.parse(event.Records[0]['body']);
console.log('json: '+json);
json = JSON.parse(json['Message']);
json = json['Records'][0]['s3'];
Your function is only looking at the first record that is provided to the function. Multiple events can be given to the Lambda function, so your function should loop through the Records entries and process all of the events that are provided.
It should do something like:
for (const record of event.Records) {
    console.log(record);
    var json = JSON.parse(record['body']);
    console.log('json: ' + json);
    json = JSON.parse(json['Message']);
    json = json['Records'][0]['s3'];
    // ... process this object ...
}
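A slightly fuller sketch of the same idea, with the per-record work awaited inside the loop so the handler does not return before every object has been handled (the getObject / remove.bg / upload steps from the question would go where the comment is):

exports.handler = async (event) => {
    for (const record of event.Records) {
        const body = JSON.parse(record.body);          // SQS record body
        const message = JSON.parse(body.Message);      // SNS message inside it
        for (const s3Record of message.Records) {      // each S3 event record
            const srcBucket = s3Record.s3.bucket.name;
            const srcKey = decodeURIComponent(s3Record.s3.object.key.replace(/\+/g, ' '));
            console.log('processing', srcBucket, srcKey);
            // await the getObject / background removal / upload for this one object here
        }
    }
    return 'All done!';
};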
I have been trying to launch a new EC2 instance, as well as add a piece of string data to my SQS queue, from a Lambda function in response to an object-upload event in my S3 bucket.
I have been able to update my SQS queue successfully, but I have been unable to launch the new EC2 instance. Despite setting the Lambda function's timeout to the maximum of 5 minutes and increasing its memory allocation, an operation-timeout error keeps surfacing.
My code is below. Can anyone point out the potential causes of such an error? I have included my whole piece of code for reference, but the area concerning the launch is towards the end.
Thank you so much!
console.log('Loading function');

var fs = require('fs');
var async = require('async');
var aws = require('aws-sdk');

var s3 = new aws.S3({ apiVersion: '2006-03-01' });
var sqs = new aws.SQS({ apiVersion: '2012-11-05' });
var ecs = new aws.ECS({ apiVersion: '2014-11-13' });
var ec2 = new aws.EC2({ apiVersion: '2015-10-01' });

// Check if the given key suffix matches a suffix in the whitelist. Return true if it matches, false otherwise.
exports.checkS3SuffixWhitelist = function(key, whitelist) {
    if (!whitelist) { return true; }
    if (typeof whitelist == 'string') { return key.match(whitelist + '$'); }
    if (Object.prototype.toString.call(whitelist) === '[object Array]') {
        for (var i = 0; i < whitelist.length; i++) {
            if (key.match(whitelist[i] + '$')) { return true; }
        }
        return false;
    }
    console.log(
        'Unsupported whitelist type (' + Object.prototype.toString.call(whitelist) +
        ') for: ' + JSON.stringify(whitelist)
    );
    return false;
};
exports.handler = function(event, context) {
    //console.log('Received event:', JSON.stringify(event, null, 2));
    console.log('Received event:');

    // Read in the configuration file
    var config = JSON.parse(fs.readFileSync('config.json', 'utf8'));
    if (!config.hasOwnProperty('s3_key_suffix_whitelist')) {
        config.s3_key_suffix_whitelist = false;
    }
    console.log('Config: ' + JSON.stringify(config));

    var name = event.Records[0].s3.object.key;
    if (!exports.checkS3SuffixWhitelist(name, config.s3_key_suffix_whitelist)) {
        context.fail('Suffix for key: ' + name + ' is not in the whitelist');
    }

    // Get the object from the event and show its key
    var bucket = event.Records[0].s3.bucket.name;
    var key = decodeURIComponent(event.Records[0].s3.object.key.replace(/\+/g, ' '));
    var params = {
        Bucket: bucket,
        Key: key
    };
    s3.getObject(params, function(err, data) {
        if (err) {
            console.log(err);
            var message = "Error getting object " + key + " from bucket " + bucket +
                ". Make sure they exist and your bucket is in the same region as this function.";
            console.log(message);
            context.fail(message);
        } else {
            console.log('CONTENT TYPE:', key);
            context.succeed(key);
        }
    });
    // Sending the image key as a message to SQS and starting a new instance on EC2
    async.waterfall([
        function(next) {
            var params = {
                MessageBody: JSON.stringify(event),
                QueueUrl: config.queue
            };
            console.log("IN QUEUE FUNCTION");
            sqs.sendMessage(params, function(err, data) {
                if (err) { console.warn('Error while sending message: ' + err); }
                else { console.info('Message sent, ID: ' + data.MessageId); }
                next(err);
            });
        },
        function(next) {
            console.log("INITIALIZING ECS");
            var params = {
                ImageId: 'ami-e559b485',
                MinCount: 1,
                MaxCount: 1,
                DryRun: true,
                InstanceType: 't2.micro',
                KeyName: 'malpem2102',
                SubnetId: 'subnet-e8607e8d'
            };
            ec2.runInstances(params, function(err, data) {
                if (err) {
                    console.log(err, err.stack);
                    context.fail('Error', "Error getting file: " + err);
                    return;
                } else {
                    var instanceId = data.Instances[0].InstanceId;
                    console.log("Created instance ", instanceId);
                    context.suceed("Created instance");
                }
            });
        }
    ], function(err) {
        if (err) {
            context.fail('An error has occurred: ' + err);
        }
        else {
            context.succeed('Successfully processed Amazon S3 URL.');
        }
    });
};