Amazon Lambda writing to DynamoDB - amazon-web-services

I'm new to DynamoDB and I'm trying to write some data to the table using Lambda.
So far I have this:
'AddFood': function () {
    var FoodName = this.event.request.intent.slots.FoodName.value;
    var FoodCalories = this.event.request.intent.slots.FoodCalories.value;
    console.log('FoodName : ' + FoodName);
    const params = {
        TableName: 'Foods',
        Item: {
            'id': {"S": 3},
            'calories': {"S": FoodCalories},
            'food': {"S": FoodName}
        }
    };
    writeDynamoItem(params, myResult => {
        var say = '';
        say = myResult;
        say = FoodName + ' with ' + FoodCalories + ' calories has been added ';
        this.response.speak(say).listen('try again');
        this.emit(':responseReady');
    });
    function writeDynamoItem(params, callback) {
        var AWS = require('aws-sdk');
        AWS.config.update({region: AWSregion});
        var docClient = new AWS.DynamoDB();
        console.log('writing item to DynamoDB table');
        docClient.putItem(params, function (err, data) {
            if (err) {
                callback(err, null);
            } else {
                callback(null, data);
            }
        });
    }
}
Does anyone know why the data is not appearing in the database?
I have checked IAM, and the role's policy is set to AmazonDynamoDBFullAccess.

After making a few changes to the write function, the following code allowed me to write items to the database:
function writeDynamoItem(params, callback) {
    const AWS = require('aws-sdk');
    AWS.config.update({region: AWSregion});
    const docClient = new AWS.DynamoDB.DocumentClient({region: 'eu-west-1'});
    console.log('writing item to DynamoDB table');
    docClient.put(params, function (err, data) {
        if (err) {
            console.error("Unable to write item. Error JSON:", JSON.stringify(err, null, 2));
            callback(err, null);
        } else {
            callback(null, data);
        }
    });
}
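The switch to AWS.DynamoDB.DocumentClient matters because the document client marshals plain JavaScript values for you, so the Item must not use the low-level {"S": ...} attribute-type wrappers. (With the low-level client, the original params would also fail because an "S" attribute requires a string value, and {"S": 3} passes a number.) A minimal sketch of params to pair with DocumentClient.put(), assuming the same Foods table:
// Sketch: the DocumentClient takes plain values, no attribute-type wrappers.
const params = {
    TableName: 'Foods',
    Item: {
        id: '3',                 // a plain string, not {"S": 3}
        calories: FoodCalories,
        food: FoodName
    }
};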

To write to DynamoDB from a Lambda function in Python you must use the boto3 package and load the dynamodb resource. Hope this helps: the following reads food and calories from the event and writes them with a generated uuid.
import boto3
import os
import uuid

def writeToDynamo(event, context):
    recordId = str(uuid.uuid4())
    food = event["food"]
    calories = event["calories"]
    print('Generating new DynamoDB record, with ID: ' + recordId)
    print('Input food: ' + food)
    print('Input calories: ' + calories)

    # Creating new record in DynamoDB table
    dynamodb = boto3.resource('dynamodb')
    table = dynamodb.Table(os.environ['DB_TABLE_NAME'])
    table.put_item(
        Item={
            'id': recordId,
            'food': food,
            'calories': calories
        }
    )
    return recordId
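For a quick test, the incoming event only needs the food and calories keys, and the table name must be supplied through the DB_TABLE_NAME environment variable. A hypothetical test event:
{
    "food": "apple",
    "calories": "95"
}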

Related

Print the JSON value from AWS S3 bucket in Lambda function

How can I get the body of an uploaded JSON file into the lambda function and print it?
I used the following code, but it only retrieves the Content Type. Any suggestions, please?
// console.log('Loading function');
const aws = require('aws-sdk');
const s3 = new aws.S3({ apiVersion: '2006-03-01' });

exports.handler = async (event, context) => {
    //console.log('Received event:', JSON.stringify(event, null, 2));
    // Get the object from the event and show its content type
    const bucket = event.Records[0].s3.bucket.name;
    const key = decodeURIComponent(event.Records[0].s3.object.key.replace(/\+/g, ' '));
    const params = {
        Bucket: bucket,
        Key: key,
    };
    try {
        const { ContentType } = await s3.getObject(params).promise();
        console.log('CONTENT TYPE:', ContentType);
        console.log('Body: ', );
        console.log("response: " + "I want to print the response body here when a JSON file uploaded");
        return ContentType;
    } catch (err) {
        console.log(err);
        const message = `Error getting object ${key} from bucket ${bucket}. Error : ` + err;
        console.log(message);
        throw new Error(message);
    }
};
The return value of getObject contains a Body field:
const { ContentType, Body } = await s3.getObject(params).promise();
console.log('CONTENT TYPE:', ContentType);
console.log('Body: ', Body);
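Note that Body comes back as a Buffer. To print the JSON text itself, decode it first; a minimal sketch, assuming the uploaded object contains valid JSON:
const bodyText = Body.toString('utf-8');   // decode the Buffer to a string
console.log('Body:', bodyText);
console.log('Parsed JSON:', JSON.parse(bodyText));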

Flutter aws amplify not returning data when calling graphql api

On button click I have programmed the app to call a GraphQL API which is connected to a Lambda function, and the function pulls data from a DynamoDB table. The query does not produce any error, but it doesn't give me any results either. I have also checked the CloudWatch logs and I don't see any traces of the function being called. Not sure what careless mistake I am making here.
Here is my api
void findUser() async {
  try {
    String graphQLDocument = '''query getUserById(\$userId: ID!) {
      getUserById(userId: \$id) {
        id
        name
      }
    }''';
    var operation = Amplify.API.query(
        request: GraphQLRequest<String>(
            document: graphQLDocument,
            variables: {'id': 'USER-14160000000'}));
    var response = await operation.response;
    var data = response.data;
    print('Query result: ' + data);
  } on ApiException catch (e) {
    print('Query failed: $e');
  }
}
Here is my lambda function -
const getUserById = require('./user-queries/getUserById');

exports.handler = async (event) => {
    var userId = event.arguments.userId;
    var name = event.arguments.name;
    var avatarUrl = event.arguments.avatarUrl;
    //console.log('Received Event - ', JSON.stringify(event,3));
    console.log(userId);
    switch (event.info.fieldName) {
        case "getUserById":
            return getUserById(userId);
    }
};
const AWS = require('aws-sdk');
const docClient = new AWS.DynamoDB.DocumentClient({region: 'ca-central-1'});

async function getUserById(userId) {
    const params = {
        TableName: "Bol-Table",
        KeyConditionExpression: 'pk = :hashKey and sk = :sortKey',
        ExpressionAttributeValues: {
            ':hashKey': userId,
            ':sortKey': 'USER'
        }
    };
    try {
        const Item = await docClient.query(params).promise();
        console.log(Item);
        return {
            id: Item.Items[0].pk,
            name: Item.Items[0].details.displayName,
            avatarUrl: Item.Items[0].details.avatarUrl,
            createdAt: Item.Items[0].details.createdAt,
            updatedAt: Item.Items[0].details.updatedAt
        };
    } catch (err) {
        console.log("BOL Error: ", err);
    }
}

module.exports = getUserById;
Upon button click I get this
Moving my comment to an answer:
Can you try changing your graphQLDocument to
String graphQLDocument = '''query getUserById(\$id: ID!) {
  getUserById(userId: \$id) {
    id
    name
  }
}''';
Your variable is $userId in one place and $id in the other. Try calling it $id in both places, to match your variables object.
Your Flutter code is working fine, but the Lambda behind the AWS API is returning a blank string "", so there is nothing to print.

Lambda function is not triggered for all the s3 image upload

I have s3 bucket as below,
myBucket
a/
b/
c/
where myBucket is an s3 bucket and a, b, c are the key prefixes (folders) inside that bucket.
I will upload images into a/. The s3 event notification triggers SQS, which in turn triggers the lambda function that removes the image background and uploads the result into the b/ folder.
The problem: if I upload a folder with around 26 images into s3, the lambda is triggered for only 22 or 23 of them, and only those images get processed.
For some reason s3 is not triggering for all of the images, or is there something I should configure in my lambda function?
Here is my function code
// NOTE: these requires/constants are not part of the original snippet and are
// assumed to be defined elsewhere in the function; placeholders shown here.
const AWS = require('aws-sdk');
const s3 = new AWS.S3();
const request = require('request');
const destBucket = 'xxxxxxxxxxx'; // destination bucket, defined elsewhere
const arn = 'xxxxxxxxxxx';        // topic ARN used by sendmessage(), defined elsewhere

exports.handler = async (event, context, callback) => {
    try {
        console.log(event.Records[0]);
        var json = JSON.parse(event.Records[0]['body']);
        console.log('json: ' + json);
        json = JSON.parse(json['Message']);
        json = json['Records'][0]['s3'];
        console.log(json);
        var srcBucket = json['bucket']['name'];
        console.log('srcBucket: ' + srcBucket);
        var srcKey = decodeURIComponent(json['object']['key'].replace(/\+/g, ' '));
        console.log('srcKey: ' + srcKey);
        var str = (srcKey.split('/').pop()).split('.')[0];
        console.log('str: ' + str);
        if (str != '') {
            var folderPath = srcKey.substr(srcKey.indexOf('/') + 1).split('.')[0];
            folderPath = folderPath.substring(0, folderPath.lastIndexOf('/'));
            console.log('folderPath: ' + folderPath);
            const params1 = { Bucket: srcBucket, Key: srcKey };
            var origimage = await s3.getObject(params1).promise();
            var destObject = origimage.Body;
            var destKey = 'removebg/' + folderPath + '/' + str + '.jpg';
            var options = {
                'method': 'POST',
                'url': 'https://api.remove.bg/v1.0/removebg',
                'headers': {
                    'X-Api-Key': 'xxxxxxxxxxx'
                },
                formData: {
                    'image_file': destObject,
                    'size': 'auto'
                },
                encoding: null
            };
            request(options, function (error, response, body) {
                if (error) {
                    console.log(error);
                    sendmessage(error, 'Error removing image background', arn, srcBucket + '/' + srcKey, destBucket + destKey);
                }
                var params = { Bucket: destBucket, Key: destKey, Body: body };
                s3.upload(params, function (err, data) {
                    if (err) {
                        console.log('Error uploading data: ', err);
                        sendmessage(err, 'Error uploading transparent image to s3', arn, srcBucket + '/' + srcKey, destBucket + destKey);
                    }
                    else { console.log('Successfully uploaded data to ' + destBucket); }
                });
            });
        }
    }
    catch (e) {
        console.log(e);
    }
    callback(null, 'All done!');
};
Please let me know. Thanks in advance.
I think your problem lies on these lines:
console.log(event.Records[0]);
var json = JSON.parse(event.Records[0]['body']);
console.log('json: '+json);
json = JSON.parse(json['Message']);
json = json['Records'][0]['s3'];
Your function is only looking at the first record that is provided to it. Multiple records can be given to the Lambda function in a single event, so your function should loop through the Records entries and process all of the events that are provided.
It should do something like:
for (const record of event.Records) {
    console.log(record);
    var json = JSON.parse(record['body']);
    console.log('json: ' + json);
    json = JSON.parse(json['Message']);
    json = json['Records'][0]['s3'];
    // ... process this record's object as before ...
}
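Separately, the notification parsed out of each body can itself contain more than one entry in its own Records array, and json['Records'][0]['s3'] still reads only the first. A defensive sketch that loops at both levels, assuming the same body → Message → Records nesting as in the question:
for (const sqsRecord of event.Records) {
    const notification = JSON.parse(JSON.parse(sqsRecord.body).Message);
    for (const s3Record of notification.Records) {
        const srcBucket = s3Record.s3.bucket.name;
        const srcKey = decodeURIComponent(s3Record.s3.object.key.replace(/\+/g, ' '));
        console.log('processing', srcBucket + '/' + srcKey);
        // ... run the existing background-removal logic for this one object ...
    }
}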

Timeout while launching EC2 instances from AWS Lambda

I have been trying to launch a new EC2 instance, as well as add a piece of string data to my SQS queue, through Lambda in response to an object upload event in my s3 bucket.
I have been able to update my SQS queue successfully, but have been unable to initialise the new EC2 instance. Despite setting the lambda function's time allocation to the maximum of 5 minutes and increasing its memory allocation, an operation timeout error keeps surfacing.
My code is below. Can anyone point out the potential causes of such an error? While I have put my whole piece of code here for reference, the area concerning the launch is towards the end.
Thank you so much!
console.log('Loading function');
var fs = require('fs');
var async = require('async');
var aws = require('aws-sdk');
var s3 = new aws.S3({ apiVersion: '2006-03-01' });
var sqs = new aws.SQS({apiVersion: '2012-11-05'});
var ecs = new aws.ECS({apiVersion: '2014-11-13'});
var ec2 = new aws.EC2({apiVersion: '2015-10-01'});

// Check if the given key suffix matches a suffix in the whitelist. Return true if it matches, false otherwise.
exports.checkS3SuffixWhitelist = function(key, whitelist) {
    if (!whitelist) { return true; }
    if (typeof whitelist == 'string') { return key.match(whitelist + '$'); }
    if (Object.prototype.toString.call(whitelist) === '[object Array]') {
        for (var i = 0; i < whitelist.length; i++) {
            if (key.match(whitelist[i] + '$')) { return true; }
        }
        return false;
    }
    console.log(
        'Unsupported whitelist type (' + Object.prototype.toString.call(whitelist) +
        ') for: ' + JSON.stringify(whitelist)
    );
    return false;
};
exports.handler = function(event, context) {
    //console.log('Received event:', JSON.stringify(event, null, 2));
    console.log('Received event:');

    //Read in the configuration file
    var config = JSON.parse(fs.readFileSync('config.json', 'utf8'));
    if (!config.hasOwnProperty('s3_key_suffix_whitelist')) {
        config.s3_key_suffix_whitelist = false;
    }
    console.log('Config: ' + JSON.stringify(config));

    var name = event.Records[0].s3.object.key;
    if (!exports.checkS3SuffixWhitelist(name, config.s3_key_suffix_whitelist)) {
        context.fail('Suffix for key: ' + name + ' is not in the whitelist');
    }

    // Get the object from the event and show its key
    var bucket = event.Records[0].s3.bucket.name;
    var key = decodeURIComponent(event.Records[0].s3.object.key.replace(/\+/g, ' '));
    var params = {
        Bucket: bucket,
        Key: key
    };
    s3.getObject(params, function(err, data) {
        if (err) {
            console.log(err);
            var message = "Error getting object " + key + " from bucket " + bucket +
                ". Make sure they exist and your bucket is in the same region as this function.";
            console.log(message);
            context.fail(message);
        } else {
            console.log('CONTENT TYPE:', key);
            context.succeed(key);
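            // NOTE: context.succeed()/context.fail() end the invocation as soon as
            // they run, so this callback can terminate the function before the
            // async.waterfall below (SQS send + EC2 launch) has finished.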
        }
    });
    //Sending the image key as a message to SQS and starting a new instance on EC2
    async.waterfall([
        function(next) {
            var params = {
                MessageBody: JSON.stringify(event),
                QueueUrl: config.queue
            };
            console.log("IN QUEUE FUNCTION");
            sqs.sendMessage(params, function (err, data) {
                if (err) { console.warn('Error while sending message: ' + err); }
                else { console.info('Message sent, ID: ' + data.MessageId); }
                next(err);
            });
        },
        function (next) {
            console.log("INITIALIZING ECS");
            var params = {
                ImageId: 'ami-e559b485',
                MinCount: 1,
                MaxCount: 1,
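                // NOTE: with DryRun set to true, EC2 only checks permissions; no
                // instance is launched and the call returns a DryRunOperation error.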
                DryRun: true,
                InstanceType: 't2.micro',
                KeyName: 'malpem2102',
                SubnetId: 'subnet-e8607e8d'
            };
            ec2.runInstances(params, function(err, data) {
                if (err) {
                    console.log(err, err.stack);
                    context.fail('Error', "Error getting file: " + err);
                    return;
                } else {
                    var instanceId = data.Instances[0].InstanceId;
                    console.log("Created instance ", instanceId);
                    context.succeed("Created instance");
                }
            });
        }
    ], function(err) {
        if (err) {
            context.fail('An error has occurred: ' + err);
        } else {
            context.succeed('Successfully processed Amazon S3 URL.');
        }
    });
};

AWS DynamoDB returns validation error when called from AWS Lambda

I'm using AWS Lambda and trying to write something to AWS DynamoDB. I use the following code:
var tableName = "locations";
var item = {
    deviceId: {
        S: event.deviceId
    },
    timestamps: {
        S: event.timestamp
    }
};
var params = {
    TableName: tableName,
    Item: item
};
dynamo.putItem(params, function(err, data) {
    if (err) {
        context.fail(new Error('Error ' + err));
    } else {
        context.succeed(null);
    }
});
And I get the following error:
Error ValidationException: One or more parameter values were invalid: Type mismatch for key deviceId expected: S actual: M
This happened because the AWS SDK for Node.js changed!
If you are using:
var doc = require('dynamodb-doc');
var dynamo = new doc.DynamoDB();
then the parameters to the putItem call (and most other calls) have changed and instead need to be:
var tableName = "locations";
var item = {
    deviceId: event.deviceId,
    timestamp: event.timestamp,
    latitude: Number(event.latitude),
    longitude: Number(event.longitude)
};
var params = {
    TableName: tableName,
    Item: item
};
Read all about the new sdk here: https://github.com/awslabs/dynamodb-document-js-sdk
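For completeness, a minimal sketch of the full call with the document client, assuming dynamo is the doc.DynamoDB() instance from above (the same plain-value style also works with the SDK's newer AWS.DynamoDB.DocumentClient):
var doc = require('dynamodb-doc');
var dynamo = new doc.DynamoDB();

exports.handler = function(event, context) {
    var params = {
        TableName: "locations",
        Item: {
            deviceId: event.deviceId,          // plain values, no {S: ...} wrappers
            timestamp: event.timestamp,
            latitude: Number(event.latitude),
            longitude: Number(event.longitude)
        }
    };
    dynamo.putItem(params, function(err, data) {
        if (err) { context.fail(new Error('Error ' + err)); }
        else { context.succeed(null); }
    });
};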