Add information into DynamoDB using Alexa Lambda function - amazon-web-services

I've been stuck on this code for about a day now.
I am just trying to add information into DynamoDB through a launch request using Alexa.
I get the following error code:
"errorMessage": "RequestId: f96ae2cb-1dbf-11e7-a267-b7cf2f2c95a0 Process exited before completing request"
The information actually gets inserted into DynamoDB, but I can't add more functions to the program because of the error.
From what I understand, it may be a problem with the callback.
I have tried many different ways to "callback" or return something, but I haven't figured out how to avoid the error.
If I uncomment this.emit(':tell', "Hello, inserting Apples into DynamoDB"); the error goes away, but no information gets inserted.
What am I doing wrong and how can I fix it?
Below is my code:
'use strict';

var Alexa = require('alexa-sdk');
const doc = require('dynamodb-doc');
const dynamo = new doc.DynamoDB();

exports.handler = function(event, context, callback) {
    var alexa = Alexa.handler(event, context);
    alexa.registerHandlers(handlers);
    alexa.execute();
};

var handlers = {
    'LaunchRequest': function(event, context, callback) {
        // this.emit(':tell', "Hello, inserting Apples into DynamoDB");
        var params = {
            Item: {
                date: Date.now(),
                message: "Apples"
            },
            TableName: '_yourTableName'
        };
        dynamo.putItem(params, function(err, data) {
            if (err) {
                callback(err, null);
            } else {
                callback(null, data);
            }
        });
        context.done();
    }
};

This happens because adding values to DynamoDB is asynchronous (it takes a callback), but context.done(); is written outside that callback. context.done(); runs before DynamoDB has completed the operation, so the process exits early.
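One way to apply that (a minimal sketch, not a tested drop-in; it keeps the placeholder table name '_yourTableName' from the question) is to drop context.done() entirely and emit the Alexa response from inside the putItem callback, capturing the handler's this first:

'use strict';

var Alexa = require('alexa-sdk');
const doc = require('dynamodb-doc');
const dynamo = new doc.DynamoDB();

var handlers = {
    'LaunchRequest': function () {
        var self = this; // keep a reference to the handler context for the callback below
        var params = {
            Item: {
                date: Date.now(),
                message: "Apples"
            },
            TableName: '_yourTableName'
        };
        dynamo.putItem(params, function (err, data) {
            if (err) {
                self.emit(':tell', "Sorry, I could not save that.");
            } else {
                self.emit(':tell', "Hello, inserting Apples into DynamoDB");
            }
        });
        // no context.done() here -- emitting the response ends the request
    }
};

exports.handler = function (event, context, callback) {
    var alexa = Alexa.handler(event, context);
    alexa.registerHandlers(handlers);
    alexa.execute();
};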

Related

Is there a way to get the inputTranscript from Lex into a Lambda, which then allows Connect to grab that transcript and set it as a contact attribute?

I have this Lambda below that gets items from DynamoDB and returns them to Connect just fine; however, I cannot seem to get the inputTranscript over. Is there a way I could implement this in my current Lambda so that Connect can access it? I am seriously stumped on this, as everything I have tried from the documentation has not worked for me.
const AWS = require("aws-sdk");
const dynamodb = new AWS.DynamoDB();

exports.handler = function(event, context, callback) {
    console.log(`DEBUG:\t Event`, JSON.stringify(event, null, 4));
    var params = {
        Key: {
            "name": {
                "S": '"' + event.slots.list + '"'
            }
        },
        TableName: 'ServiceOfferings'
    };
    dynamodb.getItem(params, function(err, data) {
        if (err) {
            console.log("ERROR:\t", err);
            callback(err);
        } else {
            console.log(data);
            if (data.Item) {
                console.log('data returned from DynamoDB ', JSON.stringify(data));
                callback(null, {
                    ServiceOffering: data.Item.name.S.slice(1, -1)
                });
            } else {
                console.log("no callback number found for intent");
                callback(new Error("no callback number found for intent"));
            }
        }
    });
};
The test instance I use to ensure the lambda works correctly is as follows:
{
    "dialog-state": "ReadyForFulfillment",
    "input-transcript": "my printer is not working",
    "slots": {
        "list": "Re-IP Project - Printers"
    },
    "intent-name": "getServiceOffering"
}
The response after testing this comes out to:
{
    "ServiceOffering": "Re-IP Project - Printers"
}
When you log the event, you should be able to see the inputTranscript being passed to your Lambda. So you simply need to take it out of the event and do with it what you want.
const AWS = require("aws-sdk");
const dynamodb = new AWS.DynamoDB();

exports.handler = function(event, context, callback) {
    console.log(`DEBUG:\t Event`, JSON.stringify(event, null, 4));
    var inputTranscript = event.inputTranscript;
    ...
    callback(null, {
        ServiceOffering: data.Item.name.S.slice(1, -1),
        inputTranscript: inputTranscript
    });
That includes the inputTranscript in your callback to Connect (I assume), and you can use the Set Attribute block to save and handle the input within Connect.
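Putting the two snippets together, the whole handler might look like this (a sketch, assuming Lex delivers the utterance as event.inputTranscript as described above, and keeping the table and slot names from the question):

const AWS = require("aws-sdk");
const dynamodb = new AWS.DynamoDB();

exports.handler = function (event, context, callback) {
    console.log(`DEBUG:\t Event`, JSON.stringify(event, null, 4));
    // The raw utterance passed along by Lex
    var inputTranscript = event.inputTranscript;
    var params = {
        Key: {
            "name": { "S": '"' + event.slots.list + '"' }
        },
        TableName: 'ServiceOfferings'
    };
    dynamodb.getItem(params, function (err, data) {
        if (err) {
            console.log("ERROR:\t", err);
            return callback(err);
        }
        if (!data.Item) {
            console.log("no callback number found for intent");
            return callback(new Error("no callback number found for intent"));
        }
        // Return both values so Connect can map them to contact attributes
        callback(null, {
            ServiceOffering: data.Item.name.S.slice(1, -1),
            inputTranscript: inputTranscript
        });
    });
};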

Null Response while querying DynamoDB

I am querying DynamoDB from a Lambda function written in the Node.js environment, trying to query the table CurrencyPrice where the Currency column has the value "BLC".
When I test my function in the Lambda console, it prints up to the second console log ("querying DBCurrencyPrice") and then returns a null response. It does not print either of the next two console logs, and I'm not sure it is even connecting to the DB.
It seems that the code never reaches the ddb.query() callback at all; I tried putting loggers throughout that function but none of them get printed.
I have checked all the AWS documentation I could find but cannot understand why this function is not being executed.
My code looks something like this:
var AWS = require('aws-sdk');

exports.handler = async (event) => {
    AWS.config.update({region: 'ap-southeast-2'});
    console.log("i am in function");
    // Create DynamoDB service object
    var ddb = new AWS.DynamoDB.DocumentClient();
    var table = 'CurrencyPrice';
    var params = {
        "Select": "ALL_ATTRIBUTES",
        "TableName": "CurrencyPrice",
    };
    console.log("querying DB" + table);
    ddb.query(params, function(err, data) {
        console.log("i am in ddb query");
        if (err) {
            console.error("Unable to query. Error:", JSON.stringify(err, null, 2));
        } else {
            console.log(data);
        }
    });
};
Current result that I am getting in lambda console:
Response:
null
Request ID:"XXXX"
Function Logs:
START RequestId: XXX Version: $LATEST
2019-06-02T13:31:55.189Z XXXX INFO i am in function
2019-06-02T13:31:55.331Z XXXX INFO querying DBCurrencyPrice
2019-06-02T13:31:55.390Z XXXX INFO { Select: 'ALL_ATTRIBUTES', TableName: 'CurrencyPrice' }
END RequestId: XXXX
I would expect it to at least print "Unable to query" or the actual data, showing that it connects to the DB and runs the query.
If you're using async/await you would want to return a promise.
var AWS = require("aws-sdk");
AWS.config.update({ region: "ap-southeast-2" });
var ddb = new AWS.DynamoDB.DocumentClient();

exports.handler = async event => {
    console.log("i am in function");
    // Create DynamoDB service object
    var table = "CurrencyPrice";
    var params = {
        Select: "ALL_ATTRIBUTES",
        TableName: "CurrencyPrice"
    };
    console.log("querying DB" + table);
    return ddb
        .query(params)
        .promise()
        .then(data => {
            console.log("i am in ddb query");
            console.log(data);
            return data;
        })
        .catch(err => {
            console.error("Unable to query. Error:", JSON.stringify(err, null, 2));
        });
};
The AWS example
ddbClient.query(params, callback)
is asynchronous: the call returns immediately and execution continues without waiting for the callback to finish. That's why you got null.
You will have to turn this .query() call into a Promise, so that execution waits for the query to complete.
Make query a promise and wait for its fulfilled state.
const promise = await ddb.query(params, function(err, data) {
    console.log("i am in ddb query");
    if (err) {
        console.error("Unable to query. Error:", JSON.stringify(err, null, 2));
    } else {
        console.log(data);
        // process your data
    }
}).promise();
return promise;
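Either way, the key point is that the handler waits for DynamoDB before returning. As a point of comparison, a minimal async/await version might look like this (a sketch only; it assumes Currency is the table's partition key, which the question implies but the original params never express as a KeyConditionExpression):

var AWS = require("aws-sdk");
AWS.config.update({ region: "ap-southeast-2" });
var ddb = new AWS.DynamoDB.DocumentClient();

exports.handler = async (event) => {
    var params = {
        TableName: "CurrencyPrice",
        // Assumption: Currency is the partition key of CurrencyPrice
        KeyConditionExpression: "Currency = :c",
        ExpressionAttributeValues: { ":c": "BLC" }
    };
    try {
        // await pauses the handler until DynamoDB responds
        const data = await ddb.query(params).promise();
        console.log(data);
        return data;
    } catch (err) {
        console.error("Unable to query. Error:", JSON.stringify(err, null, 2));
        throw err;
    }
};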

AWS - Sending 1000's of emails from Lambda / Node.js

I have a "main" Lambda function that gets triggered by SNS. It pulls a list of recipients from the database and it needs to send each of them a message based on a template, replacing things like first name and such.
The way I have it setup is I created another Lambda function called "email-send" which is subscribed to "email-send" topic. The "main" Lambda then loops through the recipients list and publishes messages to "email-send" with a proper payload (from, to, subject, message). This might eventually need to process 1000's of emails in a single batch.
Is this a good approach to my requirements? Perhaps Lambda/SNS is not a way to go? If so, what would you recommend.
With this setup I am running into issues: my "main" function finishes running and somehow "sns.publish" does not fire for everything in my loop. I assume that is because I am not letting it finish, but I am not sure how to fix it, given that it happens inside a loop.
Here is the snippet from my Lambda function:
exports.handler = (event, context, callback) => {
    // code is here to pull data into "data" array

    // process records
    for (var i = 0; i < data.length; i++) {
        var sns = new aws.SNS();
        sns.publish({
            Message: JSON.stringify({ from: data[i].from, to: data[i].to, subject: subject, body: body }),
            TopicArn: 'arn:aws:sns:us-west-2:XXXXXXXX:email-send'
        }, function(err, data) {
            if (err) {
                console.log(err.stack);
            } else {
                console.log('SNS pushed!');
            }
        });
    }
    context.succeed("success");
};
Thanks for any assistance.
Your code is doing this...
Begin calling sns.publish() 1000 times
Return (through context.succeed())
You didn't wait for those 1000 calls to finish!
What your code should do is...
Begin calling sns.publish() 1000 times
When all calls to sns.publish() have returned, then return. (context.succeed() is old, so we should use callback() instead.)
Something like this...
// Instantiate the client only once instead of data.length times
const sns = new aws.SNS();

exports.handler = (event, context, callback) => {
    const snsCalls = [];
    for (var i = 0; i < data.length; i++) {
        snsCalls.push(sns.publish({
            Message: JSON.stringify({
                from: data[i].from,
                to: data[i].to,
                subject: subject,
                body: body
            }),
            TopicArn: 'arn:aws:sns:us-west-2:XXXXXXXX:email-send'
        }).promise());
    }
    return Promise.all(snsCalls)
        .then(() => callback(null, 'Success'))
        .catch(err => callback(err));
};
I think a better approach is to use the AWS Lambda API and invoke the worker function directly.
That way, you don't need SNS.
For example:
var lambda = new AWS.Lambda({region: AWS_REGION});

function invokeWorkerLambda(task, callback) {
    var params = {
        FunctionName: WORKER_LAMBDA_NAME,
        InvocationType: 'Event',
        Payload: JSON.stringify({.....})
    };
    lambda.invoke(params, function(err, data) {
        if (err) {
            console.error(err, err.stack);
            callback(err);
        } else {
            callback(null, data);
        }
    });
}
As you can see, you don't need SNS to invoke a Lambda function.
Important: another suggestion is to create an array of invocations (functions) and later execute them as follows:
async.parallel(invocations, function(err) {
    if (err) {
        console.error(err, err.stack);
        callback(err);
    }
});
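For completeness, here is a rough sketch of how that invocations array could be built from the same data array used with SNS earlier (variable names like data, subject and body follow the question's snippet and are only illustrative):

var async = require('async');

// One wrapper function per recipient; async.parallel supplies the callback
var invocations = data.map(function (record) {
    return function (cb) {
        invokeWorkerLambda({
            from: record.from,
            to: record.to,
            subject: subject,
            body: body
        }, cb);
    };
});

async.parallel(invocations, function (err) {
    if (err) {
        console.error(err, err.stack);
        return callback(err);
    }
    callback(null, 'Success');
});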
Take a look at this link where I got a lot of knowledge about Lambda invocation: https://cloudonaut.io/integrate-sqs-and-lambda-serverless-architecture-for-asynchronous-workloads/

Database retrieval error "Process exited before completing request"

I've got a problem getting data from the database via AWS Lambda. Here is my code in Node.js:
'use strict';
console.log('Loading function');

let doc = require('dynamodb-doc');
let dynamo = new doc.DynamoDB();

exports.handler = (event, context, callback) => {
    //console.log('Received event:', JSON.stringify(event, null, 2));
    if (event.tableName) {
        event.id.TableName = event.tableName;
    }
    dynamo.getItem(event.id, callback);
    callback(null, event.id); // Echo back the first key value
    // callback('Something went wrong');
};
And I am testing it with this JSON:
{
    "tableName": "Events",
    "payload": "dbf943c3-aaac-473d-9c6a-7651280e0f90"
}
I get an error:
{
    "errorMessage": "Process exited before completing request"
}
Does anyone know what is wrong?
PS. My table is called Events and the id of the record I want to get is dbf943c3-aaac-473d-9c6a-7651280e0f90.

AWS Lambda subscribed to SNS topic doesn't process all messages to the topic

I am building an alerting system powered by Amazon services.
I drop a file onto S3 daily which spawns a lambda function (let's call it the Generator function) to process the file.
Generator builds alerts based on this file and posts multiple messages to an SNS topic (let's call it Outbox) - one message for each recipient calculated by Generator.
I have a second lambda function (let's call it Courier) subscribed to Outbox which should take each message and do something with it.
The Generator code:
// 'Generator' function
exports.handler = function (event, context) {
    console.log('Reading options from event:\n', util.inspect(event, {depth: 5}));
    var users = {};
    var userSubscriptions = {};
    var alerts = {};
    var artists = {};
    var tracks = {};
    async.waterfall([
        function downloadSubscribersFile (next) {
            // Do stuff
        },
        function downloadAndFormatActionsFile (next) {
            // Download data file and analyse
        },
        function publishAlerts (next) {
            // Now we have alerts built, we need to mail them out
            var recipients = Object.keys(alerts);
            async.each(recipients, function (recipient, callback) {
                var recipientAlert = alerts[recipient];
                console.log(util.inspect(recipientAlert, { depth: 10 }));
                if (alerts[recipient].actions.artists.length < 1) {
                    return callback();
                }
                var params = {
                    TopicArn: SNS_TOPIC_ARN,
                    Subject: recipient,
                    Message: JSON.stringify(recipientAlert)
                };
                sns.publish(params, function (err, data) {
                    if (err) {
                        console.log(err);
                        return callback(err);
                    } else {
                        console.log('PUBLISHED MESSAGE: \n', util.inspect(data, { depth: 10 }));
                        console.log('MESSAGE WAS: \n', util.inspect(params, { depth: 10 }));
                    }
                    return callback();
                });
            }, function (err) {
                if (err) return next(err);
                next();
            });
        }
    ], function (err) {
        if (err) {
            console.log('Error: ', err);
        } else {
            console.log('Process successful');
        }
        context.done();
    });
};
And the other function:
// 'Courier' function
console.log('Loading function');

exports.handler = function(event, context) {
    console.log(JSON.stringify(event, null, 2));
    console.log('From SNS:', event.Records[0].Sns.Message);
    context.succeed();
};
When my Generator function is invoked, I can see 12 messages should be posted to the SNS topic. There are no errors recorded when publishing these messages, and yet the Courier function only fires once.
I'm wondering if anyone's had any similar issues with this and whether there's something I'm missing here. It could be that there's something I haven't configured correctly in AWS but I'm pretty confident everything is set up as it should be.
UPDATE:
After looking at the messages I'm attempting to send, the one that WAS picked up by SNS seems to be the one with the smallest payload. I'm wondering if SNS is able to cope with lots of small, frequent messages to a topic...?
Yes, SNS can cope with high throughput on a single topic. However, there is a maximum message size of 256 KB, so if your messages are larger than that it might be the cause.
I see your Generator function is logging the messages, are you seeing 12 logged messages with message ids? I see you have a variable alerts which you are hoping to get multiple recipients from, but I don't see where you are setting it.
My advice: add more logging before you send the message to verify that what you think should be happening is actually happening.
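For instance, a sketch of the kind of logging meant here, slotted into the publishAlerts step of the Generator above; it also surfaces any payloads approaching the 256 KB limit:

// Log what is about to be sent and how big it is
console.log('About to publish for', recipient,
            'payload bytes:', Buffer.byteLength(params.Message, 'utf8'));

sns.publish(params, function (err, data) {
    if (err) {
        console.log('PUBLISH FAILED for', recipient, err);
        return callback(err);
    }
    console.log('PUBLISHED MessageId:', data.MessageId, 'Subject:', params.Subject);
    return callback();
});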