I am trying to get the full execution history using a Lambda function and store it in DynamoDB. The function returns about 20 executions and a string value called NextToken that has to be passed to the next call to get the rest of the executions.
This is my code.
const AWS = require('aws-sdk');
const dynamoDb = new AWS.DynamoDB.DocumentClient({
    region: 'myregion'
});

exports.handler = (event, context, callback) => {
    const table = 'myDynamoDB';
    const executionARN = "arn:aws:region:accountid:execution:myStateMachine:test";
    var stepfunctions = new AWS.StepFunctions();
    var params = {
        executionArn: executionARN,
        maxResults: 20,
        nextToken: null,
        reverseOrder: false
    };
    stepfunctions.getExecutionHistory(params, function(err, dataExecution) {
        if (err) {
            console.log(err, err.stack);
        } else {
            const params2 = {
                TableName: table,
                Item: {
                    id: executionARN,
                    execution_history: dataExecution
                }
            };
            dynamoDb.put(params2).promise();
        }
    });
};
nextToken needs to be passed in the params of the next call to getExecutionHistory. You can call the function recursively until all tokens are exhausted; I came across a similar situation while fetching logs via CloudWatch.
Sample for fetching history recursively:
Wrap getExecutionHistory in a promise and put it in a separate JS file (say writer.js); then your main index.js can call that function like this,
// writer.js -- fetches one page of history and writes it to DynamoDB.
// Returns a promise: once the page is fetched and the record inserted,
// it resolves dataExecution, which carries the nextToken for the next call.
// (Client and param setup added here so the module is self-contained.)
const AWS = require('aws-sdk');
const stepfunctions = new AWS.StepFunctions();
const dynamoDb = new AWS.DynamoDB.DocumentClient({ region: 'myregion' });

const table = 'myDynamoDB';
const executionARN = "arn:aws:region:accountid:execution:myStateMachine:test";
const params = {
    executionArn: executionARN,
    maxResults: 20,
    reverseOrder: false
};

module.exports.get = function(fwdtoken) {
    if (fwdtoken) params.nextToken = fwdtoken;
    return new Promise((resolve, reject) => {
        stepfunctions.getExecutionHistory(params, function(err, dataExecution) {
            if (err) {
                reject(err.stack);
            } else {
                const params2 = {
                    TableName: table,
                    Item: {
                        id: executionARN,
                        execution_history: dataExecution
                    }
                };
                dynamoDb.put(params2).promise();
                resolve(dataExecution);
            }
        });
    });
};
// index.js -- main logic; invokes getAllLogs recursively.
var writer = require('./writer');
var fwdtoken;

function getAllLogs(fwdtoken, fetchCount) {
    fetchCount = fetchCount || 0;
    if (fetchCount > 40) {
        throw new Error("Fetched too many times.");
    }
    return writer.get(fwdtoken)
        .then(function (dataExecution) {
            // getExecutionHistory returns the pagination token as nextToken
            // (nextForwardToken is its CloudWatch Logs counterpart)
            if (dataExecution.nextToken) {
                fwdtoken = dataExecution.nextToken;
                return getAllLogs(fwdtoken, fetchCount + 1);
            }
            return fwdtoken;
        });
}

getAllLogs(fwdtoken, 0);
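For what it's worth, the same loop reads a little more directly with async/await; a sketch that assumes the writer module above and the nextToken field on the response:
// Sketch only: flattening the recursion into a loop with async/await.
// Assumes writer.get(token) resolves the raw getExecutionHistory response.
const writer = require('./writer');

async function getAllHistory() {
    let token;
    for (let fetchCount = 0; fetchCount <= 40; fetchCount++) {
        const dataExecution = await writer.get(token);
        token = dataExecution.nextToken;
        if (!token) return; // no more pages to fetch
    }
    throw new Error("Fetched too many times.");
}

getAllHistory().catch(console.error);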
I am using a Lambda function for DynamoDB streams, triggered after a record is inserted.
However, after the dynamoDB.update call the Lambda seems to die, and no further logs are printed. Can anyone help?
Here is my JavaScript code:
/* Amplify Params - DO NOT EDIT
    API_SCHOLARGRAPH_EMPLOYEEINFOTABLE_ARN
    API_SCHOLARGRAPH_EMPLOYEEINFOTABLE_NAME
    ENV
    REGION
Amplify Params - DO NOT EDIT */
const AWS = require('aws-sdk');

const awsConfig = {
    region: process.env.REGION,
    endpoint:
        process.env.DYNAMODB_ENDPOINT ||
        `https://dynamodb.${process.env.REGION}.amazonaws.com`
};
const dynamoDB = new AWS.DynamoDB.DocumentClient(awsConfig);

async function handleNewEmployeeInfo(dynamoId, employeeId) {
    console.log(
        `[handleNewEmployeeInfo] begin. dynamoId(${dynamoId}) employeeId(${employeeId})`
    );
    try {
        const employeeName = "TestString";
        // this log is working
        console.log(
            'try to update table:',
            process.env.API_SCHOLARGRAPH_EMPLOYEEINFOTABLE_NAME,
            dynamoId,
            employeeName
        );
        // something wrong with the update
        await dynamoDB
            .update({
                TableName: process.env.API_SCHOLARGRAPH_EMPLOYEEINFOTABLE_NAME,
                Key: {
                    id: dynamoId
                },
                UpdateExpression: `set employeeName = :employeeName`,
                ExpressionAttributeValues: {
                    ':employeeName': employeeName
                }
            })
            .promise()
            .then((data) => console.log(data.Attributes))
            .catch(console.error);
    } catch (error) {
        // NOT working
        console.log('[ERROR]:', error);
    }
    // NOT working
    console.log('[FINISH] call end');
}

async function handleEventRecord(record) {
    console.log(record.eventID);
    console.log(record.eventName);
    try {
        // after EmployeeInfo is created by the admin
        if (record.eventName === 'INSERT') {
            const arn = record.eventSourceARN;
            console.log(`[INSERT][${arn}]: ${JSON.stringify(record)}`);
            if (arn.includes(':table/EmployeeInfo')) {
                const dynamoId = record.dynamodb.NewImage.id['S'];
                const employeeId = record.dynamodb.NewImage.employeeId['S'];
                await handleNewEmployeeInfo(dynamoId, employeeId);
            }
        }
    } catch (error) {
        console.log('[ERROR]:', error);
    }
}

/**
 * @type {import('@types/aws-lambda').APIGatewayProxyHandler}
 */
exports.handler = async (event) => {
    console.log(`EVENT: ${JSON.stringify(event)}`);
    event.Records.forEach(async (record) => {
        await handleEventRecord(record);
    });
    return Promise.resolve('Successfully processed DynamoDB record');
};
After I remove the update logic, the logs that follow print fine, so I am pretty sure it is the update call that has the problem.
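One thing worth checking here (an observation about the snippet, not something confirmed in the post): Array.prototype.forEach does not wait for async callbacks, so the handler can return before any handleEventRecord call finishes, and Lambda may freeze the execution environment in the middle of the update. A minimal sketch of the handler rewritten to wait for every record:
// Sketch: resolve only after every record has been processed.
exports.handler = async (event) => {
    console.log(`EVENT: ${JSON.stringify(event)}`);
    await Promise.all(event.Records.map((record) => handleEventRecord(record)));
    return 'Successfully processed DynamoDB record';
};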
I am using an AWS Lambda function that gets a list of items and pushes them into the table asynchronously. Here Promise.all(promiseList) does not add the data to the table, but Promise.all(promiseList[0]) does ensure that my first element is pushed. How can I make it wait for all of them?
My code:
let promiseList = [];
// await Promise.all(
promiseList.push(
    event.Records.map(async record => {
        console.log(record.dynamodb.Keys.ServiceOrderId.S);
        console.log('Inside initiate payment');
        const paymentObject = {
            paymentId: record.dynamodb.Keys.ServiceOrderId.S,
            totalAmount: "1200"
        };
        console.log(paymentObject);
        const dynamoDBParams = {
            TableName: TABLE_NAME,
            Item: paymentObject
        };
        return await DynamoDBClient.put(dynamoDBParams).promise();
    })
);
return await Promise.all(promiseList[0]);
A couple of changes I can see:
We don't need to await each put before starting the next; that would be inefficient. We want them all executing at once.
DynamoDBClient.put(dynamoDBParams).promise() returns a promise, which can be pushed into an array, rather than pushing the entire loop.
Here is how the full Lambda looks:
const AWS = require("aws-sdk");
AWS.config.update({ region: "us-east-1" });
let docClient = new AWS.DynamoDB.DocumentClient();
exports.handler = async function(event) {
const table = 'test'
var promiseArray = [];
event.Records.map(record => {
console.log('record', record);
const promise = docClient.put({
TableName: table,
Item: {
pk: record.pk,
title: record.title,
},
}).promise();
promiseArray.push(promise)
})
return Promise.all(promiseArray);
}
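As a side note, the push can be skipped entirely by mapping records straight to their put() promises; a behaviour-equivalent sketch:
// Sketch: one expression, same effect as building promiseArray by hand.
exports.handler = async function(event) {
    return Promise.all(event.Records.map(record => docClient.put({
        TableName: 'test',
        Item: {
            pk: record.pk,
            title: record.title,
        },
    }).promise()));
};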
I am trying to pull data from DynamoDB and display it on AWS Lex, using an AWS Lambda function. It returns the if branch with the word "undefined" in the places where the data should be, even though the information is in the DynamoDB table. I am mainly using AWS services and building this serverless Lex bot application.
AWS code segments:
const AWS = require('aws-sdk');
const db = new AWS.DynamoDB.DocumentClient({region: 'us-east-1'});

exports.handler = async (event) => {
    // TODO implement
    const response = {
        statusCode: 200,
        body: JSON.stringify('Hello from Lambda!'),
    };
    return response;
};

function getExamSchedule(exam2, table) {
    return new Promise(function(resolve) {
        var info = 0;
        var params = {
            TableName: table,
            FilterExpression: "#key = :data",
            ExpressionAttributeNames: {
                "#key": "Resource",
            },
            ExpressionAttributeValues: {
                ":data": exam2
            }
        };
        db.scan(params, function(err, data) {
            if (err) {
                console.log("Error: " + err);
                info = 0;
            } else {
                console.log("Success", data);
                info = {
                    "ClassID": data.Items[0].ClassID,
                    "ExamDate ": data.Items[0].ExamDate,
                    "ExamEndTime:": data.Items[0].ExamEndTime,
                    "ExamLocation": data.Items[0].ExamLocation,
                    "ExamStartTime": data.Items[0].ExamStartTime
                };
            }
            resolve(info);
        });
    });
}
This is the portion where the issue might be occurring.
exports.handler = async (event, context, callback) => {
    var exam2 = event.currentIntent.slots.ClassID;
    var info = await getExamSchedule(exam2, "Final_Exams");
    var res;
    // if (info !== 0)
    if (info != null) {
        res = `The exam information for ${exam2} is ${info.ClassID} Date: ${info.ExamDate}
        End time: ${info.ExamEndTime} Location: ${info.ExamLocation} Start time: ${info.ExamStartTime}`;
    } else {
        res = `The exam is not entered into our database, please look for another exam.`;
    }
    callback(null, {
        "dialogAction": {
            "type": "Close",
            "fulfillmentState": "Fulfilled",
            "message": {
                "contentType": "PlainText",
                "content": res
            }
        }
    });
};
Could you perhaps share some of the logs and/or stack traces that you encounter when running the code?
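Two details stand out in the code above (my reading of it, not confirmed by logs): the object literal in getExamSchedule uses the keys "ExamDate " (trailing space) and "ExamEndTime:" (trailing colon), so info.ExamDate and info.ExamEndTime can only ever be undefined; and because 0 != null is true in JavaScript, the error sentinel info = 0 still falls into the success branch. A sketch of getExamSchedule with consistent keys and a null sentinel, so the handler's null check behaves as intended:
// Sketch: promise-style scan, consistent keys, null for "not found".
function getExamSchedule(exam2, table) {
    const params = {
        TableName: table,
        FilterExpression: "#key = :data",
        ExpressionAttributeNames: { "#key": "Resource" },
        ExpressionAttributeValues: { ":data": exam2 }
    };
    return db.scan(params).promise()
        .then((data) => {
            const item = data.Items && data.Items[0];
            if (!item) return null; // nothing matched the filter
            return {
                ClassID: item.ClassID,
                ExamDate: item.ExamDate,
                ExamEndTime: item.ExamEndTime,
                ExamLocation: item.ExamLocation,
                ExamStartTime: item.ExamStartTime
            };
        })
        .catch((err) => {
            console.log("Error: " + err);
            return null; // null, not 0, so the info != null check works
        });
}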
I have a Lambda function that verifies user credentials. Upon success it should call another Lambda function, configured as a destination, that generates a token. When I test the first function it succeeds, but it never calls the destination function; I only get the success message from the first function.
Function one
// (client setup not shown in the original post; assumed to be along these lines)
const AWS = require('aws-sdk');
const dynamo = new AWS.DynamoDB();
const kms = new AWS.KMS();

exports.handler = function (event, context) {
    var id = event.email;
    var params = {
        TableName: "User",
        KeyConditionExpression: "#email = :email",
        ExpressionAttributeNames: {
            "#email": "email",
        },
        ExpressionAttributeValues: {
            ":email": {S: event.email},
        }
    };
    if (id && id !== '') {
        dynamo.query(params, function (err, data) {
            if (err) {
                context.done(err);
            } else {
                var user = data.Items[0];
                if (user) {
                    var encryptedParams = {
                        CiphertextBlob: Buffer.from(user.password.B),
                    };
                    kms.decrypt(encryptedParams, function (err, decrypteddata) {
                        if (err) {
                            console.log(err, err.stack);
                            context.done(err);
                        } else {
                            if (event.password == decrypteddata.Plaintext.toString()) {
                                console.log("User authenticated");
                            }
                        }
                    });
                }
            }
        });
    } else {
        return {
            statusCode: 400,
            body: "No email provided."
        };
    }
};
Function two
// (requires assumed; jwt matching the jwt-simple API)
const moment = require('moment');
const jwt = require('jwt-simple');

exports.handler = async (event) => {
    var expires = moment().add('days', 7).valueOf();
    var token = jwt.encode({
        iss: event.email,
        exp: expires
    }, app.get('jwtTokenSecret')); // app is the poster's express-style config, not shown
    const response = {
        token: token,
        express: expires,
        statusCode: 200
    };
    console.log("token granted");
    return response;
};
Your code doesn't seem to indicate when a successful execution has completed. Destinations need the "OnSuccess" condition to determine which destination to trigger, which is possibly why the second function is not being executed.
See: Lambda Destinations: What We Learned the Hard Way - CloudProse - Trek10 Blog
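To make that concrete: destinations only fire on asynchronous invocations, and OnSuccess fires only when the handler completes without error. A sketch of the first function restructured so it resolves cleanly on success and throws on failure (the promise-style SDK calls and client setup are my assumptions, since the original post does not show them):
// Sketch: async handler whose resolved value becomes the OnSuccess
// destination's payload; a thrown error routes to OnFailure instead.
const AWS = require('aws-sdk');
const dynamo = new AWS.DynamoDB();
const kms = new AWS.KMS();

exports.handler = async (event) => {
    if (!event.email) {
        return { statusCode: 400, body: "No email provided." };
    }
    const data = await dynamo.query({
        TableName: "User",
        KeyConditionExpression: "#email = :email",
        ExpressionAttributeNames: { "#email": "email" },
        ExpressionAttributeValues: { ":email": { S: event.email } }
    }).promise();
    const user = data.Items[0];
    if (!user) throw new Error("User not found"); // routes to OnFailure
    const decrypted = await kms.decrypt({
        CiphertextBlob: Buffer.from(user.password.B)
    }).promise();
    if (event.password !== decrypted.Plaintext.toString()) {
        throw new Error("Invalid credentials"); // routes to OnFailure
    }
    console.log("User authenticated");
    return { email: event.email }; // resolving triggers OnSuccess
};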
Is there a way to transfer the messages I get from SQS over to DynamoDB? I've tried making a Lambda function with CloudWatch triggering it every minute. I'm open to using any other AWS services to complete this task. I'm sure there's a simple explanation that I'm just overlooking.
*Edit: my code does not work; I'm looking for either a fix to my code or another solution to accomplish this.
**Edit: got it working.
'use strict';

const AWS = require('aws-sdk');
const SQS = new AWS.SQS({ apiVersion: '2012-11-05' });
const Lambda = new AWS.Lambda({ apiVersion: '2015-03-31' });

const QUEUE_URL = 'SQS_URL';
const PROCESS_MESSAGE = 'process-message';
const DYNAMO_TABLE = 'TABLE_NAME';

function poll(functionName, callback) {
    const params = {
        QueueUrl: QUEUE_URL,
        MaxNumberOfMessages: 10,
        VisibilityTimeout: 10
    };
    // batch request messages
    SQS.receiveMessage(params, function(err, data) {
        if (err) {
            return callback(err);
        }
        // parse each message
        data.Messages.forEach(parseSQSMessage);
    })
    .promise()
    .then(function() {
        return Lambda.invokeAsync({})
            .promise()
            .then(function(data) {
                console.log('Recursion');
            });
    })
    .then(function() { context.succeed(); })
    .catch(function(err) { context.fail(err, err.stack); });
}

// send each event in the message to DynamoDB,
// then remove the message from the queue
function parseSQSMessage(msg, index, array) {
    // delete SQS message
    var params = {
        QueueUrl: QUEUE_URL,
        ReceiptHandle: msg.ReceiptHandle
    };
    SQS.deleteMessage(params, function(err, data) {
        if (err) console.log(err, err.stack); // an error occurred
        else console.log(data); // successful response
    });
}

// store atomic event JSON directly to DynamoDB
function storeEvent(event) {
    var params = {
        TableName: DYNAMO_TABLE,
        Item: event
    };
    var docClient = new AWS.DynamoDB.DocumentClient();
    docClient.put(params, function(err, data) {
        if (err) console.log(err);
        else console.log(data);
    });
}

exports.handler = (event, context, callback) => {
    try {
        // invoked by schedule
        poll(context.functionName, callback);
    } catch (err) {
        callback(err);
    }
};
var aws = require("aws-sdk");

// get configuration defaults from config file.
var tableName = 'Table_Name';
var queueUrl = 'SQS_URL';

var dbClient = new aws.DynamoDB.DocumentClient();
var sqsClient = new aws.SQS();

// get config values from dynamodb - if the config values are found, then override existing values.
// this will occur on every execution of the lambda, which allows real-time configuration changes.
var updateConfig = function updateConfigValues(invokedFunction, cb) {
    var params = {
        TableName: "Table_NAME",
        Key: {
            "KEY": "KEY"
        }
    };
    dbClient.get(params, function(err, data) {
        if (err) {
            console.log("ERR_DYNAMODB_GET", err, params);
        } else if (!data || !data.Item) {
            console.log("INFO_DYNAMODB_NOCONFIG", params);
        } else {
            queueUrl = data.Item.config.queueUrl;
            tableName = data.Item.config.tableName;
        }
        return cb(err);
    });
};

// save the email to dynamodb using a conditional write to ignore addresses already in the db
var saveEmail = function saveEmail(messageBody, cb) {
    var params = {
        TableName: tableName,
        Item: messageBody,
        ConditionExpression: "attribute_not_exists(clickId)",
    };
    dbClient.put(params, function(err, data) {
        cb(err, data);
    });
};

var deleteMessage = function deleteMessage(receiptHandle, cb) {
    var params = {
        QueueUrl: queueUrl,
        ReceiptHandle: receiptHandle
    };
    sqsClient.deleteMessage(params, function(err, data) {
        cb(err, data);
    });
};

exports.handler = function(event, context) {
    updateConfig(context.invokedFunctionArn, function(err) {
        if (err) {
            context.done(err);
            return;
        }
        console.log("INFO_LAMBDA_EVENT", event);
        console.log("INFO_LAMBDA_CONTEXT", context);
        sqsClient.receiveMessage({ MaxNumberOfMessages: 10, QueueUrl: queueUrl }, function(err, data) {
            if (err) {
                console.log("ERR_SQS_RECEIVEMESSAGE", err);
                context.done(null);
            } else if (data && data.Messages) {
                console.log("INFO_SQS_RESULT", " message received");
                // deleteMessage needs the SQS receipt handle, not the message id
                var receiptHandle = data.Messages[0].ReceiptHandle;
                var message = JSON.parse(data.Messages[0].Body);
                var messageBody = JSON.parse(message.Message);
                // loop through the messages and replace any empty strings with "N/A"
                messageBody.forEach(function(item) {
                    var custom = item.customVariables;
                    for (var i = 0; i < custom.length; i++) {
                        if (custom[i] === '') {
                            custom[i] = 'N/A';
                        }
                    }
                    item.customVariables = custom;
                    for (var variable in item) {
                        if (item[variable] === "") {
                            item[variable] = "N/A";
                            console.log(item);
                        }
                    }
                });
                messageBody = messageBody[0];
                // log out the new messageBody
                console.log("FIXED - ", messageBody);
                // check for errors and delete from the queue after the save
                saveEmail(messageBody, function(err, data) {
                    if (err && err.code && err.code === "ConditionalCheckFailedException") {
                        console.error("INFO_DYNAMODB_SAVE", messageBody + " already subscribed");
                        deleteMessage(receiptHandle, function(err) {
                            if (!err) {
                                console.error("INFO_SQS_MESSAGE_DELETE", "receipt handle: " + receiptHandle, "successful");
                            } else {
                                console.error("ERR_SQS_MESSAGE_DELETE", "receipt handle: " + receiptHandle, err);
                            }
                            context.done(err);
                        });
                    } else if (err) {
                        console.error("ERR_DYNAMODB_SAVE", "receipt handle: " + receiptHandle, err);
                        context.done(err);
                    } else {
                        console.log("INFO_DYNAMODB_SAVE", "email_saved", "receipt handle: " + receiptHandle, messageBody.Message);
                        deleteMessage(receiptHandle, function(err) {
                            if (!err) {
                                console.error("INFO_SQS_MESSAGE_DELETE", "receipt handle: " + receiptHandle, "successful");
                            } else {
                                console.error("ERR_SQS_MESSAGE_DELETE", "receipt handle: " + receiptHandle, err);
                            }
                            context.done(err);
                        });
                    }
                });
            } else {
                console.log("INFO_SQS_RESULT", "0 messages received");
                context.done(null);
            }
        });
    });
};
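As an aside, SQS can also trigger Lambda directly through an event source mapping, which removes the scheduled polling and the manual deletes entirely; a minimal sketch, assuming JSON message bodies and a placeholder table name:
// Sketch: Lambda behind an SQS event source mapping. Records that are
// processed without error are removed from the queue automatically.
const AWS = require('aws-sdk');
const docClient = new AWS.DynamoDB.DocumentClient();

exports.handler = async (event) => {
    await Promise.all(event.Records.map((record) => {
        const item = JSON.parse(record.body); // assumes the body is JSON
        return docClient.put({ TableName: 'Table_Name', Item: item }).promise();
    }));
};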