S3 trigger fails to update table in DynamoDB - amazon-web-services

I have created a web app in AWS Amplify. A web page uploads a file to an S3 bucket. I have created a trigger that should create a record in DynamoDB for each uploaded file. My trigger code is called, there are no errors I can find anywhere but the table is not updated.
The trigger code:
exports.handler = async function (event) {
console.log('Received S3 event:', JSON.stringify(event, null, 2));
// Get the object from the event and show its content type
const eventName = event.Records[0].eventName;
const bucket = event.Records[0].s3.bucket.name; //eslint-disable-line
let key = event.Records[0].s3.object.key.replace('%3A', ':');
const imgSize = event.Records[0].s3.object.size;
console.log(`Bucket: ${bucket}`, `Key: ${key}`);
if (eventName === "ObjectCreated:Put") {
console.log("This is a put event")
addDBActivity(key);
}
};
var AWS = require('aws-sdk');
var ddb = new AWS.DynamoDB();
/**
 * Writes an Activity record for an uploaded S3 object.
 * Key layout assumed: "<accessLevel>/<owner>/.../<fileName>" — TODO confirm
 * against the uploader (the logged sample resolves owner to "public").
 * Bug fixed: the putItem promise was fire-and-forget; it is now awaited and
 * the promise is returned so the handler can wait for completion.
 * @param {string} key - Decoded S3 object key.
 * @returns {Promise<void>} resolves once the item is written (or the error logged).
 */
async function addDBActivity(key) {
  console.log("in addDBActivity");
  // let activityTable = "Activity-***-dev"
  const activityTable = "Activity-ctuqkflvhvbjxftdvdo64xnzle-dev";
  const name = key.split('/').pop();
  const owner = key.split('/')[0];
  const format = "tcx";
  const type = "cycling";
  const params = {
    TableName: activityTable,
    Item: {
      key: { S: key },
      name: { S: name },
      owner: { S: owner },
      format: { S: format },
      type: { S: type },
    }
  };
  console.log(params);
  try {
    const data = await ddb.putItem(params).promise();
    console.log(data);
  } catch (err) {
    console.log(err);
  }
  console.log("after promise");
}
schema.graphql
type Activity @model @auth(rules: [
{ allow: owner, ownerField: "id" }
]) {
id: ID!
name: String!
owner: String!
key: String!
format: String!
type: String!
}
After uploading a file this is the log output in CloudWatch:
2021-12-18T13:14:26.093+02:00
2021-12-18T11:14:26.092Z 20cf4afe-3c96-4ee2-94c6-7c12c27c4d66 INFO This is a put event
2021-12-18T11:14:26.092Z 20cf4afe-3c96-4ee2-94c6-7c12c27c4d66 INFO This is a put event
2021-12-18T13:14:26.093+02:00
2021-12-18T11:14:26.093Z 20cf4afe-3c96-4ee2-94c6-7c12c27c4d66 INFO in addDBActivity
2021-12-18T11:14:26.093Z 20cf4afe-3c96-4ee2-94c6-7c12c27c4d66 INFO in addDBActivity
2021-12-18T13:14:26.123+02:00
2021-12-18T11:14:26.123Z 20cf4afe-3c96-4ee2-94c6-7c12c27c4d66 INFO {
TableName: 'Activity-ctuqkflvhvbjxftdvdo64xnzle-dev',
Item: {
key: {
S: 'public/sourb/ramp_test2tcx-a03e6380-e0c3-4966-b4eb-b74375f930bd.tcx'
},
name: { S: 'ramp_test2tcx-a03e6380-e0c3-4966-b4eb-b74375f930bd.tcx' },
owner: { S: 'public' },
format: { S: 'tcx' },
type: { S: 'cycling' }
}
}
2021-12-18T11:14:26.123Z 20cf4afe-3c96-4ee2-94c6-7c12c27c4d66 INFO { TableName: 'Activity-ctuqkflvhvbjxftdvdo64xnzle-dev', Item: { key: { S: 'public/sourb/ramp_test2tcx-a03e6380-e0c3-4966-b4eb-b74375f930bd.tcx' }, name: { S: 'ramp_test2tcx-a03e6380-e0c3-4966-b4eb-b74375f930bd.tcx' }, owner: { S: 'public' }, format: { S: 'tcx' }, type: { S: 'cycling' } } }
2021-12-18T13:14:26.542+02:00
2021-12-18T11:14:26.522Z 20cf4afe-3c96-4ee2-94c6-7c12c27c4d66 INFO after promise
Any idea what could be the issue?

You are using an async handler, so your function probably finishes before your database has a chance to get updated. You can wrap your code in a Promise, as shown in the docs, to ensure full execution.

Related

Items doesn't save correctly in dynamo db through the lambda function

In my React Native application, which has an AWS Amplify backend, I use a post-confirmation lambda function to save the Cognito users in my DynamoDB data store. My post-confirmation lambda function runs fine and I can see the newly created user in my DynamoDB table, but I can't query that user inside my app and I also cannot see the user in the admin UI. However, after several hours I can query that user and also see the user through the admin UI. How can I fix this?
/**
 * @type {import('@types/aws-lambda').APIGatewayProxyHandler}
 */
const aws = require("aws-sdk");
const ddb = new aws.DynamoDB();
const tableName = process.env.USERTABLE;
exports.handler = async (event) => {
// insert code to be executed by your lambda trigger
if(!event?.request?.userAttributes?.sub){
console.log("no sub provided")
return;
}
const now = new Date();
const timestamp = now.getTime();
const userItem = {
__typename: { S: 'User' },
_lastChangedAt: { N: timestamp.toString() },
_version: { N: "1" },
updatedAt: { S: now.toISOString() },
createdAt: { S: now.toISOString() },
id: { S: event.request.userAttributes.sub },
Name: { S: event.request.userAttributes.name },
Email: { S: event.request.userAttributes.email },
Phonenumb: { S: event.request.userAttributes.phone_number },
DeliveryAddress: { S: ''}
}
const params = {
Item: userItem,
TableName: tableName
}
try {
await ddb.putItem(params).promise();
console.log("Success");
} catch (e) {
console.log(e)
}
};
this is my lambda function
This is how my query code look
// Loads the currently signed-in user's record from DataStore into state.
// Fixed: don't mix `await` with `.then()` on the same expression — await
// the query, then update state explicitly.
const getUser = async () => {
  const userData = await Auth.currentAuthenticatedUser();
  const currentUserId = userData.attributes.sub;
  const user = await DataStore.query(User, currentUserId);
  setUser(user);
  console.log("getting user in home");
};
// Fetch the user once on mount. getUser is fire-and-forget here because a
// useEffect callback must not return a promise.
useEffect(() => {
getUser();
}, []);

Calling RDS data service executeStatement throws "Parameter X has value with no field set"

Not sure if I clearly understood how to provide parameter values when executing rds.executeStatement command.
When I execute the below code I get this error thrown -
{
"errorType": "BadRequestException",
"errorMessage": "Parameter \"userId\" has value with no field set",
"code": "BadRequestException",
"message": "Parameter \"userId\" has value with no field set"
}
Here is my code, How am I supposed to provide the userId and givenName values to the parameters array here.
const AWS = require('aws-sdk');
var RDS = new AWS.RDSDataService({
apiVersion: '2018-08-01'
});
exports.handler = async (event, context) => {
var userId;
var givenName;
var params = {
secretArn: 'secretArn',
resourceArn: 'resourceArn',
database: 'db',
parameters: [{
name: "userId",
value: {
"stringValue": userId
}
},
{
name: "givenName",
value: {
"stringValue": givenName
}
}
]
};
event.Records.forEach(function(record) {
if (record.eventName == 'INSERT') {
userId = record.dynamodb.NewImage.pk.S;
givenName = record.dynamodb.NewImage.sk.S;
params.sql = `INSERT INTO Users (UserId, GivenName) VALUES(:userId, :givenName);`
}
});
await RDS.executeStatement(params).promise();
console.log(params.parameters[0].value);
return 'done';
};
UPDATE March 13th
Attaching the cloudwatch logs printing out userId and givenName -
UPDATE March 16th - Function Updates
const AWS = require('aws-sdk');
const RDS = new AWS.RDSDataService({ apiVersion: '2018-08-01' });
exports.handler = async (event, context) => {
var userId;
var givenName;
var count = 0;
var params = {
secretArn: 'secretArn',
resourceArn: 'resourceArn',
database: 'bol_db',
parameters: [{
name: "userId",
value: {
"stringValue": userId
}
},
{
name: "givenName",
value: {
"stringValue": givenName
}
}
]
};
const promises = event.Records.map(async function(record) {
count++;
context.callbackWaitsForEmptyEventLoop = false;
if (record.eventName == 'INSERT') {
userId = record.dynamodb.NewImage.pk.S;
givenName = record.dynamodb.NewImage.sk.S;
console.log('userId - ' + userId);
console.log('givenName - ' + givenName);
console.log('Params -'+params.parameters);
params.sql = "INSERT INTO Users (UserId, GivenName) VALUES(:userId, :givenName);"
let result = await RDS.executeStatement(params).promise();
console.log('Result -' + result);
}
});
await Promise.all(promises);
console.log(count);
return 'done';
};
It seems that you're setting "stringValue": userId before userId has a value. In JavaScript, assigning userId later does not propagate the new value to the object you already built with it.
You should try with var params = {...} inside the .map function or, alternatively, inside the .map function you can loop through the parameter list and if you find the correct one, assign the value then.

DynamoDB returning null nextToken when there are still results in database

I have a GraphQL API (AppSync) backed by a DynamoDB table keyed a specific id with timestamp as the range key. I want to retrieve all possible history for that id so I wrote a query in my GraphQL schema that would allow me to do so. Here's the request vtl:
{
"version": "2017-02-28",
"operation": "Query",
"query": {
"expression": "id = :id",
"expressionValues": {
":id": {
"S": "$context.args.id"
}
}
},
"nextToken": $util.toJson($util.defaultIfNullOrEmpty($context.args.nextToken, null))
}
There could be thousands of items in the ddb table for an id so I wrote a Lambda function to query for all of them and return the result in a list as such (I know the code can be simplified):
exports.handler = async function (event, context, callback) {
const graphqlClient = new appsync.AWSAppSyncClient({
url: process.env.APPSYNC_ENDPOINT,
region: process.env.AWS_REGION,
auth: {
type: 'AWS_IAM',
credentials: {
accessKeyId: process.env.AWS_ACCESS_KEY_ID,
secretAccessKey: process.env.AWS_SECRET_ACCESS_KEY,
sessionToken: process.env.AWS_SESSION_TOKEN
}
},
disableOffline: true
});
const query = gql`query GetAllItemsById(
$id: String!
$nextToken: String
) {
getAllItemsById(
id: $id
nextToken: $nextToken
) {
exampleField {
subField1
subField2
subField3
}
nextToken
}
}
`;
const initialResult = await graphqlClient.query({
query,
variables: {
id: event.id
}
});
var finalResult = initialResult.data.getAllItemsById.exampleField;
var nextToken = initialResult.data.getAllItemsById.nextToken;
while (nextToken !== null) {
const result = await graphqlClient.query({
query,
variables: {
id: event.id,
nextToken: nextToken
}
});
finalResult = finalResult.concat(result.data.getAllItemsById.exampleField);
nextToken = result.data.getAllItemsById.nextToken;
}
console.log("Total Results: " + finalResult.length);
return callback(null, finalResult);
};
For some reason, not all items are being returned. nextToken is null before all results are returned. I know DDB has a 1MB limit for query which is why I'm paginating using nextToken but why is it still not returning all the items in the table? Also, if there's a better way to implement this, I'm open to it.

Update Route53 record using lambda nodejs not working

I am trying to update a record in Route53 using a lambda function and nodejs runtime.
The problem is I am getting no errors, no logs or anything from route53 to even understand why it is not working.
I have setup the following:
Lambda function
SNS to read messages from
Attached a policy to update/change record sets
My lambda code:
console.log('Running updateRecordSet');
// Hosted-zone id and the record to upsert; replace before deploying.
// Declared with const — the original assigned implicit globals, which
// throws in strict mode (ES modules) and pollutes the global object.
const HOSTED_ZONE_ID = 'xxxx';
const DNS_RECORD_NAME = 'dns-record.internal.example.com.';
const aws = require('aws-sdk');
const route53 = new aws.Route53();
exports.handler = async (event, context) => {
const message = event.Records[0].Sns.Message;
console.log('SNS message:', message);
try {
const data = JSON.parse(message);
if (data.ip) {
console.log('New IP: ', data.ip);
var newRecord = {
HostedZoneId: HOSTED_ZONE_ID,
ChangeBatch: {
Changes: [{
Action: 'UPSERT',
ResourceRecordSet: {
Name: DNS_RECORD_NAME,
Type: 'A',
ResourceRecords: [{Value: data.ip}],
TTL: 30,
}
}]
}
};
updateRecordSet(route53, DNS_RECORD_NAME, HOSTED_ZONE_ID, newRecord, function(err) {
if (err) {
return context.fail(err);
}
return context.succeed('OK');
});
}
} catch(err) {
console.error(err);
}
return message;
};
/**
 * Upserts a Route53 record set and reports completion through `callback`.
 * Bug fixed: the success path never invoked `callback`, so callers waiting
 * on it (e.g. to call context.succeed) were never notified.
 * @param {object} route53 - Route53 client (injected, so it is testable).
 * @param {string} DNS_RECORD_NAME - record name, used for logging only.
 * @param {string} HOSTED_ZONE_ID - unused here; kept for interface stability.
 * @param {object} newRecord - full changeResourceRecordSets params.
 * @param {function(Error=, object=)} callback - node-style completion callback.
 */
function updateRecordSet(route53, DNS_RECORD_NAME, HOSTED_ZONE_ID, newRecord, callback) {
  console.log("Executing function updateRecordSet");
  route53.changeResourceRecordSets(newRecord, function (err, data) {
    if (err) {
      console.log("Got an err");
      return callback(err);
    }
    console.log('Updated A record for', DNS_RECORD_NAME);
    return callback(null, data);
  });
}
I get the output:
Function Logs:
START RequestId: 4ef801ba-c03c-4582-33a8-c078c46f0b03 Version: $LATEST
2019-04-07T04:18:55.201Z 4ef801ba-c03c-4582-83a8-c078c46f0b03 SNS message: {"ip": "10.1.1.1"}
2019-04-07T04:18:55.201Z 4ef801ba-c03c-4582-83a8-c078c46f0b03 New IP: 10.1.1.1
2019-04-07T04:18:55.201Z 4ef801ba-c03c-4582-83a8-c078c46f0b03 Executing function updateRecordSet
END RequestId: 4ef801ba-c03c-4582-33a8-c078c46f0b03
If the IAM policy is wrong I would at least get some kind of authentication error?
I could not get async to work with lambda for some reason but finally got working code.
This lambda will update or insert a record set in Route53 reading from SNS with a JSON message like {"ip": "10.1.1.1"}
console.log('Running updateRecordSet');
const AWS = require('aws-sdk');
// Zone / record configuration; replace before deploying.
// Declared const instead of implicit globals (implicit globals throw in
// strict mode and leak onto the global object).
const HOSTED_ZONE_ID = 'xxxxxx';
const DNS_RECORD_NAME = 'dns-record.example.com.';
exports.handler = function(event, context, callback) {
var route53 = new AWS.Route53();
// Get message from SNS
var message = event.Records[0].Sns.Message;
const data = JSON.parse(message);
if (typeof data.ip !== "undefined") {
route53.changeResourceRecordSets({
HostedZoneId : HOSTED_ZONE_ID,
ChangeBatch : {
Changes : [{
Action: 'UPSERT',
ResourceRecordSet: {
Name: DNS_RECORD_NAME,
Type: 'A',
ResourceRecords: [
{
Value: data.ip
}
],
TTL: 30
}
}]
}
}, function (err, data) {
if (err)
console.log(err, err.stack);
else {
console.log('Updated Route53 DNS record ' + DNS_RECORD_NAME);
}
});
} else {
console.log('No IP found in message. Discarding.');
}
};
If you want to have full promise and await thing setup, you can try the below code. It has a few additional things like STS assume-role for cross-account ROUTE53 access. Additionally, it has a weighted logic to create multiple CNAMEs. I understand this does not fit your use case, however it may help somebody who stumbles upon a similar issue to create Weighted load balancing with CNAME.
// Weighted-CNAME variant: module-level configuration.
console.log('Running route53 changeRecrodSet with CNAME');
/* global HOSTED_ZONE_ID*/
/* global DNS_RECORD_NAME*/
const HOSTED_ZONE_ID = "xxxx";
const DNS_RECORD_NAME = "xxxxxx.com";
var AWS = require('aws-sdk');
// Region is pinned here; assumed-role credentials are applied later in the
// handler via AWS.config.update before the Route53 client is created.
AWS.config.region = 'us-west-1';
// Promise-returning wrapper around Route53.changeResourceRecordSets.
// The client is passed in, which keeps this trivially testable.
async function update_recordset(route53, records) {
  const pendingChange = route53.changeResourceRecordSets(records);
  return pendingChange.promise();
}
/**
 * Assumes the cross-account role and returns the STS AssumeRole response.
 * Bug fixed: the promise was returned un-awaited from inside try/catch, so
 * a rejected assumeRole could never be caught here, and the "TEMP" log
 * printed a pending Promise rather than credentials. Now awaited.
 * @returns {Promise<object|undefined>} STS response, or undefined on error.
 */
async function getcred() {
  console.log("inside getcred");
  const sts = new AWS.STS();
  try {
    const temp_cred = await sts.assumeRole({
      RoleArn: 'arn:aws:iam::xxxxxxxx',
      RoleSessionName: 'awssdk'
    }).promise();
    console.log("TEMP", temp_cred);
    return temp_cred;
  } catch (err) {
    console.log("ERROR", err);
  }
}
// SNS-triggered handler: assumes a cross-account role, then upserts two
// weighted CNAME records (an ELB target and the CNAME from the message).
// Ordering matters: AWS.config.update must run BEFORE the Route53 client
// is constructed so the client picks up the assumed-role credentials.
exports.handler = async (event) => {
const message = event.Records[0].Sns.Message;
console.log('SNS message:', message);
try{
const data = JSON.parse(message);
// Only act when the message carries a cname field.
if (data.cname) {
console.log('New IP: ', data.cname);
// Cross-account: swap in temporary credentials from STS.
const sts_result = await getcred();
console.log("STS_RESULT", sts_result);
AWS.config.update({
accessKeyId: sts_result.Credentials.AccessKeyId,
secretAccessKey: sts_result.Credentials.SecretAccessKey,
sessionToken: sts_result.Credentials.SessionToken
});
var route53 = new AWS.Route53();
console.log("ROUTE53 RESULT",route53);
// Two UPSERTs with equal Weight (100/100) => 50/50 weighted routing
// between the ELB alias and the CNAME supplied in the message.
const newRecord = {
HostedZoneId: HOSTED_ZONE_ID,
ChangeBatch: {
Changes: [
{
Action: 'UPSERT',
ResourceRecordSet: {
SetIdentifier: "elb",
Weight: 100,
Name: DNS_RECORD_NAME,
Type: 'CNAME',
ResourceRecords: [{ Value: "xxxxx.sxxxxx.com" }],
TTL: 300,
},
},
{
Action: 'UPSERT',
ResourceRecordSet: {
SetIdentifier: "cflare",
Weight: 100,
Name: DNS_RECORD_NAME,
Type: 'CNAME',
ResourceRecords: [{ Value: data.cname }],
TTL: 300,
},
}],
},
};
const results = await update_recordset(route53,newRecord);
console.log("Result", results);
}
}catch(err){
// NOTE(review): errors are logged but swallowed, so the invocation still
// reports success — confirm this is intended (it disables Lambda retries).
console.log("ERR",err);
}
};
You need to either use async/await or plain callbacks; mixing both is bad practice.
I would do something like this:
console.log('Running updateRecordSet');
/* global HOSTED_ZONE_ID*/
/* global DNS_RECORD_NAME*/
HOSTED_ZONE_ID = 'xxxx';
DNS_RECORD_NAME = 'dns-record.internal.example.com.';
var aws = require('aws-sdk');
var route53 = new aws.Route53();
exports.handler = async (event) => {
const message = event.Records[0].Sns.Message;
console.log('SNS message:', message);
try {
const data = JSON.parse(message);
if (data.ip) {
console.log('New IP: ', data.ip);
var newRecord = {
HostedZoneId: HOSTED_ZONE_ID,
ChangeBatch: {
Changes: [{
Action: 'UPSERT',
ResourceRecordSet: {
Name: DNS_RECORD_NAME,
Type: 'A',
ResourceRecords: [{Value: data.ip}],
TTL: 30,
}
}]
}
};
let result = await route53.changeResourceRecordSets(newRecord);
console.log(result);
}
} catch(err) {
console.error(err);
}
return message;
};
Also, you are right about the IAM role: you would get an authorization error if your code actually ran all the calls correctly.
To get async/await to work with AWS sdk, you need promisify.
See example below...
console.log('Running updateRecordSet');
/* global HOSTED_ZONE_ID*/
/* global DNS_RECORD_NAME*/
HOSTED_ZONE_ID = 'xxxx';
DNS_RECORD_NAME = 'dns-record.internal.example.com.';
const aws = require('aws-sdk');
const route53 = new aws.Route53();
const { promisify } = require('util');
const changeResourceRecordSets = promisify(route53.changeResourceRecordSets.bind(route53));
exports.handler = async (event) => {
const message = event.Records[0].Sns.Message;
console.log('SNS message:', message);
try {
const data = JSON.parse(message);
if (data.ip) {
console.log('New IP: ', data.ip);
const newRecord = {
HostedZoneId: HOSTED_ZONE_ID,
ChangeBatch: {
Changes: [
{
Action: 'UPSERT',
ResourceRecordSet: {
Name: DNS_RECORD_NAME,
Type: 'A',
ResourceRecords: [{ Value: data.ip }],
TTL: 30,
},
}],
},
};
const results = await changeResourceRecordSets(newRecord);
if (results.ChangeInfo.Status === 'PENDING') {
console.log('Updated A record for', DNS_RECORD_NAME, results);
return {
statusCode: 200,
body: 'Success',
};
} else {
console.error(results);
return {
statusCode: 500,
body: 'Something went wrong',
};
}
}
} catch (err) {
console.error(err);
}
};

DynamoDB - The provided key element does not match the schema

I have a dynamodb table with attributes: userId, propertyInfo, and propertyId. userId is primary index. When I use the following lambda code to update(PUT) the item in the table, I get "The provided key element does not match the schema".
const AWS = require('aws-sdk'); // eslint-disable-line import/no-extraneous-dependencies
const dynamoDb = new AWS.DynamoDB.DocumentClient();
/**
 * HTTP PUT handler: updates propertyInfo on the item addressed by the path id.
 * NOTE(review): "The provided key element does not match the schema" means
 * `Key` must contain exactly the table's key schema. The question says the
 * table's partition key is userId, so Key: { propertyId } cannot match —
 * either re-key the table by propertyId (keeping userId as an attribute or
 * GSI) or include userId in Key. Confirm the table definition.
 * Fixes here: corrected the copy/pasted error message (this handler updates,
 * it does not fetch) and removed the unused `timestamp` local.
 */
module.exports.update = (event, context, callback) => {
  const data = JSON.parse(event.body);
  const params = {
    TableName: process.env.DYNAMODB_TABLE,
    Key: {
      propertyId: event.pathParameters.id,
    },
    ExpressionAttributeNames: {
      '#new_propertyInfo': 'propertyInfo',
    },
    ExpressionAttributeValues: {
      ':propertyInfo': data.propertyInfo,
    },
    UpdateExpression: 'SET #new_propertyInfo = :propertyInfo',
    ReturnValues: 'ALL_NEW',
  };
  dynamoDb.update(params, (error, result) => {
    // handle potential errors
    if (error) {
      console.error(error);
      callback(null, {
        statusCode: error.statusCode || 501,
        headers: { 'Content-Type': 'text/plain' },
        body: 'Couldn\'t update the item.',
      });
      return;
    }
    // create a response
    const response = {
      statusCode: 200,
      body: JSON.stringify(result.Attributes),
    };
    callback(null, response);
  });
};
Body of my update request is:
{
"propertyInfo":
{
"houseNumber": 2000,
"street": "easy st"
}
}
The event.pathParameters.id is obtained from /property/{id}. I need this id to search DB's propertyId. The userId is needed for authorization purpose. Search and update I need to search by propertyId. Could someone help to explain to me what I need to do to set this up correctly please?