Update item field in Amazon DynamoDB without overwriting other fields

I have an Amazon DynamoDB table called "users" that has one user with the fields UserId, FirstName, LastName, SignedInAt, SignedOutAt, and IsSignedIn.
I want to update only certain fields, not all of them, but when I execute the following code, the fields I am not updating disappear.
So I am left with just the UserId, SignedInAt, and IsSignedIn fields. How do I update without the other fields disappearing?
I am using JavaScript.
Thanks!!!
Before executing code:
await dynamo
  .put({
    TableName: "users",
    Item: {
      UserId: requestJSON.UserId,
      SignedInAt: new Date().toLocaleTimeString(), // 11:18:48 AM
      IsSignedIn: true
    }
  })
  .promise();
After:

The problem with your code is that you are using the put method, which replaces the entire item. To update individual attributes of an Amazon DynamoDB item without touching the others, call the updateItem operation instead. Here is an example using the AWS SDK for Java V2 (you can port this code to your programming language) that updates an item based on its key.
public static void updateTableItem(DynamoDbClient ddb,
                                   String tableName,
                                   String key,
                                   String keyVal,
                                   String name,
                                   String updateVal) {

    HashMap<String, AttributeValue> itemKey = new HashMap<String, AttributeValue>();
    itemKey.put(key, AttributeValue.builder().s(keyVal).build());

    HashMap<String, AttributeValueUpdate> updatedValues =
        new HashMap<String, AttributeValueUpdate>();

    // Update the attribute specified by name with updateVal
    updatedValues.put(name, AttributeValueUpdate.builder()
        .value(AttributeValue.builder().s(updateVal).build())
        .action(AttributeAction.PUT)
        .build());

    UpdateItemRequest request = UpdateItemRequest.builder()
        .tableName(tableName)
        .key(itemKey)
        .attributeUpdates(updatedValues)
        .build();

    try {
        ddb.updateItem(request);
    } catch (ResourceNotFoundException e) {
        System.err.println(e.getMessage());
        System.exit(1);
    } catch (DynamoDbException e) {
        System.err.println(e.getMessage());
        System.exit(1);
    }
    System.out.println("Done!");
}

SOLVED
const AWS = require("aws-sdk");
const dynamo = new AWS.DynamoDB.DocumentClient();
exports.handler = async(event, context) => {
let body;
let user;
let statusCode = 200;
const headers = {
"Content-Type": "application/json"
};
try {
let requestJSON = JSON.parse(event.body);
switch (event.routeKey) {
case "GET /items":
body = await dynamo.scan({ TableName: "users" }).promise();
break;
case "PUT /signIn":
await dynamo
.update({
TableName: "users",
Key: {
UserId: requestJSON.UserId
},
UpdateExpression: "set SignedInAt = :a, IsSignedIn = :b, SignedOutAt = :c",
ExpressionAttributeValues: {
":a": new Date().toLocaleTimeString(),
":b": true,
":c": ""
}
})
.promise();
body = `Put item ${requestJSON.UserId}`;
break;
case "PUT /signOut":
await dynamo
.update({
TableName: "users",
Key: {
UserId: requestJSON.UserId
},
UpdateExpression: "set SignedOutAt = :a, IsSignedIn = :b",
ExpressionAttributeValues: {
":a": new Date().toLocaleTimeString(),
":b": false,
}
})
.promise();
body = `Put item ${requestJSON.UserId}`;
break;
default:
throw new Error(`Unsupported route: "${event.routeKey}"`);
}
}
catch (err) {
statusCode = 400;
body = err.message;
}
finally {
body = JSON.stringify(body);
}
return {
statusCode,
body,
headers
};
};

Related

How to put an item on DynamoDB table using AWS SDK for Node.js?

I have a DynamoDB table named MYTABLE.
I'm trying to insert some data into this table via Lambda@Edge.
My Lambda function is triggered via a CloudFront distribution.
I have this function:
var AWS = require("aws-sdk");
AWS.config.update({region: 'us-east-1'});
var ddb = new AWS.DynamoDB({apiVersion: '2012-08-10'});
exports.handler = async (event, context) => {
var params = {
TableName: 'MYTABLE',
Item: {
'id' : {'S': 'something'},
'value' : {'S': 'something'}
}
};
// Call DynamoDB to add the item to the table
await ddb.putItem(params, function(err, data) {
if (err) {
console.log("Error", err);
} else {
console.log("Success", data);
}
});
const response = {
status: '302',
statusDescription: 'Found',
headers: {
location: [{
key: 'Location',
value: 'https://www.google.com',
}],
},
};
return response;
};
The function runs properly (the redirection works) but no data is inserted into the table.
Does anyone know what I'm doing wrong, please?
Thanks.
Cheers,
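The likely culprit here is mixing the callback style with await: ddb.putItem(params, callback) returns an AWS.Request, not a promise, so the await resolves immediately and the function returns its response before the write has completed. Below is a minimal sketch of the promise-based form, keeping the table and item from the question (whether this is the only issue in your setup is an assumption; also confirm the function's execution role allows dynamodb:PutItem).

var AWS = require("aws-sdk");
AWS.config.update({ region: 'us-east-1' });
var ddb = new AWS.DynamoDB({ apiVersion: '2012-08-10' });

exports.handler = async (event, context) => {
  var params = {
    TableName: 'MYTABLE',
    Item: {
      'id': { 'S': 'something' },
      'value': { 'S': 'something' }
    }
  };

  try {
    // .promise() gives a real promise, so await actually waits for the write
    await ddb.putItem(params).promise();
    console.log("Success");
  } catch (err) {
    console.log("Error", err);
  }

  return {
    status: '302',
    statusDescription: 'Found',
    headers: {
      location: [{ key: 'Location', value: 'https://www.google.com' }],
    },
  };
};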

Lambda used SNS to send SMS but stopped working

I was just having fun sending myself SMS from a Lambda called via API Gateway (REST API). No problem at all until it suddenly stopped sending SMS. I'm on the free tier of my AWS account, but I have in no way reached the maximum sendable SMS limit.
Permissions and roles have not been modified since yesterday, when text messages were still arriving at my verified mobile number.
As you can see in my code, I used a "test2" variable to return the result of the SMS send, but it hasn't been much help.
Is there any way to better understand the error behind this problem? And why am I getting it all of a sudden?
My lambda code:
const AWS = require("aws-sdk");
const dynamo = new AWS.DynamoDB.DocumentClient();
const SNS = new AWS.SNS();
const tableName = "Testing";
const smsMessage = {
PhoneNumber: '+39#mynumber#',
Message: 'Someone called your api!'
};
const httpMethodConverter = 'http-method';
const pathRequestConverter = "resource-path";
const headers = {
"Access-Control-Allow-Origin":"https://#myaccount#.github.io",
"Content-Type": "application/json",
"Access-Control-Allow-Methods": "GET, POST, PATCH, PUT, DELETE, OPTIONS",
"Access-Control-Allow-Headers": "Origin, Content-Type, X-Auth-Token"
};
let body;
let statusCode = 200;
let test2;
exports.handler = async (event, context) => {
try {
let test = JSON.stringify(event);
let callContext = event.context;
let httpMethod = callContext[httpMethodConverter];
let path = callContext[pathRequestConverter];
let methodAndPath = httpMethod + " "+ path;
test2= await SNS.publish(smsMessage).promise();
switch (methodAndPath) {
case "DELETE /items/{id}":
await dynamo
.delete({
TableName: tableName,
Key: {
Id: parseInt(JSON.parse(test).params.path.id)
}
})
.promise();
body = `Deleted item ${JSON.parse(test).params.path.id}`;
break;
case "GET /items/{id}":
body = await dynamo
.get({
TableName: tableName,
Key: {
Id: parseInt(JSON.parse(test).params.path.id)
}
})
.promise();
break;
case "GET /items":
body = await dynamo.scan({ TableName: tableName }).promise();
break;
case "POST /items":
var bodyParser = "body-json";
var bodyPassed = JSON.parse(test)[bodyParser];
await dynamo
.put({
TableName: tableName,
Item: {
Id: parseInt(bodyPassed.Id),
Type: bodyPassed.Type
}
})
.promise();
body = "Put item "+ bodyPassed.Type+ " id: " + parseInt(bodyPassed.Id);
break;
default:
throw new Error(`Unsupported route: "${event.context}"`);
}
} catch (err) {
statusCode = 400;
body = err.message;
} finally {
body = JSON.stringify(body);
}
return {
statusCode,
body,
headers,
test2
};
};
This is the response I get from my SNS.publish(smsMessage).promise() call:
"test2": {
"ResponseMetadata": {
"RequestId": "b2fbd9fc-4cc0-54e8-a660-82#########"
},
"MessageId": "c94393bf-cbb3-5cb9-8155-c#########"
}
If you enable delivery status logging (see https://docs.aws.amazon.com/sns/latest/dg/sms_stats_cloudwatch.html), you should receive an error message detailing why the message could not be delivered. Most likely, you've hit the default monthly $1 SMS spend limit.
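If you want to inspect the SMS settings from code, here is a quick sketch with the same SDK that reads the account-level attributes, including MonthlySpendLimit (raising that limit above the default requires a support request, so this only tells you where you stand):

const AWS = require("aws-sdk");
const SNS = new AWS.SNS();

exports.handler = async () => {
  // Read the account-level SMS settings, including the monthly spend limit
  const attrs = await SNS.getSMSAttributes({
    attributes: ["MonthlySpendLimit", "DefaultSMSType"]
  }).promise();
  console.log("Current SMS attributes:", attrs.attributes);
  return attrs.attributes;
};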

Calling RDS data service executeStatement throws "Parameter X has value with no field set"

I'm not sure I've clearly understood how to provide parameter values when executing the rds.executeStatement command.
When I execute the code below, this error is thrown:
{
  "errorType": "BadRequestException",
  "errorMessage": "Parameter \"userId\" has value with no field set",
  "code": "BadRequestException",
  "message": "Parameter \"userId\" has value with no field set"
}
Here is my code. How am I supposed to provide the userId and givenName values to the parameters array here?
const AWS = require('aws-sdk');
var RDS = new AWS.RDSDataService({
  apiVersion: '2018-08-01'
});

exports.handler = async (event, context) => {
  var userId;
  var givenName;

  var params = {
    secretArn: 'secretArn',
    resourceArn: 'resourceArn',
    database: 'db',
    parameters: [{
      name: "userId",
      value: {
        "stringValue": userId
      }
    },
    {
      name: "givenName",
      value: {
        "stringValue": givenName
      }
    }]
  };

  event.Records.forEach(function (record) {
    if (record.eventName == 'INSERT') {
      userId = record.dynamodb.NewImage.pk.S;
      givenName = record.dynamodb.NewImage.sk.S;
      params.sql = `INSERT INTO Users (UserId, GivenName) VALUES(:userId, :givenName);`;
    }
  });

  await RDS.executeStatement(params).promise();
  console.log(params.parameters[0].value);
  return 'done';
};
UPDATE March 13th
Attaching the CloudWatch logs printing out userId and givenName:
UPDATE March 16th - Function Updates
const AWS = require('aws-sdk');
const RDS = new AWS.RDSDataService({ apiVersion: '2018-08-01' });

exports.handler = async (event, context) => {
  var userId;
  var givenName;
  var count = 0;

  var params = {
    secretArn: 'secretArn',
    resourceArn: 'resourceArn',
    database: 'bol_db',
    parameters: [{
      name: "userId",
      value: {
        "stringValue": userId
      }
    },
    {
      name: "givenName",
      value: {
        "stringValue": givenName
      }
    }]
  };

  const promises = event.Records.map(async function (record) {
    count++;
    context.callbackWaitsForEmptyEventLoop = false;
    if (record.eventName == 'INSERT') {
      userId = record.dynamodb.NewImage.pk.S;
      givenName = record.dynamodb.NewImage.sk.S;
      console.log('userId - ' + userId);
      console.log('givenName - ' + givenName);
      console.log('Params - ' + params.parameters);
      params.sql = "INSERT INTO Users (UserId, GivenName) VALUES(:userId, :givenName);";
      let result = await RDS.executeStatement(params).promise();
      console.log('Result - ' + result);
    }
  });

  await Promise.all(promises);
  console.log(count);
  return 'done';
};
It seems that you're setting "stringValue": userId before userId has a value. In JavaScript you can't assign userId later and expect the new value to propagate to all the places where you used it before.
You should build var params = {...} inside the .map function, or alternatively, inside the .map function loop through the parameter list and assign the value once you find the matching parameter.
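A minimal sketch of the first suggestion, building params inside the callback only after the values are known (the ARNs, database name, and pk/sk attribute names are placeholders taken from the question):

const AWS = require('aws-sdk');
const RDS = new AWS.RDSDataService({ apiVersion: '2018-08-01' });

exports.handler = async (event) => {
  const promises = event.Records
    .filter((record) => record.eventName === 'INSERT')
    .map((record) => {
      const userId = record.dynamodb.NewImage.pk.S;
      const givenName = record.dynamodb.NewImage.sk.S;

      // Build the request only after userId/givenName have values,
      // so the stringValue fields are actually populated.
      const params = {
        secretArn: 'secretArn',
        resourceArn: 'resourceArn',
        database: 'db',
        sql: 'INSERT INTO Users (UserId, GivenName) VALUES(:userId, :givenName);',
        parameters: [
          { name: 'userId', value: { stringValue: userId } },
          { name: 'givenName', value: { stringValue: givenName } }
        ]
      };

      return RDS.executeStatement(params).promise();
    });

  await Promise.all(promises);
  return 'done';
};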

DynamoDB returning null nextToken when there are still results in database

I have a GraphQL API (AppSync) backed by a DynamoDB table keyed on a specific id with timestamp as the range key. I want to retrieve all possible history for that id, so I wrote a query in my GraphQL schema to do so. Here's the request VTL:
{
  "version": "2017-02-28",
  "operation": "Query",
  "query": {
    "expression": "id = :id",
    "expressionValues": {
      ":id": {
        "S": "$context.args.id"
      }
    }
  },
  "nextToken": $util.toJson($util.defaultIfNullOrEmpty($context.args.nextToken, null))
}
There could be thousands of items in the DynamoDB table for an id, so I wrote a Lambda function to query for all of them and return the result in a list, as follows (I know the code can be simplified):
exports.handler = async function (event, context, callback) {
  const graphqlClient = new appsync.AWSAppSyncClient({
    url: process.env.APPSYNC_ENDPOINT,
    region: process.env.AWS_REGION,
    auth: {
      type: 'AWS_IAM',
      credentials: {
        accessKeyId: process.env.AWS_ACCESS_KEY_ID,
        secretAccessKey: process.env.AWS_SECRET_ACCESS_KEY,
        sessionToken: process.env.AWS_SESSION_TOKEN
      }
    },
    disableOffline: true
  });

  const query = gql`
    query GetAllItemsById($id: String!, $nextToken: String) {
      getAllItemsById(id: $id, nextToken: $nextToken) {
        exampleField {
          subField1
          subField2
          subField3
        }
        nextToken
      }
    }
  `;

  const initialResult = await graphqlClient.query({
    query,
    variables: {
      id: event.id
    }
  });

  var finalResult = initialResult.data.getAllItemsById.exampleField;
  var nextToken = initialResult.data.getAllItemsById.nextToken;

  while (nextToken !== null) {
    const result = await graphqlClient.query({
      query,
      variables: {
        id: event.id,
        nextToken: nextToken
      }
    });
    finalResult = finalResult.concat(result.data.getAllItemsById.exampleField);
    nextToken = result.data.getAllItemsById.nextToken;
  }

  console.log("Total Results: " + finalResult.length);
  return callback(null, finalResult);
};
For some reason, not all items are being returned: nextToken comes back null before all the results have been retrieved. I know DynamoDB has a 1 MB limit per query, which is why I'm paginating with nextToken, but why is it still not returning all the items in the table? Also, if there's a better way to implement this, I'm open to it.
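On the "better way" part: since the Lambda already loops until pagination is exhausted, one alternative (assuming the function can be granted read access to the table directly) is to skip the AppSync round-trips and page on the table itself with the DocumentClient. The table and attribute names below are placeholders:

const AWS = require('aws-sdk');
const docClient = new AWS.DynamoDB.DocumentClient();

// Query every item for one id, following LastEvaluatedKey until DynamoDB
// reports there are no more pages.
async function getAllItemsById(id) {
  const items = [];
  let lastEvaluatedKey;

  do {
    const result = await docClient
      .query({
        TableName: 'MY_TABLE',
        KeyConditionExpression: 'id = :id',
        ExpressionAttributeValues: { ':id': id },
        ExclusiveStartKey: lastEvaluatedKey
      })
      .promise();

    items.push(...result.Items);
    lastEvaluatedKey = result.LastEvaluatedKey;
  } while (lastEvaluatedKey);

  return items;
}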

DynamoDB - The provided key element does not match the schema

I have a DynamoDB table with the attributes userId, propertyInfo, and propertyId. userId is the primary key. When I use the following Lambda code to update (PUT) the item in the table, I get "The provided key element does not match the schema".
const AWS = require('aws-sdk'); // eslint-disable-line import/no-extraneous-dependencies
const dynamoDb = new AWS.DynamoDB.DocumentClient();

module.exports.update = (event, context, callback) => {
  const timestamp = new Date().getTime();
  const data = JSON.parse(event.body);

  const params = {
    TableName: process.env.DYNAMODB_TABLE,
    Key: {
      propertyId: event.pathParameters.id,
    },
    ExpressionAttributeNames: {
      '#new_propertyInfo': 'propertyInfo',
    },
    ExpressionAttributeValues: {
      ':propertyInfo': data.propertyInfo,
    },
    UpdateExpression: 'SET #new_propertyInfo = :propertyInfo',
    ReturnValues: 'ALL_NEW',
  };

  dynamoDb.update(params, (error, result) => {
    // handle potential errors
    if (error) {
      console.error(error);
      callback(null, {
        statusCode: error.statusCode || 501,
        headers: { 'Content-Type': 'text/plain' },
        body: 'Couldn\'t fetch the item.',
      });
      return;
    }

    // create a response
    const response = {
      statusCode: 200,
      body: JSON.stringify(result.Attributes),
    };
    callback(null, response);
  });
};
Body of my update request is:
{
  "propertyInfo": {
    "houseNumber": 2000,
    "street": "easy st"
  }
}
The event.pathParameters.id is obtained from /property/{id}. I need this id to look up the item by propertyId; the userId is only needed for authorization purposes. In other words, for both the search and the update I need to find the item by propertyId. Could someone explain what I need to do to set this up correctly, please?
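For what it's worth, that error usually means the Key object doesn't match the table's key schema: since userId is the partition key, update must be given userId, not propertyId. Here is a sketch of one common workaround, assuming a global secondary index on propertyId (the index name "propertyId-index" is hypothetical):

const AWS = require('aws-sdk');
const dynamoDb = new AWS.DynamoDB.DocumentClient();

async function updatePropertyInfo(propertyId, propertyInfo) {
  // 1. Look up the item's real table key via a GSI on propertyId
  //    ("propertyId-index" is a hypothetical index name).
  const found = await dynamoDb
    .query({
      TableName: process.env.DYNAMODB_TABLE,
      IndexName: 'propertyId-index',
      KeyConditionExpression: 'propertyId = :pid',
      ExpressionAttributeValues: { ':pid': propertyId },
    })
    .promise();

  if (found.Items.length === 0) {
    throw new Error(`No item found for propertyId ${propertyId}`);
  }

  // 2. Update using the key the table is actually defined with (userId)
  return dynamoDb
    .update({
      TableName: process.env.DYNAMODB_TABLE,
      Key: { userId: found.Items[0].userId },
      UpdateExpression: 'SET #info = :info',
      ExpressionAttributeNames: { '#info': 'propertyInfo' },
      ExpressionAttributeValues: { ':info': propertyInfo },
      ReturnValues: 'ALL_NEW',
    })
    .promise();
}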