DynamoDB update duplicates records in List field

I'm performing an update (or create if it doesn't exist) on a DynamoDB table with the following params:
{
  "TableName": "TableName",
  "Key": {
    "PartitionKey": "TEST#",
    "SortKey": "SK#123456"
  },
  "UpdateExpression": "set WaterTests = list_append(if_not_exists(WaterTests, :empty_list), :waterTestValues), MakeModel = :mm, Version = :v, Username = :username",
  "ExpressionAttributeValues": {
    ":waterTestValues": [
      {
        "DateTest": "2022-03-29T09:40:50.985Z",
        "IsValid": true,
        "S3Path": "sample/s3/path"
      }
    ],
    ":v": "v1",
    ":mm": "No Make Model",
    ":username": "no.user#redacted.com",
    ":empty_list": []
  },
  "ReturnValues": "UPDATED_NEW"
}
It invariably results in the following (for a non-existing record with a unique HK+SK):
{
  "PartitionKey": "SESSION#",
  "SortKey": "USER#no.user#redacted.com#999fe1b3-0b59-4a41-9eef-37d433566af0",
  "WifiTests": [
    {
      "DateTest": "2022-03-29T09:40:50.985Z",
      "IsValid": true,
      "S3Path": "sample/s3/path"
    },
    {
      "DateTest": "2022-03-29T09:40:50.985Z",
      "IsValid": true,
      "S3Path": "sample/s3/path"
    }
  ]...
I'm using a standard update call as follows:
await docClient.update(params, function (err, data) {
  if (err) console.log(err);
  else console.log(data);
}).promise();
I have no clue why the duplication always happens.
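The usual culprit for exactly this symptom is mixing the callback style with .promise(): in the AWS SDK for JavaScript v2, supplying a callback to update() dispatches the request once, and calling .promise() on the returned request dispatches it again, so the update (and its list_append) runs twice. If that is the cause here, picking one style or the other should fix it. A minimal sketch of both, assuming the same docClient and params:

// Inside an async function: either await the promise (no callback)...
const data = await docClient.update(params).promise();
console.log(data);

// ...or use the callback form (and do not call .promise() afterwards):
docClient.update(params, function (err, data) {
  if (err) console.log(err);
  else console.log(data);
});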

Related

Loopback custom connector implementation

I am trying to implement a custom loopback connector and it's not clear to me how this all works.
Here are my models:
{
  "customer": {
    "dataSource": "qb",
    "public": false
  },
  "company": {
    "dataSource": "qb",
    "public": true
  },
  "payment": {
    "dataSource": "qb",
    "public": false
  },
  "invoice": {
    "dataSource": "qb",
    "public": false
  }
}
The most important part of the model (trimmed here to save space) is:
{
  "relations": {
    "company": {
      "type": "belongsTo",
      "model": "company",
      "foreignKey": "id",
      "primaryKey": "id"
    }
  }
}
And, in company.json
{
  "name": "company",
  "plural": "companies",
  "base": "Model",
  "idInjection": true,
  "options": {
    "validateUpsert": true
  },
  "properties": {
    "id": {
      "type": "string",
      "required": true
    }
  },
  "validations": [],
  "relations": {
    "customers": {
      "type": "hasMany",
      "model": "customer",
      "foreignKey": "customerId"
    },
    "payments": {
      "type": "hasMany",
      "model": "payment",
      "foreignKey": "customerId"
    },
    "invoices": {
      "type": "hasMany",
      "model": "customer",
      "foreignKey": "customerId"
    }
  },
  "acls": [],
  "methods": {}
}
which, as expected, produces URLs like:
/companies/${id}/customers/${fk}
So, I try the swagger UI and submit: GET /companies/4620816365214377730/customers/456
The problem I have is now two-fold:
1. It calls the all function on my connector every time, right away. That doesn't make sense: I've given it two specific IDs, so why would it possibly want all of anything?
2. I worked around the above and produced the expected results, but then loopback reports a 404:
{
  "error": {
    "statusCode": 404,
    "name": "Error",
    "message": "could not find a model with id 4620816365214377730",
    "code": "MODEL_NOT_FOUND",
    "stack": "Error: could not find a model with id 4620816365214377730"
  }
}
So, I definitely don't get it. The first param of the callback is the error and the second is the result, and I have literally hardcoded it to be right (I think).
How do I implement simple CRUD? Why does it not call my findById function? I have breakpoints everywhere.
const {Connector: connector} = require('loopback-connector')
const util = require("util");

exports.initialize = function initializeDataSource(dataSource, callback) {
  dataSource.connector = new QbConnector(dataSource.settings);
  dataSource.connector.dataSource = dataSource;
};
exports.QbConnector = QbConnector

function QbConnector(settings, datasource) {
  connector.call(this, 'quickbooks', settings)
  this.datasource = datasource
  this.client = require(`./qb`)(require('./axios'))
}
util.inherits(QbConnector, connector);
// connector.defineAliases(QbConnector.prototype, 'find', 'findById');

QbConnector.prototype.create = function (data, callback) {
  console.log()
}
QbConnector.prototype.replaceOrCreate = function (model, data, options, cb) {
  console.log()
}
QbConnector.prototype.findOne = function (filter, cb) {
  console.log()
}
QbConnector.prototype.all = function (model, filter, callback) {
  this.client[model]?.get(filter.where.id)
    ?.then(data => callback(null, {id: filter.where.id}))
    ?.catch(e => callback(JSON.stringify(e.response.data, null, 4)))
}
QbConnector.prototype.count = function (whereClause, callback) {
  console.log()
}
QbConnector.prototype.save = function (model, data, options, cb) {
  console.log()
}
QbConnector.prototype.findById = function (id, filter, options) {
  console.log()
}
When I step into the callback, its definition is a guaranteed error (the message I am seeing):
(function anonymous(number, plural, select, pluralFuncs, fmt) {
  return function (d) { return "could not find a model with id " + d["0"]; }
})
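For what it's worth, in loopback-datasource-juggler most read operations, findById included, are funneled through the connector's all() method with a where filter on the id, and the juggler expects the callback to receive an array of matching records, where an empty array means "not found". Returning a bare object like {id: ...} would plausibly explain both the all() calls you see and the MODEL_NOT_FOUND 404 despite your hardcoding. A sketch under that assumption (this.client is the question's own wrapper, and the merge of data into the result is illustrative):

QbConnector.prototype.all = function (model, filter, callback) {
  const id = filter && filter.where && filter.where.id
  this.client[model]
    ?.get(id)
    ?.then(data => {
      // The juggler expects an ARRAY of matching records; an empty array
      // means "not found". A bare object here triggers MODEL_NOT_FOUND.
      callback(null, [Object.assign({ id: id }, data)])
    })
    ?.catch(e => callback(e))
}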

AWS DynamoDB batchWriteItem in Lambda is not working properly

I have an app created via AWS Amplify, and I created an AWS Lambda function that aims to insert multiple items into one of the tables in DynamoDB.
First, I tried mapping the items to documentClient.put({...params}).promise() calls and running Promise.all() over them, but some items got lost: for example, I add 40 items at once but only 5-10 get added. I thought this could be some Lambda limit issue, so I switched to batchWriteItem(), but things stayed the same or got even worse (only 1 out of 40 got added). Here is the code I wrote:
export const addAvailabilities = async (
  docClient: AWS.DynamoDB.DocumentClient,
  newAvailabilities: Availability[],
  expertId: string,
  cognitoUserId: string
) => {
  console.info('#addAvailabilities: Start')
  try {
    let restArray = [...newAvailabilities]
    const promiseArray = []
    while (restArray.length > 0) {
      const executingArray = restArray.slice(0, 25)
      const temp = batchAddAvailability(docClient, executingArray, expertId, cognitoUserId)
      promiseArray.push(temp)
      restArray = restArray.slice(25)
    }
    const result = await Promise.all(promiseArray)
    console.info(result)
    console.info('#addAvailabilities: End')
    return result
  } catch (err) {
    console.error('#addAvailabilities: Error')
    throw err
  }
}

const mapBatchAddAvailabilityParams = (newAvailabilities: Availability[], expertId: string, cognitoUserId: string) => {
  return newAvailabilities.map((availability, index) => {
    const currentTime = `${moment().utc().format('YYYY-MM-DD[T]HH:mm:ss.SS')}${index}Z`
    return {
      PutRequest: {
        Item: {
          id: uuid(),
          __typename: 'ExpertAvailability',
          expertId: expertId,
          owner: cognitoUserId,
          startTime: availability.start,
          status: 'available',
          createdAt: currentTime,
          updatedAt: currentTime
        }
      }
    }
  })
}

const batchAddAvailability = async (
  docClient: AWS.DynamoDB.DocumentClient,
  newAvailabilities: Availability[],
  expertId: string,
  cognitoUserId: string
) => {
  console.info('#batchAddAvailability: Start')
  try {
    const batchParams = mapBatchAddAvailabilityParams(newAvailabilities, expertId, cognitoUserId)
    const param = {
      RequestItems: {
        [process.env.API_TAP_EXPERTAVAILABILITYTABLE_NAME]: batchParams
      }
    }
    console.info('params', JSON.stringify(param))
    return docClient.batchWrite(param).promise()
  } catch (err) {
    console.error('#batchAddAvailability: Error')
    throw err
  }
}
I added the line const currentTime = `${moment().utc().format('YYYY-MM-DD[T]HH:mm:ss.SS')}${index}Z` because I saw a post with a similar issue to mine, and that was the solution that solved their problem, but it doesn't solve mine.
From the logs, the params get formatted correctly, like this:
{
  "RequestItems": {
    "Availability": [
      {
        "PutRequest": {
          "Item": {
            "id": "66a7b63e-a14b-4ba2-94a9-0dd7bf457efe",
            "__typename": "ExpertAvailability",
            "expertId": "8d30c1a4-685e-40bf-b884-6e50bb422e99",
            "owner": "test-keycloak_eeb71cb1-6c11-4fb9-a721-ce5dc7d06269",
            "startTime": "2021-10-03T23:00:00.000Z",
            "status": "available",
            "createdAt": "2021-09-08T10:24:28.880Z",
            "updatedAt": "2021-09-08T10:24:28.880Z"
          }
        }
      },
      {
        "PutRequest": {
          "Item": {
            "id": "162d839d-7fde-417e-994b-2dc12336c4cf",
            "__typename": "ExpertAvailability",
            "expertId": "8d30c1a4-685e-40bf-b884-6e50bb422e99",
            "owner": "test-keycloak_eeb71cb1-6c11-4fb9-a721-ce5dc7d06269",
            "startTime": "2021-10-04T00:00:00.000Z",
            "status": "available",
            "createdAt": "2021-09-08T10:24:28.881Z",
            "updatedAt": "2021-09-08T10:24:28.881Z"
          }
        }
      },
      {
        "PutRequest": {
          "Item": {
            "id": "dc257c75-9a27-482a-88c5-1747ffe97361",
            "__typename": "ExpertAvailability",
            "expertId": "8d30c1a4-685e-40bf-b884-6e50bb422e99",
            "owner": "test-keycloak_eeb71cb1-6c11-4fb9-a721-ce5dc7d06269",
            "startTime": "2021-10-04T01:00:00.000Z",
            "status": "available",
            "createdAt": "2021-09-08T10:24:28.932Z",
            "updatedAt": "2021-09-08T10:24:28.932Z"
          }
        }
      },
      {
        "PutRequest": {
          "Item": {
            "id": "75b2e911-e842-4f11-99ed-702c6cf1c485",
            "__typename": "ExpertAvailability",
            "expertId": "8d30c1a4-685e-40bf-b884-6e50bb422e99",
            "owner": "test-keycloak_eeb71cb1-6c11-4fb9-a721-ce5dc7d06269",
            "startTime": "2021-10-04T02:00:00.000Z",
            "status": "available",
            "createdAt": "2021-09-08T10:24:28.943Z",
            "updatedAt": "2021-09-08T10:24:28.943Z"
          }
        }
      },
      {
        "PutRequest": {
          "Item": {
            "id": "df65e151-1699-446d-ab3b-06aca707a2fb",
            "__typename": "ExpertAvailability",
            "expertId": "8d30c1a4-685e-40bf-b884-6e50bb422e99",
            "owner": "test-keycloak_eeb71cb1-6c11-4fb9-a721-ce5dc7d06269",
            "startTime": "2021-10-04T03:00:00.000Z",
            "status": "available",
            "createdAt": "2021-09-08T10:24:28.944Z",
            "updatedAt": "2021-09-08T10:24:28.944Z"
          }
        }
      },
      {
        "PutRequest": {
          "Item": {
            "id": "98a5bdd2-60d3-4d5d-9913-40b5c84c6d62",
            "__typename": "ExpertAvailability",
            "expertId": "8d30c1a4-685e-40bf-b884-6e50bb422e99",
            "owner": "test-keycloak_eeb71cb1-6c11-4fb9-a721-ce5dc7d06269",
            "startTime": "2021-10-04T04:00:00.000Z",
            "status": "available",
            "createdAt": "2021-09-08T10:24:28.945Z",
            "updatedAt": "2021-09-08T10:24:28.945Z"
          }
        }
      }
    ]
  }
}
There are no UnprocessedItems or any errors returned from the execution, but the items are just missing. The capacity of the table is on-demand, so I think capacity shouldn't be a problem. Any ideas what's wrong? Many thanks.
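One pattern worth adding regardless of the root cause: batchWrite() can silently return leftovers in UnprocessedItems rather than raising an error, so production code usually resubmits them with backoff. Even though the logs above show none, a retry loop rules that path out. A hedged sketch against the v2 DocumentClient used above (batchWriteWithRetry is a made-up helper name):

const batchWriteWithRetry = async (docClient, tableName, requests, maxRetries = 5) => {
  let unprocessed = requests
  for (let attempt = 0; unprocessed.length > 0 && attempt <= maxRetries; attempt++) {
    const result = await docClient
      .batchWrite({ RequestItems: { [tableName]: unprocessed } })
      .promise()
    // Anything DynamoDB could not write comes back here instead of throwing.
    unprocessed = (result.UnprocessedItems && result.UnprocessedItems[tableName]) || []
    if (unprocessed.length > 0) {
      // Simple exponential backoff before resubmitting the leftovers.
      await new Promise((resolve) => setTimeout(resolve, 100 * 2 ** attempt))
    }
  }
  if (unprocessed.length > 0) {
    throw new Error(`${unprocessed.length} items still unprocessed after retries`)
  }
}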

AWS Lambda updating object in DynamoDB

I've been trying to update an object in DynamoDB for hours and I can't get it to work. I'm using the DocumentClient library and its update() method. When I tested it with API Gateway, I got this error:
{
  "errorType": "TypeError",
  "errorMessage": "Cannot read property '_id' of undefined",
  "trace": [
    "TypeError: Cannot read property '_id' of undefined",
    "    at Runtime.exports.handler (/var/task/index.js:20:44)",
    "    at Runtime.handleOnce (/var/runtime/Runtime.js:66:25)"
  ]
}
Here is my code:
// (Assumes earlier in the file: const AWS = require('aws-sdk');
//  const dynamodb = new AWS.DynamoDB.DocumentClient();)
exports.handler = (event, context, callback) => {
  console.log(event);
  const id = event.listId;
  const params = {
    Key: {
      "ListId": id
    },
    ExpressionAttributeNames: {
      "#name": "Name",
      "#shop": "Shop"
    },
    ExpressionAttributeValues: {
      ":name": event.listName,
      ":shop": {
        "_id": event.listShop._id,
        "name": event.listShop.name,
        "address": event.listShop.address,
        "city": event.listShop.city
      }
    },
    TableName: "mk-lists",
    UpdateExpression: "SET #name = :name, #shop = :shop"
  };
  dynamodb.update(params, (err, data) => {
    if (err) {
      console.log(err);
      callback(err);
    } else {
      console.log(data);
      callback(null, data);
    }
  });
};
I have a Shop field in the Lists table, which is an object. Also, it works when I test it directly in the Lambda console. Can someone help me with this? Thanks in advance.
Here is my request body:
{
  "listName": "Lista 13131",
  "listShop": {
    "_id": "933c836c-6868-4f56-a769-d59f5cbb231e",
    "name": "DIS",
    "address": "Podrinska 12",
    "city": "Uzice"
  }
}
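A plausible explanation for working in the Lambda console but failing behind API Gateway: with a Lambda proxy integration, the request body is not spread onto the event object; it arrives as a JSON string in event.body, so event.listShop is undefined at runtime even though the same payload works as a Lambda test event. If that is the setup here, parsing the body first should fix it. A sketch, assuming proxy integration:

exports.handler = (event, context, callback) => {
  // With a proxy integration the payload arrives as a JSON string in
  // event.body; in a Lambda console test the fields sit on event itself.
  const body = typeof event.body === 'string' ? JSON.parse(event.body) : event;
  const id = body.listId;
  const shop = body.listShop;
  // ...build the update params from body.listName, shop._id, shop.name, etc.
};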

DynamoDB query: get matched objects with filter expression

My data is stored in a DynamoDB table "mysettings" with the following schema: "pk" as the hash key and "sk" as the range key.
Example data item:
{
  "mSettings": {
    "gflag": true,
    "name": "green",
    "type": "B"
  },
  "pk": "Group1",
  "sk": "A1000",
  "xSettings": {
    "gflag": false,
    "name": "blue",
    "type": "A"
  },
  "ySettings": {
    "gflag": false,
    "name": "green",
    "type": "B"
  },
  "zSettings": {
    "gflag": true,
    "name": "red",
    "type": "A"
  }
}
Now I am trying to fetch (filter) only those settings where the gflag field is true.
For the example item above, it should return only the "mSettings" and "zSettings" nodes.
Below is the sample code I tried:
var AWS = require('aws-sdk');

let region = "us-east-1";
AWS.config.region = region;

var docClient = new AWS.DynamoDB.DocumentClient();

let settingsItem = ["mSettings", "xSettings", "ySettings", "zSettings"];

let params = {
  TableName: "mysettings",
  KeyConditionExpression: "#pk = :pk AND #sk = :sk",
  ExpressionAttributeNames: {
    "#pk": "pk",
    "#sk": "sk"
  },
  // FilterExpression: "ySettings.gflag = :gflag",
  ExpressionAttributeValues: {
    ":pk": 'Group1',
    ":sk": 'A1000',
    ":gflag": true
  }
};

//for (let settings in settingsItem) {
//  params['FilterExpression'] = settingsItem[settings] + ".gflag = :gflag";
//}

console.log('params = ', params)

docClient.query(params, function (err, data) {
  if (err) {
    console.log(err)
  }
  console.log('data = ', data)
})
Please suggest how to retrieve only those objects where the flag value is true.
DynamoDB filters operate at an item level. They determine whether or not an item is returned to you. They can't be used to control which groups of attributes are returned to you. You can easily do that client-side with the query results.
Here's an example of post-processing the query results object to remove the sub-objects where gflag is not true:
const _ = require('lodash');

function gflag_true(value, key) {
  return _.isObject(value) && value.gflag;
}

const item = {
  "mSettings": { "gflag": true, "name": "green", "type": "B" },
  "pk": "Group1",
  "sk": "A1000",
  "xSettings": { "gflag": false, "name": "blue", "type": "A" },
  "ySettings": { "gflag": false, "name": "green", "type": "B" },
  "zSettings": { "gflag": true, "name": "red", "type": "A" }
}

// transform item returned from DocumentClient query method
const rc = _.pickBy(item, gflag_true)
This results in:
{
  mSettings: { gflag: true, name: 'green', type: 'B' },
  zSettings: { gflag: true, name: 'red', type: 'A' }
}
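Wired into the question's query, the same transform runs per returned item; a sketch (note that DynamoDB rejects requests whose ExpressionAttributeValues contain unused entries, so the :gflag value must be dropped while the FilterExpression stays commented out):

docClient.query(params, function (err, data) {
  if (err) return console.log(err);
  // Keep only the *Settings sub-objects whose gflag is true, per item.
  const filtered = data.Items.map(item => _.pickBy(item, gflag_true));
  console.log('filtered = ', filtered);
});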

Using $slice with $regex together on a subdocument array in MongoDB

Hi, I am stuck on a problem in Mongo. Let me first show you an example of the data. This is a record from my collection:
{
  "_id": "3691149613248",
  "following": [{
    "content_id": "2584833593728",
    "date": "2015-08-20 12:46:55"
  }, {
    "content_id": "3693447751360",
    "date": "2015-09-11 12:17:55"
  }, {
    "content_id": "2582396936896",
    "date": "2015-09-12 07:04:02"
  }, {
    "content_id": "3697346507456",
    "date": "2015-09-14 09:56:03"
  }, {
    "content_id": "3697755500800",
    "date": "2015-09-16 10:05:51"
  }, {
    "content_id": "2589701320192",
    "date": "2015-09-16 10:51:19"
  }, {
    "content_id": "2585723555136",
    "date": "2015-09-16 11:40:26"
  }, {
    "content_id": "3695996668352",
    "date": "2015-09-16 12:50:25"
  }, {
    "content_id": "3694290368512",
    "date": "2015-09-16 12:50:33"
  }, {
    "content_id": "3691210127552",
    "date": "2015-09-16 13:02:57"
  }, {
    "content_id": "3694134958464",
    "date": "2015-09-16 13:06:17"
  }, {
    "content_id": "3697315148736",
    "date": "2015-09-17 06:58:35"
  }, {
    "content_id": "3692104837824",
    "date": "2015-09-17 12:19:12"
  }, {
    "content_id": "3693400309376",
    "date": "2015-09-22 05:43:04"
  }]
}
I want to fetch the following array with a condition so that only specific records are fetched, i.e. content_ids with the prefix 369, returning only the number of content_ids specified by a limit and offset.
I am using $slice to fetch records for a given limit and offset on the following array, but how do I filter content_id along with $slice?
My current query:
db.collectionName.find({
  _id: "3691149613248"
}, {
  "following": {
    "$slice": [0, 10]
  }
});
This fetches the following array sliced to the limit and offset I specify, but it returns all content_ids, with both the 258 and 369 prefixes, and I only need the content_ids with the prefix 369. Can anyone help?
You can use a combination of $unwind and $match with the Mongo aggregation pipeline to get the expected output:
db.collection.aggregate({
  $match: {
    "_id": "3691149613248" // you can skip this condition if not required
  }
}, {
  $unwind: "$following"
}, {
  $match: {
    "following.content_id": {
      $regex: /^369/
    }
  }
}, {
  $group: {
    _id: "$_id",
    "following": {
      $push: "$following"
    }
  }
})
If you want to apply skip and limit to the above query, you can easily do it like this:
db.collection.aggregate({
  $match: {
    "_id": "3691149613248" // use this if you want to filter by _id
  }
}, {
  $unwind: "$following"
}, {
  $match: {
    "following.content_id": {
      $regex: /^369/
    }
  }
}, {
  $skip: 4 // you can set the offset here
}, {
  $limit: 3 // you can set the limit here
}, {
  $group: {
    _id: "$_id",
    "following": {
      $push: "$following"
    }
  }
})
EDIT:
If you are using a PHP version less than 5.4, the query will be as follows (the Mongo operator names are in single quotes so that PHP does not try to interpolate them as variables):
$search = "369";
$criteria = array(
    array('$match' => array("_id" => "3691149613248")),
    array('$unwind' => '$following'),
    array('$match' => array("following.content_id" => array('$regex' => new MongoRegex("/^$search/")))),
    array('$skip' => 4),
    array('$limit' => 3),
    array('$group' => array("_id" => '$_id', "following" => array('$push' => '$following')))
);
$collection->aggregate($criteria);
If you are using a PHP version greater than 5.3, you can replace the array( and ) constructs with [ and ] respectively (the short array syntax).
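As an aside, on MongoDB 4.2 or later the $unwind/$group round trip can be avoided by filtering the array in place with $filter plus $regexMatch and then slicing for offset and limit. A hedged sketch:

db.collection.aggregate([
  { $match: { _id: "3691149613248" } },
  { $project: {
      following: {
        $slice: [
          // Keep only the array elements whose content_id starts with 369.
          { $filter: {
              input: "$following",
              as: "f",
              cond: { $regexMatch: { input: "$$f.content_id", regex: /^369/ } }
          } },
          4, // offset
          3  // limit
        ]
      }
  } }
])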