AWS DynamoDB batchWriteItem in Lambda is not working properly - amazon-web-services

I have an app created via AWS Amplify, and I created an AWS Lambda function that aims to insert multiple items into one of the tables in DynamoDB.
Firstly, I tried mapping attributes to documentClient.put({...params}).promise() and running Promise.all() on it, but some items got lost. For example, I added 40 items at once, but only 5-10 got added. I thought this could be some Lambda limit issue, so I switched to batchWriteItem(), but it seems to remain the same or even worse (only 1 out of 40 got added). Here is the code I wrote:
/**
 * Inserts the given availabilities into DynamoDB for one expert.
 *
 * DynamoDB's BatchWriteItem accepts at most 25 put requests per call, so the
 * input is split into chunks of 25 and every chunk is written concurrently
 * via batchAddAvailability.
 *
 * @param docClient         DocumentClient used for the writes.
 * @param newAvailabilities Availability slots to persist.
 * @param expertId          Expert the slots belong to.
 * @param cognitoUserId     Cognito user recorded as the item owner.
 * @returns Array with one batchWrite result per chunk.
 * @throws Re-throws any error raised while building or awaiting the batches.
 */
export const addAvailabilities = async (
  docClient: AWS.DynamoDB.DocumentClient,
  newAvailabilities: Availability[],
  expertId: string,
  cognitoUserId: string
) => {
  console.info('#addAvailabilities: Start')
  try {
    // BatchWriteItem hard limit: 25 put/delete requests per call.
    const batchSize = 25
    const pendingBatches = []
    for (let offset = 0; offset < newAvailabilities.length; offset += batchSize) {
      const chunk = newAvailabilities.slice(offset, offset + batchSize)
      pendingBatches.push(batchAddAvailability(docClient, chunk, expertId, cognitoUserId))
    }
    // All chunks are written in parallel; fail fast on the first rejection.
    const result = await Promise.all(pendingBatches)
    console.info(result)
    console.info('#addAvailabilities: End')
    return result
  } catch (err) {
    console.error('#addAvailabilities: Error')
    throw err
  }
}
/**
 * Maps Availability slots to DynamoDB BatchWrite PutRequest entries.
 *
 * Each entry gets a fresh UUID id and audit timestamps. The batch index is
 * appended before the trailing 'Z' so items created within the same
 * hundredth of a second still receive distinct timestamp strings.
 *
 * @param newAvailabilities Slots to convert.
 * @param expertId          Expert id stamped on every item.
 * @param cognitoUserId     Owner recorded on every item.
 * @returns Array of { PutRequest: { Item } } objects ready for batchWrite.
 */
const mapBatchAddAvailabilityParams = (newAvailabilities: Availability[], expertId: string, cognitoUserId: string) => {
  return newAvailabilities.map((availability, index) => {
    const timestampBase = moment().utc().format('YYYY-MM-DD[T]HH:mm:ss.SS')
    // NOTE: index is spliced in before the 'Z' purely as a uniqueness hack.
    const currentTime = `${timestampBase}${index}Z`
    const item = {
      id: uuid(),
      __typename: 'ExpertAvailability',
      expertId,
      owner: cognitoUserId,
      startTime: availability.start,
      status: 'available',
      createdAt: currentTime,
      updatedAt: currentTime
    }
    return { PutRequest: { Item: item } }
  })
}
/**
 * Writes one batch (max 25 items) of availabilities to the expert
 * availability table via BatchWriteItem.
 *
 * Fixes over the original:
 * - The batchWrite promise is now awaited inside the try block; previously
 *   the un-awaited promise was returned, so rejections skipped the catch
 *   and the '#batchAddAvailability: Error' log never fired.
 * - UnprocessedItems are re-submitted. BatchWriteItem may partially succeed
 *   (throttling, size limits) without raising an error; silently ignoring
 *   UnprocessedItems loses those items.
 * - The table-name env var is validated instead of being used as a
 *   possibly-undefined key.
 *
 * @param docClient         DocumentClient used for the write.
 * @param newAvailabilities Slots for this batch (caller caps at 25).
 * @param expertId          Expert id stamped on every item.
 * @param cognitoUserId     Owner recorded on every item.
 * @returns The result of the final batchWrite call.
 * @throws If the table name env var is missing or DynamoDB rejects a call.
 */
const batchAddAvailability = async (
  docClient: AWS.DynamoDB.DocumentClient,
  newAvailabilities: Availability[],
  expertId: string,
  cognitoUserId: string
) => {
  console.info('#batchAddAvailability: Start')
  try {
    const tableName = process.env.API_TAP_EXPERTAVAILABILITYTABLE_NAME
    if (!tableName) {
      throw new Error('API_TAP_EXPERTAVAILABILITYTABLE_NAME is not set')
    }
    const batchParams = mapBatchAddAvailabilityParams(newAvailabilities, expertId, cognitoUserId)
    const param = {
      RequestItems: {
        [tableName]: batchParams
      }
    }
    console.info('params', JSON.stringify(param))
    // Await here so a rejection is caught and logged by this function.
    let result = await docClient.batchWrite(param).promise()
    // Re-submit anything DynamoDB did not process; bounded to avoid looping
    // forever under sustained throttling.
    let attempts = 0
    while (result.UnprocessedItems && Object.keys(result.UnprocessedItems).length > 0 && attempts < 5) {
      attempts += 1
      console.warn(`#batchAddAvailability: retrying UnprocessedItems, attempt ${attempts}`)
      result = await docClient.batchWrite({ RequestItems: result.UnprocessedItems }).promise()
    }
    return result
  } catch (err) {
    console.error('#batchAddAvailability: Error')
    throw err
  }
}
I added this `const currentTime = `${moment().utc().format('YYYY-MM-DD[T]HH:mm:ss.SS')}${index}Z`` because I saw a post with a similar issue to mine, and that solution solved the author's problem. But it doesn't solve mine.
From the logs, params get format correctly like this:
{
"RequestItems": {
"Availability": [
{
"PutRequest": {
"Item": {
"id": "66a7b63e-a14b-4ba2-94a9-0dd7bf457efe",
"__typename": "ExpertAvailability",
"expertId": "8d30c1a4-685e-40bf-b884-6e50bb422e99",
"owner": "test-keycloak_eeb71cb1-6c11-4fb9-a721-ce5dc7d06269",
"startTime": "2021-10-03T23:00:00.000Z",
"status": "available",
"createdAt": "2021-09-08T10:24:28.880Z",
"updatedAt": "2021-09-08T10:24:28.880Z"
}
}
},
{
"PutRequest": {
"Item": {
"id": "162d839d-7fde-417e-994b-2dc12336c4cf",
"__typename": "ExpertAvailability",
"expertId": "8d30c1a4-685e-40bf-b884-6e50bb422e99",
"owner": "test-keycloak_eeb71cb1-6c11-4fb9-a721-ce5dc7d06269",
"startTime": "2021-10-04T00:00:00.000Z",
"status": "available",
"createdAt": "2021-09-08T10:24:28.881Z",
"updatedAt": "2021-09-08T10:24:28.881Z"
}
}
},
{
"PutRequest": {
"Item": {
"id": "dc257c75-9a27-482a-88c5-1747ffe97361",
"__typename": "ExpertAvailability",
"expertId": "8d30c1a4-685e-40bf-b884-6e50bb422e99",
"owner": "test-keycloak_eeb71cb1-6c11-4fb9-a721-ce5dc7d06269",
"startTime": "2021-10-04T01:00:00.000Z",
"status": "available",
"createdAt": "2021-09-08T10:24:28.932Z",
"updatedAt": "2021-09-08T10:24:28.932Z"
}
}
},
{
"PutRequest": {
"Item": {
"id": "75b2e911-e842-4f11-99ed-702c6cf1c485",
"__typename": "ExpertAvailability",
"expertId": "8d30c1a4-685e-40bf-b884-6e50bb422e99",
"owner": "test-keycloak_eeb71cb1-6c11-4fb9-a721-ce5dc7d06269",
"startTime": "2021-10-04T02:00:00.000Z",
"status": "available",
"createdAt": "2021-09-08T10:24:28.943Z",
"updatedAt": "2021-09-08T10:24:28.943Z"
}
}
},
{
"PutRequest": {
"Item": {
"id": "df65e151-1699-446d-ab3b-06aca707a2fb",
"__typename": "ExpertAvailability",
"expertId": "8d30c1a4-685e-40bf-b884-6e50bb422e99",
"owner": "test-keycloak_eeb71cb1-6c11-4fb9-a721-ce5dc7d06269",
"startTime": "2021-10-04T03:00:00.000Z",
"status": "available",
"createdAt": "2021-09-08T10:24:28.944Z",
"updatedAt": "2021-09-08T10:24:28.944Z"
}
}
},
{
"PutRequest": {
"Item": {
"id": "98a5bdd2-60d3-4d5d-9913-40b5c84c6d62",
"__typename": "ExpertAvailability",
"expertId": "8d30c1a4-685e-40bf-b884-6e50bb422e99",
"owner": "test-keycloak_eeb71cb1-6c11-4fb9-a721-ce5dc7d06269",
"startTime": "2021-10-04T04:00:00.000Z",
"status": "available",
"createdAt": "2021-09-08T10:24:28.945Z",
"updatedAt": "2021-09-08T10:24:28.945Z"
}
}
}
]
}
}
There are no UnprocessedItems or any errors returned from the execution, but the items are just missing. The capacity of the table is on-demand, so I think capacity shouldn't be a problem. Any ideas what's wrong? Many thanks.

Related

How to set or not set audio selector in AWS media convert?

I got this error, when creating job for AWS media convert:
Invalid selector_sequence_id [0] specified for audio_description [1].
I do not even need sound for my output mp4 video.
My intention is to loop for 2 second an image (png or jpg) and add a fade effect for the first frames.
How would you change the sent json?
{
"middlewareStack": {},
"input": {
"Queue": "arn:aws:mediaconvert:eu-central-1:634617701827:queues/Default",
"UserMetadata": {},
"Role": "arn:aws:iam::634617701827:role/service-role/MediaConvert_Default_Role",
"Settings": {
"TimecodeConfig": {
"Anchor": "00:00:00:00",
"Source": "EMBEDDED"
},
"OutputGroups": [
{
"Name": "File Group",
"Outputs": [
{
"Preset": "createPromoVideo",
"Extension": "mp4",
"NameModifier": "_fade",
"VideoDescription": {
"CodecSettings": {
"FilterGraph": "fade=out:150:30"
},
"ScalingBehavior": "DEFAULT",
"TimecodeInsertion": "DISABLED",
"AntiAlias": "ENABLED",
"Sharpness": 50,
"Height": 1080,
"Width": 1080
},
"AudioDescriptions": [
{
"AudioSelector": {
"SelectorSettings": [
{
"AudioSelectorName": "Default"
}
]
},
"CodecSettings": {
"Codec": "AAC",
"AacSettings": {
"Bitrate": 96000,
"CodingMode": "CODING_MODE_2_0",
"SampleRate": 48000
}
}
}
]
}
],
"OutputGroupSettings": {
"Type": "FILE_GROUP_SETTINGS",
"FileGroupSettings": {
"Destination": "s3://t44-post-cover/8fui.mp4",
"DestinationSettings": {
"S3Settings": {
"AccessControl": {
"CannedAcl": "PUBLIC_READ"
}
}
}
}
}
}
],
"Inputs": [
{
"FileInput": "s3://t44-post-cover/8fui",
"VideoSelector": {
"ColorSpace": "FOLLOW"
},
"FilterEnable": "AUTO",
"TimecodeSource": "ZEROBASED",
"InputClippings": [
{
"StartTimecode": "00:00:00:00",
"EndTimecode": "00:00:02:00"
}
],
"FilterGraph": "fade=in:0:30",
"AudioSelectors": {
"Default": {
"DefaultSelection": "DEFAULT"
}
}
}
]
},
"AccelerationSettings": {
"Mode": "DISABLED"
},
"StatusUpdateInterval": "SECONDS_60",
"Priority": 0
}
}
AWS MediaConvert requires you to have at least one Audio Selector.
Just provide it with this simple one:
"Inputs": [
...
{
"AudioSelectors": {
"Audio Selector 1": {
"Offset": 0,
"DefaultSelection": "DEFAULT",
"SelectorType": "LANGUAGE_CODE",
"ProgramSelection": 1,
"LanguageCode": "ENM"
}
},
...
},
UPDATE:
A more barebones one:
"Inputs": [
...
{
"AudioSelectors": {
"Audio Selector 1": {
DefaultSelection: 'DEFAULT',
},
}
},
...
},

Loopback custom connector implementation

I am trying to implement a custom loopback connector and it's not clear to me how this all works.
Here are my models:
{
"customer": {
"dataSource": "qb",
"public": false
},
"company": {
"dataSource": "qb",
"public": true
},
"payment": {
"dataSource": "qb",
"public": false
},
"invoice": {
"dataSource": "qb",
"public": false
}
}
The most important part to the model (and to save space) is
{
"relations": {
"company": {
"type": "belongsTo",
"model": "company",
"foreignKey": "id",
"primaryKey": "id"
}
}
}
And, in company.json
{
"name": "company",
"plural": "companies",
"base": "Model",
"idInjection": true,
"options": {
"validateUpsert": true
},
"properties": {
"id": {
"type": "string",
"required": true
}
},
"validations": [],
"relations": {
"customers": {
"type": "hasMany",
"model": "customer",
"foreignKey": "customerId"
},
"payments": {
"type": "hasMany",
"model": "payment",
"foreignKey": "customerId"
},
"invoices": {
"type": "hasMany",
"model": "customer",
"foreignKey": "customerId"
}
},
"acls": [],
"methods": {}
}
which, as expected, produces URLs like:
/companies/${id}/customers/${fk}
So, I try the swagger UI and submit: GET /companies/4620816365214377730/customers/456
The problem I have is now 2 fold:
It calls the all function on my connector every time - right away, that doesn't make sense. I've given it 2 specific IDs, so why would it possibly want all of anything?
I managed the above and produced the results asked, but then loopback reports a 404:
{
"error": {
"statusCode": 404,
"name": "Error",
"message": "could not find a model with id 4620816365214377730",
"code": "MODEL_NOT_FOUND",
"stack": "Error: could not find a model with id 4620816365214377730"
}
}
So, I definitely don't get it - the first param in callback is the err, and the second is the result. I have literally hardcoded it to be right (I think)
How do I implement simple CRUD? Why does it not call my findById function? I have breakpoints everywhere
const {Connector: connector} = require('loopback-connector')
const util = require("util");
// LoopBack entry point: called once per datasource that uses this connector.
// NOTE(review): `callback` is never invoked here — LoopBack tolerates that for
// synchronous initializers, but async setup would need callback()/connect().
exports.initialize = function initializeDataSource(dataSource, callback) {
dataSource.connector = new QbConnector(dataSource.settings);
dataSource.connector.dataSource = dataSource;
};
exports.QbConnector = QbConnector
// Custom connector for QuickBooks; delegates HTTP work to the local ./qb client.
function QbConnector(settings, datasource) {
connector.call(this, 'quickbooks', settings)
this.datasource = datasource
this.client = require(`./qb`)(require('./axios'))
}
util.inherits(QbConnector, connector);
// connector.defineAliases(QbConnector.prototype, 'find', 'findById');
// Stub: never calls `callback`, so LoopBack create requests will hang.
QbConnector.prototype.create = function(data, callback) {
console.log()
}
// Stub: see create() — no callback invocation.
QbConnector.prototype.replaceOrCreate = function(model, data, options, cb) {
console.log()
}
// Stub: see create() — no callback invocation.
QbConnector.prototype.findOne = function (filter,cb) {
console.log()
}
// The only implemented method. LoopBack routes findById-style lookups through
// all() with a `where` filter, which is why this fires for GET-by-id requests.
// NOTE(review): callback's second argument should be an ARRAY of matching
// records; passing a bare object is the likely cause of the 404
// MODEL_NOT_FOUND seen by the author — confirm against loopback-connector docs.
QbConnector.prototype.all = function(model, filter, callback) {
this.client[model]?.get(filter.where.id)
?.then(data => callback(null,{id: filter.where.id}))
?.catch(e => callback(JSON.stringify(e.response.data,null,4)))
}
// Stub: see create() — no callback invocation.
QbConnector.prototype.count = function (whereClause,callback) {
console.log()
}
// Stub: see create() — no callback invocation.
QbConnector.prototype.save = function(model, data, options, cb) {
console.log()
}
// Stub; also missing the callback parameter LoopBack passes after `options`.
QbConnector.prototype.findById = function (id, filter, options) {
console.log()
}
When I step into the callback it's definition is a guaranteed error (the message I am seeing)
(function anonymous(number, plural, select, pluralFuncs, fmt
) {
return function(d) { return "could not find a model with id " + d["0"]; }
})

AWS API Gateway as proxy to dynamo DB HTTP Get mapping template

I have a API gateway which does a get to the tables stored in dynamo DB.
The table stored looks like as JSON as show below
{
"photos": {
"page": 1,
"pages": "1234",
"perpage": 100,
"photo": [
{
"farm": 1,
"id": "12345678901",
"isfamily": 0,
"isfriend": 0,
"ispublic": 1,
"owner": "23456789#A12",
"secret": "abc123d456",
"server": "1234",
"title": "Sample photo 1"
},
{
"farm": 2,
"id": "23456789012",
"isfamily": 0,
"isfriend": 0,
"ispublic": 1,
"owner": "34567890#B23",
"secret": "bcd234e567",
"server": "2345",
"title": "Sample photo 2"
}
],
"total": "123398"
},
"srini": "srini"
}
Without an integration response mapping template I get the table as shown below
{
"Count": 1, "Items": [
{
"photos": {
"M": {
"photo": {
"L": [
{
"M": {
"owner": {
"S": "23456789#A12"
},
"server": {
"S": "1234"
},
"ispublic": {
"N": "1"
},
"isfriend": {
"N": "0"
},
"farm": {
"N": "1"
},
"id": {
"S": "12345678901"
},
"secret": {
"S": "abc123d456"
},
"title": {
"S": "Sample photo 1"
},
"isfamily": {
"N": "0"
}
}
},
{
"M": {
"owner": {
"S": "34567890#B23"
},
"server": {
"S": "2345"
},
"ispublic": {
"N": "1"
},
"isfriend": {
"N": "0"
},
"farm": {
"N": "2"
},
"id": {
"S": "23456789012"
},
"secret": {
"S": "bcd234e567"
},
"title": {
"S": "Sample photo 2"
},
"isfamily": {
"N": "0"
}
}
}
]
},
"perpage": {
"N": "100"
},
"total": {
"S": "123398"
},
"pages": {
"S": "1234"
},
"page": {
"N": "1"
}
}
},
"srini": {
"S": "srini"
}
} ], "ScannedCount": 1
}
I am trying to retrieve the data in JSON format so that the web client receives the table from DynamoDB as JSON. The mapping template I am trying to write is as follows:
#set($inputRoot = $input.path('$'))
{
#foreach($elem in $inputRoot.Items) {
"srini": "$elem.srini.S",
"pages": "$elem.photos.pages.S",
#foreach($elemnext in $elem.photos.photo) {
"id": "$elemnext.id.S"
}#if($foreach.hasNext),#end
#end
}#if($foreach.hasNext),#end
#end
}
I only can retrieve srini as show below
Response Body
{
{
"srini": "srini",
"pages": ""
}
}
All other data is not retrieved. What is the right way to write the mapping template? Can anyone let me know, please?
#set($inputRoot = $input.path('$'))
{
#foreach($elem in $inputRoot.Items) {
"srini": "$elem.srini.S",
"pages": "$elem.photos.M.pages.S",
#foreach($elemnext in $elem.photos.M.photo.L)
{
"id": "$elemnext.M.id.S"
} #if($foreach.hasNext),#end
#end
}#if($foreach.hasNext),#end
#end
}

Elasticsearch dynamic template to match several exact fields

I'm currently struggling to simplify my mapping template files for Elasticsearch. Indeed, I have several object fields that share the same structure (e.g. source and destination here).
Is there a way to set up Dynamic template so that it can match several patterns ?
Here's what I execute:
POST /_template/mapping-lol
{
"template": "*-newevents-*",
"mappings": {
"log": {
"dynamic_templates": [
{
"system": {
"match_pattern": "regex",
"match": "^(source|destination)$",
"mapping": {
"properties": {
"name": {
"dynamic": false,
"type": "object",
"properties": {
"first": {
"type": "text"
},
"last": {
"type": "text"
}
}
},
"ip": {
"type": "ip"
}
}
}
}
}
],
"properties": {
"source": {
"type": "object",
"dynamic": true
},
"destination": {
"type": "object",
"dynamic": true
}
}
}
}
}
POST /tenant-newevents-1/log
{
"source": {
"name": {
"first": "John",
"last": "Doe"
},
"ip": "1.2.3.4"
},
"destination": {
"name": {
"first": "Jane",
"last": "Doe"
},
"ip": "3.4.5.6"
}
}
GET /tenant-newevents-1
The above does not work.
I've got plenty of these same schemes to match (~20).
Thank you very much for your help !
OK I found out what went wrong: Fields mustn't be mapped at all for Dynamic mapping to proceed. Removing "source" and "destination" schemes in mapping worked.
POST /_template/mapping-lol
{
"template": "*-newevents-*",
"mappings": {
"log": {
"dynamic_templates": [
{
"system": {
"match_pattern": "regex",
"match": "^(source|destination)$",
"mapping": {
"properties": {
"name": {
"dynamic": false,
"type": "object",
"properties": {
"first": {
"type": "text"
},
"last": {
"type": "text"
}
}
},
"ip": {
"type": "ip"
}
}
}
}
}
],
"properties": {}
}
}
}

Parity POA: the validators are not getting paid in ETH for sealing blocks

I am working on creating a private Parity blockchain; however, the validators are not getting paid in ETH for sealing blocks.
I use this command line to check the balance:
curl -X POST -H "Content-Type: application/json" --data '{"jsonrpc":"2.0","method":"eth_getBalance","params":["0x0037a6b811ffeb6e072da21179d11b1406371c63", "latest"],"id":1}' http://172.0.0.1:8545
I want to ask how I can fix this problem.
{
"name": "Testnet",
"engine": {
"authorityRound": {
"params": {
"gasLimitBoundDivisor": "0x400",
"stepDuration": "2",
"validators" : {
"list": [ "0xa19b0e4f7ba1d5f74960c0aad794756a0a16eab4", "0x9c8f23e0a9377bd98322f8333142eadbaed200e8", "0x2f2033e303d4bf17403521e0c1830bac4ba09323", "0xe883b46f02ecd0e624082fe6ff12af0337ba0cde", "0xbdc56eb866933e7ba827fa293d4545ef2a350ce2"]
}
}
}
},
"params": {
"maximumExtraDataSize": "0x20",
"minGasLimit": "0x1388",
"gasLimitBoundDivisor": "0x400",
"networkID" : "0x11"
},
"genesis": {
"seal": {
"authorityRound": {
"step": "0x0",
"signature": "0x0000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000"
}
},
"difficulty": "0x20000",
"gasLimit": "0x1312D00"
},
"accounts": {
"0x0000000000000000000000000000000000000001": { "balance": "1", "builtin": { "name": "ecrecover", "pricing": { "linear": { "base": 3000, "word": 0 } } } },
"0x0000000000000000000000000000000000000002": { "balance": "1", "builtin": { "name": "sha256", "pricing": { "linear": { "base": 60, "word": 12 } } } },
"0x0000000000000000000000000000000000000003": { "balance": "1", "builtin": { "name": "ripemd160", "pricing": { "linear": { "base": 600, "word": 120 } } } },
"0x0000000000000000000000000000000000000004": { "balance": "1", "builtin": { "name": "identity", "pricing": { "linear": { "base": 15, "word": 3 } } } },
"0x00Ea169ce7e0992960D3BdE6F5D539C955316432": { "balance": "1606938044258990275541962092341162602522202993782792835301376" }
}
}
You are querying the wrong balance:
0x0037a6b811ffeb6e072da21179d11b1406371c63 is not in your validator node list.
You only specified the following validators:
"list": [
"0xa19b0e4f7ba1d5f74960c0aad794756a0a16eab4",
"0x9c8f23e0a9377bd98322f8333142eadbaed200e8",
"0x2f2033e303d4bf17403521e0c1830bac4ba09323",
"0xe883b46f02ecd0e624082fe6ff12af0337ba0cde",
"0xbdc56eb866933e7ba827fa293d4545ef2a350ce2"
]
Try to get the balance of these nodes.