How can I insert the following data into an AWS DynamoDB table?
{
    "userId": "4",
    "gpId": "44",
    "uname": "username",
    "position": "Cloud Solution Engineer",
    "companyName": "xyz Technologies",
    "skills": [{"linux": "1", "windows": "2", "Docker": "3"}]
}
I have tried the following code in Node.js:
var userId = event.userId;
var gpId = event.gpId;
var fbId = event.fbId;
var uname = event.uname;
var position = event.position;
var role = event.role;
var companyName = event.companyName;
var skills = event.skills;

dynamodb.putItem({
    "TableName": tableName,
    "Item": {
        "userId": {"N": userId},
        "gpId": {"N": gpId},
        "uname": {"S": uname},
        "position": {"S": position},
        "role": {"S": role},
        "companyName": {"S": companyName},
        "skills": {"SS": skills}
    }
}, function(err, data) {
    if (err) console.log(err, err.stack);
    else console.log(data);
});
When I run this code, it gives me the following error:
{
    "errorMessage": "Expected params.Item['skills'].S to be a string",
    "errorType": "InvalidParameterType",
    "stackTrace": [
        "ParamValidator.fail (/var/runtime/node_modules/aws-sdk/lib/param_validator.js:50:37)",
        "ParamValidator.validateType (/var/runtime/node_modules/aws-sdk/lib/param_validator.js:218:10)",
        "ParamValidator.validateString (/var/runtime/node_modules/aws-sdk/lib/param_validator.js:150:14)",
        "ParamValidator.validateScalar (/var/runtime/node_modules/aws-sdk/lib/param_validator.js:130:21)",
        "ParamValidator.validateMember (/var/runtime/node_modules/aws-sdk/lib/param_validator.js:94:21)",
        "ParamValidator.validateStructure (/var/runtime/node_modules/aws-sdk/lib/param_validator.js:75:14)",
        "ParamValidator.validateMember (/var/runtime/node_modules/aws-sdk/lib/param_validator.js:88:21)",
        "ParamValidator.validateMap (/var/runtime/node_modules/aws-sdk/lib/param_validator.js:117:14)",
        "ParamValidator.validateMember (/var/runtime/node_modules/aws-sdk/lib/param_validator.js:92:21)",
        "ParamValidator.validateStructure (/var/runtime/node_modules/aws-sdk/lib/param_validator.js:75:14)"
    ]
}
I tried changing {"S": skills} to just skills, and it gives me the following error:
{
    "errorMessage": "Unexpected key '0' found in params.Item['skills']",
    "errorType": "UnexpectedParameter",
    "stackTrace": [
        "ParamValidator.fail (/var/runtime/node_modules/aws-sdk/lib/param_validator.js:50:37)",
        "ParamValidator.validateStructure (/var/runtime/node_modules/aws-sdk/lib/param_validator.js:77:14)",
        "ParamValidator.validateMember (/var/runtime/node_modules/aws-sdk/lib/param_validator.js:88:21)",
        "ParamValidator.validateMap (/var/runtime/node_modules/aws-sdk/lib/param_validator.js:117:14)",
        "ParamValidator.validateMember (/var/runtime/node_modules/aws-sdk/lib/param_validator.js:92:21)",
        "ParamValidator.validateStructure (/var/runtime/node_modules/aws-sdk/lib/param_validator.js:75:14)",
        "ParamValidator.validateMember (/var/runtime/node_modules/aws-sdk/lib/param_validator.js:88:21)",
        "ParamValidator.validate (/var/runtime/node_modules/aws-sdk/lib/param_validator.js:34:10)",
        "Request.VALIDATE_PARAMETERS (/var/runtime/node_modules/aws-sdk/lib/event_listeners.js:109:42)",
        "Request.callListeners (/var/runtime/node_modules/aws-sdk/lib/sequential_executor.js:105:20)"
    ]
}
I have also tried applying a Map, as suggested in an answer, but it gives me the following error:
{
    "errorMessage": "There were 3 validation errors:\n* UnexpectedParameter: Unexpected key 'linux' found in params.Item['skills'].M['0']\n* UnexpectedParameter: Unexpected key 'windows' found in params.Item['skills'].M['0']\n* UnexpectedParameter: Unexpected key 'Docker' found in params.Item['skills'].M['0']",
    "errorType": "MultipleValidationErrors",
    "stackTrace": [
        "* UnexpectedParameter: Unexpected key 'linux' found in params.Item['skills'].M['0']",
        "* UnexpectedParameter: Unexpected key 'windows' found in params.Item['skills'].M['0']",
        "* UnexpectedParameter: Unexpected key 'Docker' found in params.Item['skills'].M['0']",
        "ParamValidator.validate (/var/runtime/node_modules/aws-sdk/lib/param_validator.js:40:28)",
        "Request.VALIDATE_PARAMETERS (/var/runtime/node_modules/aws-sdk/lib/event_listeners.js:109:42)",
        "Request.callListeners (/var/runtime/node_modules/aws-sdk/lib/sequential_executor.js:105:20)",
        "callNextListener (/var/runtime/node_modules/aws-sdk/lib/sequential_executor.js:95:12)",
        "/var/runtime/node_modules/aws-sdk/lib/event_listeners.js:75:9",
        "finish (/var/runtime/node_modules/aws-sdk/lib/config.js:308:7)",
        "/var/runtime/node_modules/aws-sdk/lib/config.js:324:9",
        "EnvironmentCredentials.get (/var/runtime/node_modules/aws-sdk/lib/credentials.js:126:7)",
        "getAsyncCredentials (/var/runtime/node_modules/aws-sdk/lib/config.js:318:24)",
        "Config.getCredentials (/var/runtime/node_modules/aws-sdk/lib/config.js:338:9)"
    ]
}
skills is not a string set (SS); it should use the Map attribute type (M). First unwrap the single-element array with the following 0-index assignment:
var skills = event.skills[0];
http://docs.aws.amazon.com/amazondynamodb/latest/APIReference/API_AttributeValue.html
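Putting it together, a minimal sketch of the corrected putItem call (assuming each skill level is stored as a string, and reusing the variables from the question):

var skills = event.skills[0]; // {"linux": "1", "windows": "2", "Docker": "3"}

dynamodb.putItem({
    "TableName": tableName,
    "Item": {
        "userId": {"N": userId},
        "gpId": {"N": gpId},
        "uname": {"S": uname},
        "position": {"S": position},
        "companyName": {"S": companyName},
        // M maps attribute names to nested AttributeValue objects
        "skills": {
            "M": {
                "linux": {"S": skills.linux},
                "windows": {"S": skills.windows},
                "Docker": {"S": skills.Docker}
            }
        }
    }
}, function(err, data) {
    if (err) console.log(err, err.stack);
    else console.log(data);
});

If the levels are meant to be numeric, use {"N": skills.linux} instead; note that the low-level API still takes N values as strings.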
For the past 3 days, about 3% of the requests to our Lambdas have been failing. They fail due to connection timeouts to other AWS services; see the stack traces from the same Lambda init below.
2021-10-30T16:37:33.310Z 7954e15a-8ae7-491e-880b-f5b532bde961 INFO TypeError: Unable to generate certificate due to
RequestError: Error: connect ETIMEDOUT 52.4.211.23:443
at /var/task/node_modules/cognito-express/lib/strategy.js:42:23
at bound (domain.js:416:15)
at runBound (domain.js:427:12)
at tryCatcher (/var/task/node_modules/bluebird/js/release/util.js:16:23)
at Promise._settlePromiseFromHandler (/var/task/node_modules/bluebird/js/release/promise.js:547:31)
at Promise._settlePromise (/var/task/node_modules/bluebird/js/release/promise.js:604:18)
at Promise._settlePromise0 (/var/task/node_modules/bluebird/js/release/promise.js:649:10)
at Promise._settlePromises (/var/task/node_modules/bluebird/js/release/promise.js:725:18)
at _drainQueueStep (/var/task/node_modules/bluebird/js/release/async.js:93:12)
at _drainQueue (/var/task/node_modules/bluebird/js/release/async.js:86:9)
at Async._drainQueues (/var/task/node_modules/bluebird/js/release/async.js:102:5)
at Immediate.Async.drainQueues [as _onImmediate] (/var/task/node_modules/bluebird/js/release/async.js:15:14)
at processImmediate (internal/timers.js:464:21)
at process.topLevelDomainCallback (domain.js:147:15)
at process.callbackTrampoline (internal/async_hooks.js:129:24)
2021-10-30T16:44:18.380Z 25392661-b635-4b73-9aed-67e655f13364 ERROR Unhandled Promise Rejection
{
    "errorType": "Runtime.UnhandledPromiseRejection",
    "errorMessage": "SequelizeConnectionError: connect ETIMEDOUT",
    "reason": {
        "errorType": "SequelizeConnectionError",
        "errorMessage": "connect ETIMEDOUT",
        "name": "SequelizeConnectionError",
        "parent": {
            "errorType": "Error",
            "errorMessage": "connect ETIMEDOUT",
            "code": "ETIMEDOUT",
            "errorno": "ETIMEDOUT",
            "syscall": "connect",
            "fatal": true,
            "stack": [
                "Error: connect ETIMEDOUT",
                " at Connection._handleTimeoutError (/var/task/node_modules/mysql2/lib/connection.js:189:17)",
                " at listOnTimeout (internal/timers.js:557:17)",
                " at processTimers (internal/timers.js:500:7)"
            ]
        },
        "original": {
            "errorType": "Error",
            "errorMessage": "connect ETIMEDOUT",
            "code": "ETIMEDOUT",
            "errorno": "ETIMEDOUT",
            "syscall": "connect",
            "fatal": true,
            "stack": [
                "Error: connect ETIMEDOUT",
                " at Connection._handleTimeoutError (/var/task/node_modules/mysql2/lib/connection.js:189:17)",
                " at listOnTimeout (internal/timers.js:557:17)",
                " at processTimers (internal/timers.js:500:7)"
            ]
        },
        "stack": [
            "SequelizeConnectionError: connect ETIMEDOUT",
            " at ConnectionManager.connect (/var/task/node_modules/sequelize/lib/dialects/mysql/connection-manager.js:126:17)",
            " at processTicksAndRejections (internal/process/task_queues.js:95:5)",
            " at async ConnectionManager._connect (/var/task/node_modules/sequelize/lib/dialects/abstract/connection-manager.js:318:24)",
            " at async /var/task/node_modules/sequelize/lib/dialects/abstract/connection-manager.js:250:32",
            " at async ConnectionManager.getConnection (/var/task/node_modules/sequelize/lib/dialects/abstract/connection-manager.js:280:7)",
            " at async /var/task/node_modules/sequelize/lib/sequelize.js:613:26",
            " at async MySQLQueryInterface.select (/var/task/node_modules/sequelize/lib/dialects/abstract/query-interface.js:953:12)",
            " at async Function.findAll (/var/task/node_modules/sequelize/lib/model.js:1753:21)",
            " at async /var/task/src/routes/root/index_routes.js:20:18"
        ]
    },
    "promise": {},
    "stack": [
        "Runtime.UnhandledPromiseRejection: SequelizeConnectionError: connect ETIMEDOUT",
        " at process.<anonymous> (/var/runtime/index.js:35:15)",
        " at process.emit (events.js:412:35)",
        " at process.emit (domain.js:470:12)",
        " at processPromiseRejections (internal/process/promises.js:245:33)",
        " at processTicksAndRejections (internal/process/task_queues.js:96:32)"
    ]
}
Here is the MySQL init code:
if (global.sequelize != null) {
    console.count('\x1b[32mRESEARCH: connection exported from globals instead of creation\x1b[0m');
    module.exports = global.sequelize;
} else {
    console.count('\x1b[31mRESEARCH: new connection created\x1b[0m');
    global.sequelize = new Sequelize(
        s.sqlDbName,
        s.sqlUsername,
        s.sqlPassword, {
            host: s.sqlDbHost,
            dialect: 'mysql',
            // to print out the query + its time
            // check if this causes performance issues
            benchmark: true,
            pool: {
                max: 5,
                min: 0,
                idle: 10000
            }
        });
    module.exports = global.sequelize;
}
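For context, a minimal sketch of how a handler might consume this module (the require path and the query are assumptions, not from the original code):

// hypothetical handler showing the warm-container reuse that the
// global.sequelize cache above is aiming for
const sequelize = require('./db'); // assumed path to the init module above

exports.handler = async (event) => {
    // warm container: reuses the cached connection pool;
    // cold start: the init code above creates a new one
    const [rows] = await sequelize.query('SELECT 1 AS ok');
    return rows;
};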
It's only some of the requests, but it's causing a lot of errors for our users. I couldn't detect the root cause.
This seems to be solved by slightly changing the Lambda's VPC configuration. It had 2 subnets, one public and one private; removing the public one fixed it. I'm not sure why it worked; maybe it forces the Lambda to connect to the DB and Cognito from an internal IP. (A plausible explanation: a Lambda ENI never gets a public IP, so outbound traffic routed through a public subnet's internet gateway is dropped, while a private subnet can route out through a NAT gateway.)
I'm trying to create a DynamoDB table item resource which contains a DynamoDB List AttributeValue:
resource "aws_dynamodb_table_item" "job" {
table_name = var.some_table.id
hash_key = var.some_table.hash_key
item = <<ITEM
{
"partitionKey": {"S": "JOBID#1"},
"workloads": [{ "S" : "w1" }, { "S" : "w2" }]
}
ITEM
}
but it fails with:
Error: Invalid format of "item": Decoding failed: json: cannot unmarshal array into Go value of type dynamodb.AttributeValue
It works OK if workloads is a string type, e.g. {"S": "w1"}, but not when it is a list. What am I doing wrong? Is this resource able to create List AttributeValues?
I'm using Terraform v1.0.0.
It should be:
"partitionKey": {"S": "JOBID#1"},
"workloads": {"L": [{ "S" : "w1" }, { "S" : "w2" }]}
where L is for list. The format is documented in the DynamoDB AttributeValue API reference: http://docs.aws.amazon.com/amazondynamodb/latest/APIReference/API_AttributeValue.html
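Putting it together, the full resource from the question becomes:

resource "aws_dynamodb_table_item" "job" {
  table_name = var.some_table.id
  hash_key   = var.some_table.hash_key

  item = <<ITEM
{
  "partitionKey": {"S": "JOBID#1"},
  "workloads": {"L": [{"S": "w1"}, {"S": "w2"}]}
}
ITEM
}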
I am trying to connect to Snowflake from an EMR cluster launched by the Airflow EMR operator, but I'm getting the following error:
py4j.protocol.Py4JJavaError: An error occurred while calling
o147.load. : java.lang.ClassNotFoundException: Failed to find data
source: net.snowflake.spark.snowflake. Please find packages at
http://spark.apache.org/third-party-projects.html
These are the steps I am adding to my EmrAddStepsOperator to run the script load_updates.py, and I am passing my Snowflake packages in "Args":
STEPS = [
    {
        "Name": "convo_facts",
        "ActionOnFailure": "TERMINATE_CLUSTER",
        "HadoopJarStep": {
            "Jar": "command-runner.jar",
            "Args": ["spark-submit", "s3://dev-data-lake/spark_files/cf/load_updates.py",
                     "--packages net.snowflake:snowflake-jdbc:3.8.0,net.snowflake:spark-snowflake_2.11:2.4.14-spark_2.4",
                     "INPUT=s3://dev-data-lake/table_exports/public/",
                     "OUTPUT=s3://dev-data-lake/emr_output/cf/"]
        }
    }
]
JOB_FLOW_OVERRIDES = {
    'Name': 'cftest',
    'LogUri': 's3://dev-data-lake/emr_logs/cf/log.txt',
    'ReleaseLabel': 'emr-5.32.0',
    'Instances': {
        'InstanceGroups': [
            {
                'Name': 'Master nodes',
                'Market': 'ON_DEMAND',
                'InstanceRole': 'MASTER',
                'InstanceType': 'r6g.4xlarge',
                'InstanceCount': 1,
            },
            {
                'Name': 'Slave nodes',
                'Market': 'ON_DEMAND',
                'InstanceRole': 'CORE',
                'InstanceType': 'r6g.4xlarge',
                'InstanceCount': 3,
            }
        ],
        'KeepJobFlowAliveWhenNoSteps': True,
        'TerminationProtected': False
    },
    'Applications': [{
        'Name': 'Spark'
    }],
    'JobFlowRole': 'EMR_EC2_DefaultRole',
    'ServiceRole': 'EMR_DefaultRole'
}
And this is how I am adding Snowflake credentials in my load_updates.py script to extract data into a PySpark dataframe:
# Set options below
sfOptions = {
    "sfURL": "xxxx.us-east-1.snowflakecomputing.com",
    "sfUser": "user",
    "sfPassword": "xxxx",
    "sfDatabase": "",
    "sfSchema": "PUBLIC",
    "sfWarehouse": ""
}

SNOWFLAKE_SOURCE_NAME = "net.snowflake.spark.snowflake"

query_sql = """select * from cf"""

messages_new = spark.read.format(SNOWFLAKE_SOURCE_NAME) \
    .options(**sfOptions) \
    .option("query", query_sql) \
    .load()
I'm not sure what I am missing here or where I am going wrong.
The option --packages should be placed before s3://.../load_updates.py in the spark-submit command; everything after the application file is passed to the application itself, so the packages string was being treated as an application argument. Note also that --packages and its value need to be separate list elements.
Try this:
STEPS = [
{
"Name": "convo_facts",
"ActionOnFailure": "TERMINATE_CLUSTER",
"HadoopJarStep": {
"Jar": "command-runner.jar",
"Args": [
"spark-submit",
"--packages",
"net.snowflake:snowflake-jdbc:3.8.0,net.snowflake:spark-snowflake_2.11:2.4.14-spark_2.4",
"s3://dev-data-lake/spark_files/cf/load_updates.py",
"INPUT=s3://dev-data-lake/table_exports/public/",
"OUTPUT=s3://dev-data-lake/emr_output/cf/"
]
}
}
]
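As a side note, anything after the script path in "Args" shows up in sys.argv inside load_updates.py, which is why the misplaced --packages string was silently swallowed as an application argument. A sketch of parsing the KEY=value arguments (a hypothetical helper, assuming this simple format):

import sys

# e.g. ["INPUT=s3://dev-data-lake/table_exports/public/", "OUTPUT=s3://dev-data-lake/emr_output/cf/"]
args = dict(arg.split("=", 1) for arg in sys.argv[1:])
input_path = args["INPUT"]
output_path = args["OUTPUT"]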
The schema:
type User {
  id: ID!
  createdCurricula: [Curriculum]
}

type Curriculum {
  id: ID!
  title: String!
  creator: User!
}
The resolver to query all curricula of a given user:
{
    "version": "2017-02-28",
    "operation": "Query",
    "query": {
        ## Provide a query expression. **
        "expression": "userId = :userId",
        "expressionValues": {
            ":userId": {
                "S": "${context.source.id}"
            }
        }
    },
    "index": "userIdIndex",
    "limit": #if(${context.arguments.limit}) ${context.arguments.limit} #else 20 #end,
    "nextToken": #if(${context.arguments.nextToken}) "${context.arguments.nextToken}" #else null #end
}
The response map:
{
    "items": $util.toJson($context.result.items),
    "nextToken": #if(${context.result.nextToken}) "${context.result.nextToken}" #else null #end
}
The query:
query {
  getUser(id: "0b6af629-6009-4f4d-a52f-67aef7b42f43") {
    id
    createdCurricula {
      title
    }
  }
}
The error:
{
    "data": {
        "getUser": {
            "id": "0b6af629-6009-4f4d-a52f-67aef7b42f43",
            "createdCurricula": null
        }
    },
    "errors": [
        {
            "path": [
                "getUser",
                "createdCurricula"
            ],
            "locations": null,
            "message": "Can't resolve value (/getUser/createdCurricula) : type mismatch error, expected type LIST"
        }
    ]
}
The CurriculumTable has a global secondary index titled userIdIndex, which has userId as the partition key.
If I change the response map to this:
$util.toJson($context.result.items)
The output is the following:
{
    "data": {
        "getUser": {
            "id": "0b6af629-6009-4f4d-a52f-67aef7b42f43",
            "createdCurricula": null
        }
    },
    "errors": [
        {
            "path": [
                "getUser",
                "createdCurricula"
            ],
            "errorType": "MappingTemplate",
            "locations": [
                {
                    "line": 4,
                    "column": 5
                }
            ],
            "message": "Unable to convert \n{\n [{\"id\":\"87897987\",\"title\":\"Test Curriculum\",\"userId\":\"0b6af629-6009-4f4d-a52f-67aef7b42f43\"}],\n} to class java.lang.Object."
        }
    ]
}
If I take that string and run it through a console.log in my frontend app, I get:
{
[{"id":"2","userId":"0b6af629-6009-4f4d-a52f-67aef7b42f43"},{"id":"1","userId":"0b6af629-6009-4f4d-a52f-67aef7b42f43"}]
}
That's clearly an object. How do I make it... not an object, so that AppSync properly reads it as a list?
SOLUTION
My response map had a set of curly braces around it. I'm pretty sure those were placed there by Amazon's template generator. Removing them fixed it.
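In other words, for a field whose type is [Curriculum], the response mapping template should evaluate to the list itself; after removing the braces (and the nextToken line, if pagination isn't needed), the template above is just:

$util.toJson($context.result.items)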
I think I'm not seeing the complete view of your schema; I was expecting something like:
schema {
  query: Query
}
where Query is the root query type. In fact, you didn't share your Query definition with us. Assuming you have the right Query definition, the main problem is in your response template.
> "items": $util.toJson($context.result.items)
This means that you are passing a collection named "items" to the GraphQL query engine while referring to it as "createdCurricula". The response mapping template is the right place to fix this; just replace the above line with the following:
"createdCurricula": $util.toJson($context.result.items),
The main thing to note here is that the mapping template is a bridge between your data sources and GraphQL. Feel free to do any computation or name mapping, but don't forget that the object names in the response JSON are the ones that must match the schema/query definition.
Thanks.
Musema
Change the result mapping to $util.toJson($ctx.result.data.posts).
The exception message says that it expected type LIST.
Looking at:
{
[{"id":"2","userId":"0b6af629-6009-4f4d-a52f-67aef7b42f43"},{"id":"1","userId":"0b6af629-6009-4f4d-a52f-67aef7b42f43"}]
}
I don't see that createdCurricula is a LIST.
What is currently in DDB is:
"id": "0b6af629-6009-4f4d-a52f-67aef7b42f43",
"createdCurricula": null
I read online about scripting in the AWS Elasticsearch Service. It said that AWS ES doesn't support dynamic scripting, so I am writing aggregations using scripts stored on disk. I wrote the following query:
{
    "query": {
        "match_all": {}
    },
    "aggs": {
        "inBoundRecieved": {
            "scripted_metric": {
                "init_script": {
                    "file": "init.groovy"
                },
                "map_script": {
                    "file": "map.groovy"
                },
                "combine_script": {
                    "file": "comb.groovy"
                },
                "params": {
                    "field": "call_direction"
                },
                "reduce_script": {
                    "file": "red.groovy"
                }
            }
        }
    }
}
But I keep getting this error:
Parse Failure [Unknown key for a START_OBJECT in [inBoundRecieved]: [init_script]
I have searched a lot online but couldn't find a good solution.
Full error:
{
"error" : "SearchPhaseExecutionException[Failed to execute phase [query], all shards failed; shardFailures {[M-Sp4ZKmQCW0C4Ph2FIA1Q][plivoredshift][0]: RemoteTransportException[[Merlin][inet[/x.x.x.x:y]][indices:data/read/search[phase/query]]]; nested: SearchParseException[[plivoredshift][0]: query[ConstantScore(*:*)], from[-1],size[-1]: Parse Failure [Failed to parse source [{ \"query\":{ \"match_all\":{} }, \"aggs\":{ \"inBoundRecieved\":{ \"scripted_metric\":{ \"init_script\":{ \"file\": \"init.groovy\" }, \"map_script\": { \"file\": \"map.groovy\" }, \"combine_script\": { \"file\":\"comb.groovy\" }, \"params\": { \"field\":\"call_direction\" }, \"reduce_script\": { \"file\": \"red.groovy\" } } } }}]]]; nested: SearchParseException[[plivoredshift][0]: query[ConstantScore(*:*)],from[-1],size[-1]: Parse Failure [Unknown key for a START_OBJECT in [inBoundRecieved]: [init_script].]]; }{[M-Sp4ZKmQCW0C4Ph2FIA1Q][plivoredshift][1]: RemoteTransportException[[Merlin][inet[/x.x.x.x:y]][indices:data/read/ search[phase/query]]]; nested: SearchParseException[[plivoredshift][1]: query[ConstantScore(*:*)],from[-1],size[-1]: Parse Failure [Failed to parse source [{ \"query\":{ \"match_all\":{} }, \"aggs\":{ \"inBoundRecieved\":{ \"scripted_metric\":{ \"init_script\":{ \"file\": \"init.groovy\" }, \"map_script\": { \"file\": \"map.groovy\" }, \"combine_script\": { \"file\":\"comb.groovy\" }, \"params\":{ \"field\": \"call_direction\" }, \"reduce_script\": { \"file\": \"red.groovy\" } } } }}]]]; nested: SearchParseException[[plivoredshift][1]: query[ConstantScore(*:*)],from[-1],size[-1]: Parse Failure [Unknown key for a START_OBJECT in [inBoundRecieved]: [init_script].]]; }{[M-Sp4ZKmQCW0C4Ph2FIA1Q][plivoredshift][2]: RemoteTransportException[[Merlin][inet[/x.x.x.x:y]][indices:data/read/search[phase/query]]]; nested: SearchParseException[[plivoredshift][2]: query[ConstantScore(*:*)],from[-1],size[-1]: Parse Failure [Failed to parse source [{ \"query\":{ \"match_all\":{} }, \"aggs\":{ \"inBoundRecieved\":{ \"scripted_metric\":{ \"init_script\":{ \"file\": \"init. groovy\" }, \"map_script\": { \"file\": \"map.groovy\" }, \"combine_script\": { \"file\":\"comb.groovy\" }, \"params\":{ \"field\":\"call_direction\" }, \"reduce_script\": { \"file\": \"red.groovy\" } } } }}]]]; nested: SearchParseException[[plivoredshift][2]: query[ConstantScore(*:*)],from[-1],size[-1]: Parse Failure [Unknown key for a START_OBJECT in [inBoundRecieved]: [init_script]. ]]; }{[M-Sp4ZKmQCW0C4Ph2FIA1Q][plivoredshift][3]: RemoteTransportException[[Merlin][inet[/x.x.x.x:y]][indices:data/read/search[phase/query]]]; nested: SearchParseException[[plivoredshift][3]: query[ConstantScore(*:*)],from[-1],size[-1]: Parse Failure [Failed to parse source [{ \"query\":{ \"match_all\":{} }, \"aggs\":{ \"inBoundRecieved\":{ \"scripted_metric\":{ \"init_script\":{ \"file\": \"init. groovy\" }, \"map_script\": { \"file\": \"map.groovy\" }, \"combine_script\": { \"file\":\"comb.groovy\" }, \"params\":{ \"field\":\"call_direction\" }, \"reduce_script\": { \"file\": \"red.groovy\" } } } }}]]]; nested: SearchParseException[[plivoredshift][3]: query[ConstantScore(*:*)],from[-1],size[-1]: Parse Failure [Unknown key for a START_OBJECT in [inBoundRecieved]: [init_script].
"status":400
}
Here are my scripts:
init.groovy
_agg['transactions'] = []
map.groovy
if (doc['call_direction'].value == "inbound") {_agg.transactions.add(1)} else {_agg.transactions.add(0)}
comb.groovy
inBoundRecieved = 0; for (t in _agg.transactions) {inBoundRecieved += t}; return inBoundRecieved
red.groovy
inBoundRecieved = 0; for (a in _aggs) {inBoundRecieved += a}; return inBoundRecieved
I have been following this tutorial from the Elasticsearch website.
It looks like scripting can now be used in the AWS Elasticsearch Service as of version 5:
http://docs.aws.amazon.com/elasticsearch-service/latest/developerguide/aes-supported-resources.html
https://forums.aws.amazon.com/thread.jspa?threadID=217896&start=25&tstart=0
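On an ES 5.x domain, the same aggregation can be written with inline Painless scripts instead of script files; a minimal sketch, assuming inline scripting is allowed on the domain (in 5.x, scripted_metric state lives on params._agg / params._aggs):

{
    "query": {
        "match_all": {}
    },
    "aggs": {
        "inBoundRecieved": {
            "scripted_metric": {
                "init_script": "params._agg.transactions = []",
                "map_script": "params._agg.transactions.add(doc['call_direction'].value == 'inbound' ? 1 : 0)",
                "combine_script": "int total = 0; for (def t : params._agg.transactions) { total += t; } return total;",
                "reduce_script": "int total = 0; for (def a : params._aggs) { total += a; } return total;"
            }
        }
    }
}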