Flutter aws amplify not returning data when calling graphql api - amazon-web-services

On button click I call a GraphQL API that is connected to a Lambda function, and the function pulls data from a DynamoDB table. The query does not produce any error, but it doesn't give me any results either. I have also checked the CloudWatch logs and I don't see any trace of the function being called. I'm not sure what careless mistake I am making here.
Here is my api
void findUser() async {
  try {
    String graphQLDocument = '''query getUserById(\$userId: ID!) {
      getUserById(userId: \$id) {
        id
        name
      }
    }''';
    var operation = Amplify.API.query(
        request: GraphQLRequest<String>(
            document: graphQLDocument,
            variables: {'id': 'USER-14160000000'}));
    var response = await operation.response;
    var data = response.data;
    print('Query result: ' + data);
  } on ApiException catch (e) {
    print('Query failed: $e');
  }
}
Here is my lambda function -
const getUserById = require('./user-queries/getUserById');
exports.handler = async (event) => {
  var userId = event.arguments.userId;
  var name = event.arguments.name;
  var avatarUrl = event.arguments.avatarUrl;
  //console.log('Received Event - ', JSON.stringify(event,3));
  console.log(userId);
  switch(event.info.fieldName) {
    case "getUserById":
      return getUserById(userId);
  }
};
const AWS = require('aws-sdk');
const docClient = new AWS.DynamoDB.DocumentClient({region: 'ca-central-1'});
async function getUserById(userId) {
  const params = {
    TableName: "Bol-Table",
    KeyConditionExpression: 'pk = :hashKey and sk = :sortKey',
    ExpressionAttributeValues: {
      ':hashKey': userId,
      ':sortKey': 'USER'
    }
  };
  try {
    const Item = await docClient.query(params).promise();
    console.log(Item);
    return {
      id: Item.Items[0].pk,
      name: Item.Items[0].details.displayName,
      avatarUrl: Item.Items[0].details.avatarUrl,
      createdAt: Item.Items[0].details.createdAt,
      updatedAt: Item.Items[0].details.updatedAt
    };
  } catch(err) {
    console.log("BOL Error: ", err);
  }
}
module.exports = getUserById;
Upon button click I get this

Moving my comment to an answer:
Can you try changing your graphQLDocument to
String graphQLDocument = '''query getUserById(\$id: ID!) {
  getUserById(userId: \$id) {
    id
    name
  }
}''';
Your variable is declared as $userId but then used as $id. Try calling it $id in both places, matching the key in your variables map.

Your Flutter code is working fine, but the Lambda on the AWS side is returning a blank string "", so there is nothing to print.
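To confirm what the resolver is actually handing back to AppSync, here is a minimal sketch of the handler from the question with extra logging added (the logging and the default branch are my own additions, not part of the original code):

// Hypothetical variant of the question's handler that logs the resolved value
// before returning it, so you can see in CloudWatch whether the DynamoDB side
// or the Flutter side is the problem.
const getUserById = require('./user-queries/getUserById');

exports.handler = async (event) => {
  switch (event.info.fieldName) {
    case "getUserById": {
      const result = await getUserById(event.arguments.userId);
      // If this logs undefined or an empty object, the DynamoDB query
      // (not the Flutter client) is the part that needs fixing.
      console.log('Resolved user:', JSON.stringify(result));
      return result;
    }
    default:
      throw new Error(`Unknown field: ${event.info.fieldName}`);
  }
};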

Related

query on dynamodb through lambda using pk and sk returns undefined

I have a Lambda function that queries a DynamoDB table using pk and sk. For some reason the returned data is undefined. Checking the CloudWatch logs I see the error messages below. What typo am I making here?
2021-10-01T00:15:03.104Z cdd7201f-0c95-4283-9257-c07324998896 INFO BOL Data: undefined
2021-10-01T00:15:03.124Z cdd7201f-0c95-4283-9257-c07324998896 INFO BOL Error: TypeError: Cannot read property 'PK' of undefined
at getUserById (/var/task/getUserById.js:16:27)
at processTicksAndRejections (internal/process/task_queues.js:95:5)
Here is the lambda code the error is referring to -
const AWS = require('aws-sdk');
const docClient = new AWS.DynamoDB.DocumentClient();
async function getUserById(userId) {
  console.log('USERID:', userId);
  const params = {
    TableName: "Bol",
    KeyConditionExpression: 'PK = :hashKey and SK = :sortKey',
    ExpressionAttributeValues: {
      ':hashKey': userId,
      ':sortKey': 'USER'
    }
  };
  try {
    const { Item } = await docClient.query(params).promise();
    console.log('BOL Data:', Item);
    return { id: Item.PK, name: Item.Data.displayName };
  } catch(err) {
    console.log("BOL Error: ", err);
  }
}
module.exports = getUserById;
Below is the data I am supposed to receive on lambda -
It's the way you are initializing Item. docClient.query() returns an object with an Items array, not a single Item property, so destructuring { Item } gives you undefined.
You can try the approach below to get the objects:
try {
  const Item = await docClient.query(params).promise();
  Item.Items.forEach(function(item) {
    let buffer = item.Data + " -- " + item.PK;
    console.log("buffer ", buffer);
  });
} catch(err) {
  console.log("BOL Error: ", err);
}
There are multiple ways you can get the desired response; refer to the links below (and the sketch after them):
Making Requests with the Document Client
Formatting DynamoDB data to normal JSON in AWS Lambda
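If the goal is to return the first matching item, as in the original function, a minimal sketch, assuming the same table, key names, and attribute layout as the question, would be:

const AWS = require('aws-sdk');
const docClient = new AWS.DynamoDB.DocumentClient();

// Sketch only: TableName, PK/SK, and the Data attribute are taken from the question.
async function getUserById(userId) {
  const params = {
    TableName: "Bol",
    KeyConditionExpression: 'PK = :hashKey and SK = :sortKey',
    ExpressionAttributeValues: {
      ':hashKey': userId,
      ':sortKey': 'USER'
    }
  };
  const result = await docClient.query(params).promise();
  // query() resolves to { Items: [...], Count, ... }, so read the first element.
  if (!result.Items || result.Items.length === 0) {
    return null; // no matching user
  }
  const item = result.Items[0];
  return { id: item.PK, name: item.Data.displayName };
}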

Session leak on spanner insert from cloud function

I'm trying to insert data into Spanner through a Cloud Function, using a POST request. I think I'm doing everything as described in the documentation, and I just can't understand what causes the following error:
"Error: 1 session leak(s) detected.
at _requests.onIdle.then (/srv/node_modules/@google-cloud/spanner/build/src/session-pool.js:193:25)
at <anonymous>"
And there is my cloud function
const {Spanner} = require('@google-cloud/spanner');
module.exports.http = (req, res) => {
  const projectId = 'project-id';
  const instanceId = 'instance-id';
  const databaseId = 'database-id';
  const spanner = new Spanner({
    projectId: projectId,
  });
  const instance = spanner.instance(instanceId);
  const database = instance.database(databaseId);
  let sqlResponse = "";
  database.runTransaction(async (err, transaction) => {
    if (err) {
      res.status(500).send(JSON.stringify({message: err, requestBody: req.body}));
      return;
    }
    try {
      const data = req.body;
      const [rowCount] = await transaction.runUpdate({
        sql:
          'INSERT Feedbacks (age, comment, gender, rating) VALUES (@age, @comment, @gender, @rating)',
        params: {
          age: data.age.toString(),
          comment: data.comment,
          gender: data.gender,
          rating: data.rating.toString(),
        },
      });
      sqlResponse = 'Successfully inserted ' + rowCount + ' record into the Feedbacks table.';
      await transaction.commit();
      res.status(200).send(JSON.stringify({message: sqlResponse, requestBody: req.body}));
    } catch (err) {
      res.status(500).send(JSON.stringify({message: err, requestBody: req.body}));
    } finally {
      database.close();
    }
  });
};
Your code appears to be correct. As noted by @Mayeru in the comments on your question, the first thing to confirm is that you're inserting a new record with a unique value for your table's primary key column.
Another possibility that could be causing the issue you are encountering is that you are trying to test the function using the "Testing" UI of the Cloud Console's Cloud Functions > "Function details" section. If so then you may be either using an empty request body or a malformed request body when you click the "Test the function" button. In the "Triggering event" textarea that appears above the "Test the function" button, make sure you have entered a valid JSON request body which includes the elements and values that your INSERT statement expects.
For example a "Triggering event" JSON request body like the following should work:
{"singerId":"1001","firstName":"Test","lastName":"Singer"}
Using the following "nodeInsert" function that's similar to the code you've shared:
const {Spanner} = require('@google-cloud/spanner');
module.exports.nodeInsert = (req, res) => {
  const projectId = 'my-project';
  const instanceId = 'my-instance';
  const databaseId = 'my-database';
  const spanner = new Spanner({
    projectId: projectId,
  });
  const instance = spanner.instance(instanceId);
  const database = instance.database(databaseId);
  let sqlResponse = "";
  database.runTransaction(async (err, transaction) => {
    if (err) {
      res
        .status(500)
        .send(JSON.stringify({message: err, requestBody: req.body}));
      transaction.end();
      console.error('Transaction terminated.');
      return;
    }
    try {
      const data = req.body;
      const parsedSingerId = parseInt(data.singerId, 10);
      const [rowCount] = await transaction.runUpdate({
        sql:
          'INSERT Singers (SingerId, FirstName, LastName) VALUES (@singerId, @firstName, @lastName)',
        params: {
          singerId: parsedSingerId,
          firstName: data.firstName,
          lastName: data.lastName,
        },
      });
      sqlResponse = 'Successfully inserted ' + rowCount + ' record into the Singers table.';
      await transaction.commit();
      res
        .status(200)
        .send(JSON.stringify({message: sqlResponse, requestBody: req.body}));
    } catch (err) {
      res
        .status(500)
        .send(JSON.stringify({message: err, requestBody: req.body}));
      transaction.end();
      console.error('Transaction terminated.');
    } finally {
      database.close();
    }
  });
};

Using aws-lambda to insert data into DynamoDB

When trying to place a new entry into the table, the execution of the put function is skipped. Funny, it used to work, and now for whatever reason it doesn't. allValues is an array and contains the data fully. Whenever I call saveData it executes everything apart from the put function.
I have tried it a few times and it worked fine, but suddenly it doesn't place the entry anymore. I tried to play around, but can't seem to find the issue. Might it be caused by the asynchronous calls?
//Amazon sdk
let AWS = require("aws-sdk");
let db = require('database');
//Amazon config
AWS.config.update({
  region: "us-east-1",
  endpoint: "https://dynamodb.us-east-1.amazonaws.com"
});
//DynamoDb client
var docClient = new AWS.DynamoDB.DocumentClient();
//Axios will handle HTTP requests to web service
let axios = require('axios');
//The ID of the student's data that I will download
let requestID = '*********';
//URL where student data is available
let url = '*************************';
exports.handler = async (event) => {
  //Get synthetic data
  let allValues = (await axios.get(url + requestID)).data.target;
  let allStart = (await axios.get(url + requestID)).data.start;
  for(let i = 0; i < allValues.length; i++){
    //db.saveData(i, allValues[i], allStart);
  }
  db.saveData();
};
and my database.js
let AWS = require("aws-sdk");
//Create new DocumentClient
let documentClient = new AWS.DynamoDB.DocumentClient();
//Returns all of the connection IDs
//module.exports.saveData = async (sId, sValue, sTime) => {
module.exports.saveData = async () => {
  let params = {
    TableName: "SynData",
    Item: {
      "synId": 66,
      "synValue": 1579.21,
      "synTime": "2019-01-01"
    }
  };
  //return documentClient.put(params).promise();
  documentClient.put(params, function(err, data) {
    if (err) {
      console.error("Unable to add item. Error JSON:", JSON.stringify(err, null, 2));
    } else {
      console.log("Added item:", JSON.stringify(data, null, 2));
    }
  });
};
The logs do not show any return from the put function.
saveData() is async, therefore it returns a promise. You need to await on it. Also, I don't understand why you gave up on the .promise() call. Using callbacks is outdated and much more convoluted.
Your code should look like this:
module.exports.saveData = async () => {
  let params = {
    TableName: "SynData",
    Item: {
      "synId": 66,
      "synValue": 1579.21,
      "synTime": "2019-01-01"
    }
  };
  return await documentClient.put(params).promise();
};
On your client, just await on saveData():
for(let i = 0; i < allValues.length; i++){
  await db.saveData(i, allValues[i], allStart);
}
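Putting it together, here is a minimal sketch of the database module plus the handler, restoring the parameterized saveData signature that is commented out in the question (the attribute names are assumptions carried over from the original hard-coded item):

// database.js -- sketch restoring the parameterized signature from the question's comment
let AWS = require("aws-sdk");
let documentClient = new AWS.DynamoDB.DocumentClient();

module.exports.saveData = async (sId, sValue, sTime) => {
  let params = {
    TableName: "SynData",
    Item: {
      "synId": sId,       // attribute names kept from the original hard-coded item
      "synValue": sValue,
      "synTime": sTime
    }
  };
  // Return the promise so the caller can await the write.
  return documentClient.put(params).promise();
};

// handler -- the requires, requestID and url are declared exactly as in the question
exports.handler = async (event) => {
  let allValues = (await axios.get(url + requestID)).data.target;
  let allStart = (await axios.get(url + requestID)).data.start;
  for (let i = 0; i < allValues.length; i++) {
    // Awaiting each put keeps the Lambda alive until the writes finish.
    await db.saveData(i, allValues[i], allStart);
  }
};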

Amazon Lambda writing to DynamoDB

I'm new to DynamoDB and I'm trying to write some data to the table using Lambda.
So far I have this:
'AddFood': function () {
  var FoodName = this.event.request.intent.slots.FoodName.value;
  var FoodCalories = this.event.request.intent.slots.FoodCalories.value;
  console.log('FoodName : ' + FoodName);
  const params = {
    TableName: 'Foods',
    Item: {
      'id': {"S": 3},
      'calories': {"S": FoodCalories},
      'food': {"S": FoodName}
    }
  };
  writeDynamoItem(params, myResult => {
    var say = '';
    say = myResult;
    say = FoodName + ' with ' + FoodCalories + ' calories has been added ';
    this.response.speak(say).listen('try again');
    this.emit(':responseReady');
  });
  function writeDynamoItem(params, callback) {
    var AWS = require('aws-sdk');
    AWS.config.update({region: AWSregion});
    var docClient = new AWS.DynamoDB();
    console.log('writing item to DynamoDB table');
    docClient.putItem(params, function (err, data) {
      if (err) {
        callback(err, null)
      } else {
        callback(null, data)
      }
    });
  }
}
Does anyone know why the data is not appearing in the database?
I have checked the IAM and the policy is set to AmazonDynamoDBFullAccess.
After making a few changes to the write function, the following code allowed me to write items to the database:
function writeDynamoItem(params, callback) {
  const AWS = require('aws-sdk');
  AWS.config.update({region: AWSregion});
  const docClient = new AWS.DynamoDB.DocumentClient({region: 'eu-west-1'});
  console.log('writing item to DynamoDB table');
  docClient.put(params, function (err, data) {
    if (err) {
      callback(err, null)
      console.error("Unable to write item. Error JSON:", JSON.stringify(err, null, 2))
    } else {
      callback(null, data)
    }
  });
}
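One caveat worth noting: DocumentClient.put() expects plain JavaScript values, not the typed attribute maps ({"S": ...}) used with the low-level DynamoDB.putItem(). A minimal sketch of a params object that would pair with the function above, reusing the FoodName and FoodCalories slot values from the question's AddFood handler, is:

// Item shape for DocumentClient.put(): plain values, no {"S": ...} type descriptors.
const params = {
  TableName: 'Foods',
  Item: {
    id: '3',                // string id; with putItem this would have been {"S": "3"}
    calories: FoodCalories, // Alexa slot values from the question are already strings
    food: FoodName
  }
};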
To write to DynamoDB from a Lambda function using Python, use the boto3 package and load the dynamodb resource. The code below reads food and calories from the event and writes them with a generated UUID. Hope this helps:
import boto3
import os
import uuid

def writeToDynamo(event, context):
    recordId = str(uuid.uuid4())
    food = event["food"]
    calories = event["calories"]
    print('Generating new DynamoDB record, with ID: ' + recordId)
    print('Input food: ' + food)
    print('Input calories: ' + calories)
    # Creating new record in DynamoDB table
    dynamodb = boto3.resource('dynamodb')
    table = dynamodb.Table(os.environ['DB_TABLE_NAME'])
    table.put_item(
        Item={
            'id': recordId,
            'food': food,
            'calories': calories
        }
    )
    return recordId

AWS DynamoDB returns validation error when called from AWS Lambda

I'm using AWS Lambda and trying to write something to AWS DynamoDB. I use the following code:
var tableName = "locations";
var item = {
  deviceId: {
    S: event.deviceId
  },
  timestamps: {
    S: event.timestamp
  }
}
var params = {
  TableName: tableName,
  Item: item
};
dynamo.putItem(params, function(err, data) {
  if (err) {
    context.fail(new Error('Error ' + err));
  } else {
    context.success(null);
  }
});
And I get the following error:
returns Error ValidationException: One or more parameter values were invalid: Type mismatch for key deviceId expected: S actual: M
This happened because the AWS SDK for Node.js has changed!
If you are using:
var doc = require('dynamodb-doc');
var dynamo = new doc.DynamoDB();
Then the parameters to the putItem call (and most other calls) have changed and instead needs to be:
var tableName = "locations";
var item = {
  deviceId: event.deviceId,
  timestamp: event.timestamp,
  latitude: Number(event.latitude),
  longitude: Number(event.longitude)
}
var params = {
  TableName: tableName,
  Item: item
};
Read all about the new sdk here: https://github.com/awslabs/dynamodb-document-js-sdk
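If you would rather not depend on dynamodb-doc, the same plain-value item shape works with the DocumentClient that ships with the aws-sdk package. A minimal sketch, keeping the item from the answer above but using an async handler with .promise() instead of the original context.fail/success callbacks (that handler style is my assumption, not part of the original code):

var AWS = require('aws-sdk');
var docClient = new AWS.DynamoDB.DocumentClient();

exports.handler = async (event) => {
  var params = {
    TableName: "locations",
    Item: {
      deviceId: event.deviceId,
      timestamp: event.timestamp,
      latitude: Number(event.latitude),
      longitude: Number(event.longitude)
    }
  };
  // DocumentClient.put() accepts native JavaScript types,
  // so no {S: ...} attribute-type wrappers are needed.
  await docClient.put(params).promise();
};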