I tried every possible solution on the internet with no luck.
What I am trying to do is simply use AWS Lambda functions (through the AWS console) to fetch the user's FCM token from, let's say, DynamoDB (not included in the question), use that token to create an EndpointArn, and send a push notification to that specific device.
I tested sending via the SNS console and the push reaches the device successfully, but I failed to deliver it from the Lambda function, even though it returns a success status and a MessageId.
Here is the code I used:
// Load the AWS SDK for Node.js
var AWS = require('aws-sdk');
// Set region
AWS.config.update({region: 'us-east-1'});
const sns = new AWS.SNS();

const sampleMessage = {
    "GCM": {
        "notification": {
            "body": "Sample message for Android endpoints",
            "title": "Title Test"
        }
    }
};

exports.handler = async (event) => {
    const snsPayload = JSON.stringify(sampleMessage);
    const response = {
        statusCode: 200,
        body: JSON.stringify('Hello from Lambda!'),
    };
    const params = {
        PlatformApplicationArn: '<Platform Arn>',
        Token: '<FCM Token>'
    };
    try {
        const endpointData = await sns.createPlatformEndpoint(params).promise();
        const paramsMessage = {
            Message: snsPayload,
            TargetArn: endpointData.EndpointArn
        };
        var publishTextPromise = await sns.publish(paramsMessage).promise();
        response.MessageId = publishTextPromise.MessageId;
        response.result = 'Success';
    }
    catch (e) {
        console.log(e.stack);
        response.result = 'Error';
    }
    return response;
};
After some trial and error I figured out the solution to my own question:
1- The GCM part of the payload should be a string, not a JSON object.
2- The message params should include an attribute that explicitly sets the MIME type of the payload to JSON.
Taking all that into consideration:
const GCM_data = {
    'notification': {
        'body': 'Hello from lambda function',
        'title': 'Notification Title'
    }
};
const data = {
    "GCM": JSON.stringify(GCM_data)
};
const snsPayload = JSON.stringify(data);
and the params should look like:
const paramsMessage = {
    Message: snsPayload,
    TargetArn: endpointData.EndpointArn,
    MessageStructure: 'json'
};
and this will work :)
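Putting it all together, here is a minimal sketch of the full working handler with both fixes applied (the platform ARN and FCM token are placeholders, exactly as in the question):
// Load the AWS SDK for Node.js
var AWS = require('aws-sdk');
AWS.config.update({region: 'us-east-1'});
const sns = new AWS.SNS();

// Fix 1: the GCM value must be a STRING containing JSON, not a nested object
const GCM_data = {
    'notification': {
        'body': 'Hello from lambda function',
        'title': 'Notification Title'
    }
};
const snsPayload = JSON.stringify({ "GCM": JSON.stringify(GCM_data) });

exports.handler = async (event) => {
    const response = { statusCode: 200 };
    const params = {
        PlatformApplicationArn: '<Platform Arn>',
        Token: '<FCM Token>'
    };
    try {
        const endpointData = await sns.createPlatformEndpoint(params).promise();
        const publishResult = await sns.publish({
            Message: snsPayload,
            TargetArn: endpointData.EndpointArn,
            // Fix 2: tell SNS to parse Message as JSON and route the GCM key
            MessageStructure: 'json'
        }).promise();
        response.MessageId = publishResult.MessageId;
        response.result = 'Success';
    } catch (e) {
        console.log(e.stack);
        response.result = 'Error';
    }
    return response;
};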
I've been trying to solve this problem all day, looking everywhere on the web and even in the official AWS documentation. Why does this error keep appearing when I try to send a message to the client through this code?
P.S. I use SDK v3 with Node.js 18.
import {
  ApiGatewayManagementApiClient,
  PostToConnectionCommand,
} from "@aws-sdk/client-apigatewaymanagementapi";

export const handler = async (event) => {
  const domain = event.requestContext.domainName;
  const stage = event.requestContext.stage;
  const connectionId = event.requestContext.connectionId;
  const callbackUrl = `https://${domain}/${stage}`;
  const client = new ApiGatewayManagementApiClient({ endpoint: callbackUrl });

  const requestParams = {
    ConnectionId: connectionId,
    Data: "Hello!",
  };
  const command = new PostToConnectionCommand(requestParams);
  try {
    await client.send(command);
  } catch (error) {
    console.log(error);
  }
  return {
    statusCode: 200,
  };
};
fef17825-58ce-4ca7-8f38-85857f1aef0a Task timed out after 3.01 seconds
I tried every online guide and video; can anyone help me?
I am using an API Gateway WebSocket.
When I send a message to the client from a Lambda function, it sometimes doesn't send a message and other times it sends 2-3 messages.
const AWS = require("aws-sdk");

const api = new AWS.ApiGatewayManagementApi({
    endpoint: process.env.API_ENDPOINT
});

exports.handler = async (event) => {
    console.log(event);
    const body = JSON.parse(event.body);
    const connectionId = event.requestContext.connectionId;
    sendMessage(connectionId, "My Message");
    return {};
};

const sendMessage = (connectionId, response) => {
    const data = { message: response };
    const params = {
        ConnectionId: connectionId,
        Data: Buffer.from(JSON.stringify(data))
    };
    return api.postToConnection(params).promise();
};
Actually I had to await the sendMessage function. This solved my problem.
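In other words, the handler becomes (a minimal sketch of that one-line fix):
exports.handler = async (event) => {
    console.log(event);
    const connectionId = event.requestContext.connectionId;
    // Awaiting ensures postToConnection completes before the handler
    // returns, instead of being cut off or retried unpredictably
    await sendMessage(connectionId, "My Message");
    return {};
};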
CURRENTLY
I am trying to get AWS Textract working for images supplied from a function in Google Scripts, sent to a Lambda resolver. I am following the documentation at https://docs.aws.amazon.com/AWSJavaScriptSDK/latest/AWS/Textract.html#analyzeDocument-property
My Google Scripts code:
function googleFunction(id) {
  let file = DriveApp.getFileById(id);
  console.log("File is a " + file.getMimeType());
  let blob = file.getBlob();
  let params = {
    doc: blob,
  };
  var options = {
    method: "PUT",
    "Content-Type": "application/json",
    payload: JSON.stringify(params),
  };
  let response = UrlFetchApp.fetch("https://api-path/prod/resolver", options);
}
My Lambda resolver code:
"use strict";
const AWS = require("aws-sdk");
exports.handler = async (event) => {
let params = JSON.parse(event.body);
console.log("Parse as document...");
let textract = new AWS.Textract();
let doc = params["doc"];
let config = {
Document: {
Bytes: doc,
FeatureTypes: ["TABLES"],
}
};
textract.analyzeDocument(config, function (err, data) {
console.log("analyzing...");
if (err) {
console.log(err, err.stack);
}
// an error occurred
else {
console.log("data:" + JSON.stringfy(data));
} // successful response
});
};
ISSUE
File is successfully sent from Google Scripts to Lambda, but the following error is returned:
"errorType": "InvalidParameterType",
"errorMessage": "Expected params.Document.Bytes to be a string, Buffer, Stream, Blob, or typed array object"
Questions
Is there a way of verifying what the format of the doc variable is, to ensure it meets AWS Textract's requirements?
Can anyone see a possible cause for the errors being returned?
NOTES
Textract works fine when the same file is uploaded to an S3 bucket and supplied in the config using:
S3Object: { Bucket: 'bucket_name', Name: 'file_name' }
I have confirmed the file is a JPEG
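As a quick answer to the first question above, the received value can be inspected on the Lambda side before calling Textract (a hypothetical debugging snippet, not part of the original code):
let doc = params["doc"];
// a Google Scripts byte array arrives as a plain JSON array of numbers
console.log(typeof doc, Array.isArray(doc));  // e.g. "object true"
console.log(Buffer.isBuffer(doc));            // false until Buffer.from() is applied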
Got it working with 2 changes:
1- added getBytes() to the Google-side code
2- added Buffer.from() to the AWS-side code
getBytes() returns the file contents as a byte array, which survives JSON.stringify as a plain array of numbers; Buffer.from() then rebuilds those numbers into a Buffer, one of the types Textract accepts for Document.Bytes.
My Google Scripts code:
function googleFunction(id) {
let file = DriveApp.getFileById(id);
console.log("File is a " + file.getMimeType());
let blob = file.getBlob().getBytes();
let params = {
doc: blob,
};
var options = {
method: "PUT",
"Content-Type": "application/json",
payload: JSON.stringify(params),
};
let response = UrlFetchApp.fetch("https://api-path/prod/resolver", options);
}
My Lambda resolver code:
"use strict";
const AWS = require("aws-sdk");
exports.handler = async (event) => {
let params = JSON.parse(event.body);
console.log("Parse as document...");
let textract = new AWS.Textract();
let doc = params["doc"];
let config = {
Document: {
Bytes: Buffer.from(doc),
FeatureTypes: ["TABLES"],
}
};
textract.analyzeDocument(config, function (err, data) {
console.log("analyzing...");
if (err) {
console.log(err, err.stack);
}
// an error occurred
else {
console.log("data:" + JSON.stringfy(data));
} // successful response
});
};
I am trying to use an AWS mobile backend (using a Lambda function) to insert into DynamoDB (also configured in the mobile backend), but with no success so far.
The relevant code:
'use strict';
console.log("Loading function");

const AWS = require('aws-sdk');
const docClient = new AWS.DynamoDB.DocumentClient({region: process.env.MOBILE_HUB_PROJECT_REGION});

exports.handler = function(event, context, callback) {
    var responseCode = 200;
    var requestBody, pathParams, queryStringParams, headerParams, stage,
        stageVariables, cognitoIdentityId, httpMethod, sourceIp, userAgent,
        requestId, resourcePath;
    console.log("request: " + JSON.stringify(event));

    // Request Body
    requestBody = event.body;
    if (requestBody !== undefined && requestBody !== null) {
        // Set 'test-status' field in the request to test sending a specific response status code (e.g., 503)
        responseCode = JSON.parse(requestBody)['test-status'];
    }
    // Path Parameters
    pathParams = event.path;
    // Query String Parameters
    queryStringParams = event.queryStringParameters;
    // Header Parameters
    headerParams = event.headers;
    if (event.requestContext !== null && event.requestContext !== undefined) {
        var requestContext = event.requestContext;
        // API Gateway Stage
        stage = requestContext.stage;
        // Unique Request ID
        requestId = requestContext.requestId;
        // Resource Path
        resourcePath = requestContext.resourcePath;
        var identity = requestContext.identity;
        // Amazon Cognito User Identity
        cognitoIdentityId = identity.cognitoIdentityId;
        // Source IP
        sourceIp = identity.sourceIp;
        // User-Agent
        userAgent = identity.userAgent;
    }
    // API Gateway Stage Variables
    stageVariables = event.stageVariables;
    // HTTP Method (e.g., POST, GET, HEAD)
    httpMethod = event.httpMethod;

    // TODO: Put your application logic here...
    let params = {
        Item: {
            "prop1": 0,
            "prop2": "text"
        },
        TableName: "testTable"
    };
    // Note: the DocumentClient callback signature is (err, data)
    docClient.put(params, function(err, data) {
        if (err)
            responseCode = 500;
        else {
            responseCode = 200;
            context.succeed(data);
        }
    });

    // For demonstration purposes, we'll just echo these values back to the client
    var responseBody = {
        requestBody: requestBody,
        pathParams: pathParams,
        queryStringParams: queryStringParams,
        headerParams: headerParams,
        stage: stage,
        stageVariables: stageVariables,
        cognitoIdentityId: cognitoIdentityId,
        httpMethod: httpMethod,
        sourceIp: sourceIp,
        userAgent: userAgent,
        requestId: requestId,
        resourcePath: resourcePath
    };
    var response = {
        statusCode: responseCode,
        headers: {
            "x-custom-header": "custom header value"
        },
        body: JSON.stringify(responseBody)
    };
    console.log("response: " + JSON.stringify(response));
    context.succeed(response);
};
This doesn't put the item into the table for some reason.
I granted the necessary permissions using the roles section; is there anything I am missing?
**responseCode is only for testing purposes.
Edit:
I tried the fix from "AWS node.js lambda request dynamodb but no response (no err, no return data)" but that doesn't work either.
Edit2:
Added the full handler code (it's the default code generated when creating your first AWS Lambda).
I have refactored some bits of your code to look much simpler and use async/await (make sure to select Node 8.10 as the runtime for your function) instead of callbacks. I also got rid of the context and callback parameters, as they were used for older versions of Node.js. Once you're on Node 8+, async/await should be the default option.
Also, it is possible to chain .promise() onto docClient.put, so you can easily await it, making your code much simpler. I have left only the DynamoDB part (which is what is relevant to your question).
'use strict';
console.log("Loading function");

const AWS = require('aws-sdk');
const docClient = new AWS.DynamoDB.DocumentClient({region: process.env.MOBILE_HUB_PROJECT_REGION});

exports.handler = async (event) => {
    let params = {
        Item: {
            "prop0": 1,
            "prop2": "text"
        },
        TableName: "testTable"
    };
    try {
        await docClient.put(params).promise();
    } catch (e) {
        console.log(e);
        return {
            message: e.message
        };
    }
    return { message: 'Data inserted successfully' };
};
Things to keep in mind if it still does not work:
Make sure your Lambda function has the right permissions to insert items into DynamoDB (AmazonDynamoDBFullAccess will do it)
You ALWAYS have to provide the partition key when inserting items into DynamoDB. In your example, the JSON only has two properties: prop1 and prop2. If neither of them is the partition key, your code will certainly fail.
Make sure your table also exists (see the sketch below)
If your code fails, just check the CloudWatch logs, as any exception is now captured and printed to the console.
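One way to check both that the table exists and which attribute is its partition key is DescribeTable (a hypothetical sanity check, assuming the table name from the question):
const AWS = require('aws-sdk');
const dynamodb = new AWS.DynamoDB({region: process.env.MOBILE_HUB_PROJECT_REGION});

// inside an async handler; throws ResourceNotFoundException if the table is missing
const tableInfo = await dynamodb.describeTable({ TableName: 'testTable' }).promise();
// KeySchema lists the partition (HASH) key that your Item must include
console.log(JSON.stringify(tableInfo.Table.KeySchema));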
The reason why no data is written to the table is that the call to DynamoDB put is asynchronous and returns by invoking your callback. But in the meantime, the rest of the code continues to execute and your function eventually finishes before the call to DynamoDB has had a chance to complete.
You can use the async / await keywords to make your code synchronous:
async function writeToDynamoDB(params) {
    return new Promise((resolve, reject) => {
        // DocumentClient callbacks receive (err, data)
        docClient.put(params, function(err, data) {
            if (err)
                reject(500);
            else
                resolve(data);
        });
    });
}

// inside your async handler:
let params = ...
var data = await writeToDynamoDB(params)
You can find sample code I wrote (in Typescript) at https://github.com/sebsto/maxi80-alexa/blob/master/lambda/src/DDBController.ts
I am trying to create an HTTP API in a Cloud Function that eventually publishes a message to Pub/Sub. Understood that there is a Pub/Sub REST API, but it forces me to set up authentication on the client side, which I would like to skip and move to the server side.
The code below is deployed as a Google Cloud Function with this command: gcloud functions deploy helloGET --runtime nodejs8 --trigger-http
But when tested in the browser, it errors out with Error: could not handle the request
Any suggestion is appreciated, thanks!
"use strict";
// [START functions_pubsub_setup]
const { PubSub } = require("#google-cloud/pubsub");
// Instantiates a client
const pubsub = new PubSub();
// [END functions_pubsub_setup]
const Buffer = require("safe-buffer").Buffer;
exports.helloGET = (req, res) => {
const topic = pubsub.topic("projects/myproject/topics/openit");
const message = {
data: {
message: "req.body.message"
}
};
// Publishes a message
res.send(
topic
.publish(message)
.then(() => res.status(200).send("Message published."))
.catch(err => {
err = `Catch block ... ${err}`;
console.error(err);
res.status(500).send(err);
return Promise.reject(err);
})
);
};
The code below works, but it takes around 30 seconds or more for the subscriber to receive the event, which is way too slow for my use case :S
"use strict";
const { PubSub } = require("#google-cloud/pubsub");
const pubsub = new PubSub();
const Buffer = require("safe-buffer").Buffer;
exports.helloGET = async (req, res) => {
var toPublish = `hello ${Date.now()}`;
publishMessage("_REPLACE_WITH_TOPIC_NAME_", toPublish);
res.send(`Published ${toPublish}`);
};
async function publishMessage(topicName, data) {
console.log("=> publishMessage, data = ", data);
const dataBuffer = Buffer.from(data);
const topic = pubsub.topic(topicName);
const publisher = topic.publisher();
publisher.publish(dataBuffer, { a: "XYZ" }, function() {
console.log("Published eventually ...");
});
}
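One plausible cause of the delay (my assumption, not confirmed in the original post): publishMessage is never awaited, so the function can respond and be idled before the publish actually goes out, and the message only flushes when the instance is next active. A sketch that awaits the publish before responding, using the same publisher() API as above:
exports.helloGET = async (req, res) => {
  var toPublish = `hello ${Date.now()}`;
  // assumption: keeping the instance alive until Pub/Sub acknowledges
  // avoids the delayed, background flush of the message
  await publishMessage("_REPLACE_WITH_TOPIC_NAME_", toPublish);
  res.send(`Published ${toPublish}`);
};

async function publishMessage(topicName, data) {
  const dataBuffer = Buffer.from(data);
  // with no callback argument, publish() returns a promise for the message ID
  const messageId = await pubsub.topic(topicName).publisher().publish(dataBuffer, { a: "XYZ" });
  console.log(`Published ${messageId}`);
}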