Stripe Error: No signatures found matching the expected signature for payload - google-cloud-platform

I have a Stripe webhook that calls a Firebase function. In this function I need to verify that the request comes from Stripe's servers. Here is the code:
const functions = require('firebase-functions');
const bodyParser = require('body-parser');
const stripe = require("stripe")("sk_test_****");
const endpointSecret = 'whsec_****';
const app = require('express')();

app.use(bodyParser.json({
  verify: function (req, res, buf) {
    var url = req.originalUrl;
    if (url.startsWith('/webhook')) {
      req.rawBody = buf.toString()
    }
  }
}));

app.post('/webhook/example', (req, res) => {
  let sig = req.headers["stripe-signature"];
  try {
    console.log(req.bodyRaw)
    let event = stripe.webhooks.constructEvent(req.body, sig, endpointSecret);
    console.log(event);
    res.status(200).end()
    // Do something with event
  }
  catch (err) {
    console.log(err);
    res.status(400).end()
  }
});

exports.app = functions.https.onRequest(app);
As mentioned in the Stripe documentation, I have to use the raw body to perform this security check.
I have tried with my current code and with:
app.use(require('body-parser').raw({type: '*/*'}));
But I always get this error:
Error: No signatures found matching the expected signature for payload. Are you passing the raw request body you received from Stripe? https://github.com/stripe/stripe-node#webhook-signing

Cloud Functions automatically parses body content of known types. If you're getting JSON, then it's already parsed and available to you in req.body. You shouldn't need to add other body parsing middleware.
If you need to process the raw data, you should use req.rawBody, but I don't think you'll need to do that here.
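In case you do need the raw payload (Stripe's signature check is one such case), a minimal sketch of a Firebase HTTPS function that leans on the rawBody Cloud Functions already provides could look like this (the key and secret values are placeholders):

const functions = require('firebase-functions');
const stripe = require('stripe')('sk_test_****');   // placeholder key
const endpointSecret = 'whsec_****';                 // placeholder signing secret

exports.webhook = functions.https.onRequest((req, res) => {
  const sig = req.headers['stripe-signature'];
  try {
    // req.rawBody is populated by Cloud Functions, so no body-parser is needed
    const event = stripe.webhooks.constructEvent(req.rawBody, sig, endpointSecret);
    res.status(200).send({ received: true, type: event.type });
  } catch (err) {
    res.status(400).send(`Webhook Error: ${err.message}`);
  }
});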

Here is what is working for me:
Add this line:
app.use('/api/subs/stripe-webhook', bodyParser.raw({type: "*/*"}))
(The first argument specifies which route the raw body parser should be used on. See the app.use() reference doc.)
just before this line:
app.use(bodyParser.json());
(It doesn't affect all your routes, just this one: '/api/subs/stripe-webhook'.)
Note: If you are using Express 4.16+ you can replace bodyParser with express:
app.use('/api/subs/stripe-webhook', express.raw({type: "*/*"}));
app.use(express.json());
Then:
const endpointSecret = 'whsec_........'

const stripeWebhook = async (req, res) => {
  const sig = req.headers['stripe-signature'];
  let eventSecure = {}
  try {
    eventSecure = stripe.webhooks.constructEvent(req.body, sig, endpointSecret);
    //console.log('eventSecure :', eventSecure);
  }
  catch (err) {
    console.log('err.message :', err.message);
    res.status(400).send(`Webhook Secure Error: ${err.message}`)
    return
  }
  res.status(200).send({ received: true });
}
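The handler above would presumably be registered on the same path the raw parser was mounted on, for example:

app.post('/api/subs/stripe-webhook', stripeWebhook);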

Here is the code that is working for me:
app.use(bodyParser.json({
  verify: function (req, res, buf) {
    var url = req.originalUrl;
    if (url.startsWith('/stripe')) {
      req.rawBody = buf.toString();
    }
  }
}));
And then pass req.rawBody for verification:
stripe.checkWebHook(req.rawBody, signature);
Reference: https://github.com/stripe/stripe-node/issues/341
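checkWebHook looks like this author's own helper rather than a stripe-node method; a minimal sketch of such a wrapper, assuming it simply delegates to constructEvent, might be:

const stripe = require('stripe')('sk_test_****');   // placeholder key
const endpointSecret = 'whsec_****';                 // placeholder signing secret

// Hypothetical helper: verifies the signature and returns the parsed event
function checkWebHook(rawBody, signature) {
  return stripe.webhooks.constructEvent(rawBody, signature, endpointSecret);
}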

Two things to note:
Pass req.rawBody instead of req.body to constructEvent:
const event = stripe.webhooks.constructEvent(
  req.rawBody,
  sig,
  STRIPE_WEBHOOK_SECRET
);
Make sure you're using the correct webhook secret. It's unique per webhook URL!

2021 - Solution
I faced this error, and after a lot of research I could not figure out the problem easily, but I finally solved it based on my architecture below:
// App.js
this.server.use((req, res, next) => {
  if (req.originalUrl.startsWith('/webhook')) {
    next();
  } else {
    express.json()(req, res, next);
  }
});
// routes.js
routes.post(
  '/webhook-payment-intent-update',
  bodyParser.raw({ type: 'application/json' }),
  // your Stripe logic (I'm using a controller, but use whatever fits)
  (req, res) => {
    stripe.webhooks.constructEvent(...)
  }
)
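The constructEvent call elided above might look roughly like this (a sketch; how you handle the event is up to you):

(req, res) => {
  const sig = req.headers['stripe-signature'];
  try {
    // req.body is a raw Buffer here because bodyParser.raw is mounted on this route
    const event = stripe.webhooks.constructEvent(req.body, sig, endpointSecret);
    // ...handle event.type here
    res.status(200).send({ received: true });
  } catch (err) {
    res.status(400).send(`Webhook Error: ${err.message}`);
  }
}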
Two big warnings to pay attention to:
Make sure to send the req.headers['stripe-signature'].
Make sure your endpointSecret is right; if it isn't, it will keep reporting the same error.
Tips:
Test it locally by installing the Stripe CLI: https://stripe.com/docs/webhooks/test
Verify your key in the Stripe dashboard, or double-check that you have the right key by looking at your Stripe logs.
I hope it helps you. :)

// Use JSON parser for all non-webhook routes
app.use(
  bodyParser.json({
    verify: (req, res, buf) => {
      const url = req.originalUrl;
      if (url.startsWith('/api/stripe/webhook')) {
        req.rawBody = buf.toString();
      }
    }
  })
);
The above code looks just like the earlier answers, yet I still made a mistake: after putting in the same thing I got the same error.
Finally, I figured it out: it works if you configure body-parser below the rawBody verify code.
Like this:
// Use JSON parser for all non-webhook routes
app.use(
  bodyParser.json({
    verify: (req, res, buf) => {
      const url = req.originalUrl;
      if (url.startsWith('/api/stripe/webhook')) {
        req.rawBody = buf.toString();
      }
    }
  })
);

// Setup express response and body parser configurations
app.use(express.json());
app.use(bodyParser.json());
app.use(bodyParser.urlencoded({ extended: true }));
Hopefully, it'll help someone.

It is late, but it will help others.
GitHub answer:
const payload = req.body
const sig = req.headers['stripe-signature']
const payloadString = JSON.stringify(payload, null, 2);
const secret = 'webhook_secret';

const header = stripe.webhooks.generateTestHeaderString({
  payload: payloadString,
  secret,
});

let event;
try {
  event = stripe.webhooks.constructEvent(payloadString, header, secret);
} catch (err) {
  console.log(`Webhook Error: ${err.message}`)
  return res.status(400).send(`Webhook Error: ${err.message}`);
}

switch (event.type) {
  case 'checkout.session.completed': {
    // ......

If you are trying to add a Stripe webhook to your Next.js API route, here's how to do so (ref):
import initStripe from "stripe";
import { buffer } from "micro";
import { NextApiRequest, NextApiResponse } from "next";

export const config = { api: { bodyParser: false } };

const handler = async (req: NextApiRequest, res: NextApiResponse) => {
  const stripe = initStripe(process.env.STRIPE_SECRET_KEY || '');
  const signature = req.headers["stripe-signature"];
  const signingSecret = process.env.STRIPE_WEBHOOK_SECRET || '';
  const reqBuffer = await buffer(req);

  let event;
  try {
    event = stripe.webhooks.constructEvent(reqBuffer, signature, signingSecret);
  } catch (error: any) {
    console.log(error);
    return res.status(400).send(`Webhook error: ${error?.message}`);
  }

  console.log({ event });
  res.send({ received: true });
};

export default handler;
This uses buffer from the micro library, combined with disabling the default body parsing for this API route. In some frameworks (like Next.js), rawBody doesn't come out of the box, hence the workaround of retrieving the raw body via reqBuffer, which is what stripe.webhooks.constructEvent needs.

I was able to get data from one webhook but not from a second one. The problem was that I was using the same secret key as for the first webhook, but it turns out every webhook has a different key, which is why I kept getting that same message.
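In other words, each webhook endpoint configured in the dashboard has its own signing secret, so you have to pick the matching one. A small sketch of what that could look like (the paths and environment variable names are just an illustration):

// Each endpoint has its own whsec_... value in the Stripe dashboard
const secrets = {
  '/webhook/payments': process.env.STRIPE_WHSEC_PAYMENTS,
  '/webhook/subscriptions': process.env.STRIPE_WHSEC_SUBSCRIPTIONS,
};

const endpointSecret = secrets[req.path];
const event = stripe.webhooks.constructEvent(req.rawBody, sig, endpointSecret);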

AWS API Gateway + Lambda (Express.js CRUD): I'm using this for my Stripe webhook endpoint and it works for me:
app.use(require('body-parser').text({ type: "*/*" }));
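With the text parser, req.body arrives as a string, which constructEvent accepts directly. A minimal handler sketch under that assumption (the route path and secret variable are placeholders):

app.post('/stripe/webhook', (req, res) => {
  const sig = req.headers['stripe-signature'];
  try {
    // req.body is the raw JSON string thanks to bodyParser.text
    const event = stripe.webhooks.constructEvent(req.body, sig, process.env.STRIPE_WEBHOOK_SECRET);
    res.status(200).send({ received: true });
  } catch (err) {
    res.status(400).send(`Webhook Error: ${err.message}`);
  }
});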

This happened to me when sending a test webhook from the Stripe dashboard after I had renamed a Firebase Cloud Function. All my other functions were working fine. Solved by re-setting the secret in the terminal:
firebase functions:config:set stripe.webhook_signature="Your webhook signing secret"
(if you're using that) and redeploying the functions with firebase deploy --only functions.
On a second occasion I solved the problem by rolling the webhook signing secret in the Stripe dashboard.
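If you store the secret that way, the function reads it back with functions.config(). A rough sketch (it assumes the Stripe API key was also stored, under a key such as stripe.secret_key, which is an assumption here):

const functions = require('firebase-functions');
const stripe = require('stripe')(functions.config().stripe.secret_key); // stripe.secret_key is assumed

exports.webhook = functions.https.onRequest((req, res) => {
  // whsec_... value set via `firebase functions:config:set stripe.webhook_signature=...`
  const endpointSecret = functions.config().stripe.webhook_signature;
  const sig = req.headers['stripe-signature'];
  try {
    const event = stripe.webhooks.constructEvent(req.rawBody, sig, endpointSecret);
    res.status(200).send({ received: true });
  } catch (err) {
    res.status(400).send(`Webhook Error: ${err.message}`);
  }
});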

Please use this snippet:
app.use(
  bodyParser.json({
    verify: (req, res, buf) => {
      req.rawBody = buf;
    },
  })
);

My favorite was combining two of the great answers above.
Then you can use req.rawBody when you construct the event.
Replace "/webhook" with whatever route you want a raw body for.
app.use(
  "/webhook",
  express.json({
    verify: (req, res, buf) => {
      req.rawBody = buf.toString();
    },
  })
);
BEFORE
app.use(express.json());
Works well if you are using routes and controllers.

To use the raw body in Express for a specific endpoint via a separate middleware, my solution is simply to have the router use express.raw for the webhook endpoint.
- Node.js v12
- Express.js v4.17.1
export const handleBodyRequestParsing = (router: Router): void => {
  router.use('/your_webhook_endpoint', express.raw({ type: '*/*' }))
  router.use(express.json({ limit: '100mb' }))
  router.use(express.urlencoded({ extended: true }))
}

Here is a quick tip that may save you hours!
If you are adding Stripe payments to your existing Express app, you may already be parsing every request as JSON at the start of the application with middleware such as app.use(express.json()); or another body parser (body-parser, for example).
If you are doing that, change it so it skips your webhook URL.
Example:
Assume your payment webhook URL is /payments/webhook.
app.use((req, res, next) => {
  if (req.originalUrl.includes("/payments/webhook")) {
    next();
  } else {
    express.json()(req, res, next);
  }
});

When using Stripe in Express, if you have the following line in your code:
app.use(express.json());
it is going to prevent you from providing the raw body to Stripe, even when you explicitly set bodyParser.raw, and constructEvent will throw an error. This was the reason my code failed; I finally sorted it out.
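A minimal sketch of an ordering that avoids this, mounting the raw parser on the webhook path before the global JSON parser (the route path is just an example):

// Raw body only for the Stripe webhook route...
app.use('/stripe/webhook', express.raw({ type: 'application/json' }));
// ...and JSON parsing for everything else
app.use(express.json());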

I tried all the solutions above and none of them worked; I figured out that the only solution was not to use Express at all for this endpoint. You just have to create another HTTP function:
export const webhook = functions.https.onRequest(async (req, res) => {
  try {
    const sig = req.headers['stripe-signature']
    const endpointSecret = 'web_secret'
    const event = stripe.webhooks.constructEvent(
      req.rawBody,
      sig,
      endpointSecret
    )
    console.log(event.data.object)
    res.status(200).send(event.data.object)
  } catch (err) {
    console.error('an error occurred', err)
    res.status(400).send(`Webhook Error: ${err.message}`)
  }
})

Related

Forward uploaded file from a service to another service

I'm trying to create a REST API with ExpressJS that accepts an image and passes it to another service (with a POST request), which is in charge of performing some operations (resize, etc.) and storing it in AWS S3. I know the same thing could easily be done with a Lambda function directly, but I have a K8s cluster and I want to make it worth it.
All components are already working, with the exception of the service that forwards the image to the second service.
The idea I found on the internet is to use a stream, but I get the exception: Error: Expected a stream at Object.getStream [as default]
How can I solve this? Is this the right approach, or is there a better solution to achieve the same result?
const headers = req.headers;
const files: any = req.files
const filename = files[0].originalname;
const buffer = await getStream(files[0].stream)

const formFile = new FormData();
formFile.append('image', buffer, filename);

headers['Content-Type'] = 'multipart/form-data';

axios.post("http://localhost:1401/content/image/test/upload/", formFile, {
  headers: headers,
})
  .catch((error) => {
    const { status, data } = error.response;
    res.status(status).send(data);
  })
I've found a solution, which I'm posting here for anyone who runs into the same problem.
Install form-data in your Node project:
yarn add form-data
Then in your controller:
const FormData = require('form-data'); // from the form-data package installed above

const headers = req.headers;
const files: any = req.files

const formFile = new FormData();
files.forEach((file: any) => {
  const filename = file.originalname;
  const buffer = file.buffer
  formFile.append('image', buffer, filename);
})

// set the correct header otherwise it won't work
headers["content-type"] = `multipart/form-data; boundary=${formFile.getBoundary()}`

// now you can send the image to the second service
axios.post("http://localhost:1401/content/image/test/upload/", formFile, {
  headers: headers,
})
  .then((r: any) => {
    res.sendStatus(r.status).end()
  })
  .catch((error) => {
    const { status, data } = error.response;
    res.status(status).send(data);
  })

Step Function Triggered In a loop

I am starting a step function from a Lambda function, and the Lambda function is tied to an API Gateway. For some reason, when I try to test the Lambda function, I see hundreds of executions failing and running in a loop. I only triggered the step function once. I am missing something here; can you please advise?
const AWS = require("aws-sdk");
const uuidv4 = require("uuid/v4");

/*----------------------------------------------------------------------- */
/* Implementation */
/*----------------------------------------------------------------------- */
exports.handler = async event => {
  var _dt = await ExecuteStepFunction()
  return _dt;
}

function ExecuteStepFunction() {
  const stepFunctions = new AWS.StepFunctions();
  return new Promise((res, rej) => {
    var params = {
      stateMachineArn: 'arn:aws:states:us-east-1:xxxxxxxxxxxxx:stateMachine:xxTestSateMachine',
      input: JSON.stringify(''),
      name: uuidv4()
    };
    stepFunctions.startExecution(params, function (err, data) {
      if (err) {
        rej(err);
      }
      else {
        res(data);
      }
    });
  });
}
I tried the approach provided in this link (https://docs.aws.amazon.com/step-functions/latest/dg/tutorial-api-gateway.html), where API Gateway directly triggers the step function, but I received the following error. After trying to fix this, I moved to the above option of starting the step function from the Lambda function behind the API.
{
  "__type": "com.amazon.coral.service#UnrecognizedClientException",
  "message": "The security token included in the request is invalid"
}

Connect AWS mobile backend to DynamoDB

I am trying to use the AWS mobile backend (using a Lambda function) to insert into DynamoDB (also configured in the mobile backend), but with no success so far.
The relevant code:
'use strict';
console.log("Loading function");
const AWS = require('aws-sdk');
const docClient = new AWS.DynamoDB.DocumentClient({ region: process.env.MOBILE_HUB_PROJECT_REGION });

exports.handler = function(event, context, callback) {
  var responseCode = 200;
  var requestBody, pathParams, queryStringParams, headerParams, stage,
      stageVariables, cognitoIdentityId, httpMethod, sourceIp, userAgent,
      requestId, resourcePath;
  console.log("request: " + JSON.stringify(event));

  // Request Body
  requestBody = event.body;
  if (requestBody !== undefined && requestBody !== null) {
    // Set 'test-status' field in the request to test sending a specific response status code (e.g., 503)
    responseCode = JSON.parse(requestBody)['test-status'];
  }

  // Path Parameters
  pathParams = event.path;
  // Query String Parameters
  queryStringParams = event.queryStringParameters;
  // Header Parameters
  headerParams = event.headers;

  if (event.requestContext !== null && event.requestContext !== undefined) {
    var requestContext = event.requestContext;
    // API Gateway Stage
    stage = requestContext.stage;
    // Unique Request ID
    requestId = requestContext.requestId;
    // Resource Path
    resourcePath = requestContext.resourcePath;
    var identity = requestContext.identity;
    // Amazon Cognito User Identity
    cognitoIdentityId = identity.cognitoIdentityId;
    // Source IP
    sourceIp = identity.sourceIp;
    // User-Agent
    userAgent = identity.userAgent;
  }

  // API Gateway Stage Variables
  stageVariables = event.stageVariables;
  // HTTP Method (e.g., POST, GET, HEAD)
  httpMethod = event.httpMethod;

  // TODO: Put your application logic here...
  let params = {
    Item: {
      "prop1": 0,
      "prop2": "text"
    },
    TableName: "testTable"
  };
  docClient.put(params, function(data, err) {
    if (err)
      responseCode = 500;
    else {
      responseCode = 200;
      context.succeed(data);
    }
  });

  // For demonstration purposes, we'll just echo these values back to the client
  var responseBody = {
    requestBody: requestBody,
    pathParams: pathParams,
    queryStringParams: queryStringParams,
    headerParams: headerParams,
    stage: stage,
    stageVariables: stageVariables,
    cognitoIdentityId: cognitoIdentityId,
    httpMethod: httpMethod,
    sourceIp: sourceIp,
    userAgent: userAgent,
    requestId: requestId,
    resourcePath: resourcePath
  };
  var response = {
    statusCode: responseCode,
    headers: {
      "x-custom-header": "custom header value"
    },
    body: JSON.stringify(responseBody)
  };
  console.log("response: " + JSON.stringify(response))
  context.succeed(response);
};
This doesn't put the item into the table for some reason.
I gave the necessary permissions using the roles section; is there anything I am missing?
** responseCode is only for testing purposes.
Edit:
I tried the approach from "AWS node.js lambda request dynamodb but no response (no err, no return data)" and it doesn't work either.
Edit 2:
Added the full handler code (it's the default generated code when creating your first AWS Lambda).
I have refactored some bits of your code to look much simpler and use async/await (make sure to select Node 8.10 as the running environment for your function) instead of callbacks. I also got rid of the context and callback parameters, as they were used for older versions of NodeJS. Once you're using Node 8+, async/await should be the default option.
Also, it is possible to chain a .promise() on docClient.put, so you can easily await on it, making your code way simpler. I have left only the DynamoDB part (which is what is relevant to your question).
'use strict';
console.log("Loading function");
const AWS = require('aws-sdk');
const docClient = new AWS.DynamoDB.DocumentClient({ region: process.env.MOBILE_HUB_PROJECT_REGION });

exports.handler = async (event) => {
  let params = {
    Item: {
      "prop0": 1,
      "prop2": "text"
    },
    TableName: "testTable"
  };
  try {
    await docClient.put(params).promise();
  } catch (e) {
    console.log(e)
    return {
      message: e.message
    }
  }
  return { message: 'Data inserted successfully' };
};
Things to keep in mind if it still does not work:
Make sure your Lambda function has the right permissions to insert items into DynamoDB (AmazonDynamoDBFullAccess will do it).
You ALWAYS have to provide the partition key when inserting items into DynamoDB. In your example, the JSON only has two properties: prop1 and prop2. If neither of them is the partition key, your code will certainly fail (see the sketch after this list).
Make sure your table also exists.
If your code fails, just check the CloudWatch logs, as any exception is now captured and printed out on the console.
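For example, if the table's partition key were named id (a made-up key name purely for illustration), the Item would need to include it:

let params = {
  Item: {
    "id": "item-001",   // partition key (hypothetical name) is required
    "prop1": 0,
    "prop2": "text"
  },
  TableName: "testTable"
};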
The reason why no data is written to the table is that the call to DynamoDB put is asynchronous and will return by calling your callback. But during that time, the rest of the code continues to execute, and your function eventually finishes before the call to DynamoDB has a chance to complete.
You can use the async / await keywords to make your code synchronous:
async function writeToDynamoDB(params) {
  return new Promise((resolve, reject) => {
    // Note: the AWS SDK callback signature is (err, data)
    docClient.put(params, function(err, data) {
      if (err)
        reject(500);
      else
        resolve(data);
    });
  });
}

let params = ...
var data = await writeToDynamoDB(params)
You can find sample code I wrote (in Typescript) at https://github.com/sebsto/maxi80-alexa/blob/master/lambda/src/DDBController.ts

Is it possible to get project metadata in cloud function?

I want to get project-wide metadata (set in Compute Engine) from within my GCP Cloud Function. Is this possible?
Here is my attempt:
metadata.js:
const request = require('request-promise');

async function getMetaData(attr) {
  const url = `http://metadata.google.internal/computeMetadata/v1/project/attributes/${attr}`;
  const options = {
    headers: {
      'Metadata-Flavor': 'Google'
    }
  };
  return request(url, options)
    .then(response => {
      console.info(`Retrieve meta data successfully. meta data: ${response.body}`);
      return response.body;
    })
    .catch(err => {
      console.error('Retrieve meta data failed.', err);
    });
}

async function retrieveMetaData() {
  return {
    IT_EBOOKS_API: await getMetaData('IT_EBOOKS_API')
  };
}

module.exports = { getMetaData, retrieveMetaData };
cloud function index.js:
const { retrieveMetaData } = require('./metadata');

async function retrieveComputeMetadata(req, res) {
  const envVars = await retrieveMetaData();
  console.log('envVars: ', envVars);
  res.status(200).json(envVars);
}

exports.retrieveComputeMetadata = retrieveComputeMetadata;
When I test the cloud function, the logs show me an error:
Retrieve meta data failed. { StatusCodeError: 404 - "404 page not found\n" at new StatusCodeError (/srv/node_modules/request-promise-core/lib/errors.js:32:15) at ....
It seems the URL is not found.
The API you're looking to hit, '..v1/project/attributes/', isn't available. As Cloud Functions run on GAE Standard, there are details about which endpoints are available here.
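For comparison, the project ID endpoint is generally reachable from Cloud Functions; a sketch using the same request options (which metadata paths are exposed in your environment is an assumption here, so verify before relying on it):

const request = require('request-promise');

// Project ID is exposed by the metadata server, unlike custom project attributes
async function getProjectId() {
  const url = 'http://metadata.google.internal/computeMetadata/v1/project/project-id';
  return request(url, { headers: { 'Metadata-Flavor': 'Google' } });
}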

How to use onCall with aws-sdk-mock?

I would like the mock method to return different responses for consecutive calls to the same method.
I found that Sinon has onCall, which allows me to stub a method like below:
let stubCall = sandbox.stub(Math, 'random');
stubCall.onCall(0).returns(Promise.resolve(0));
stubCall.onCall(1).returns(Promise.resolve(-1));
but I don't know how to make this work with an AWS mock framework like this:
AWS.mock('CloudFormation', 'describeStacks', Promise.resolve(stackResponse));
I tried:
AWS.mock('CloudFormation', 'describeStacks', Promise.resolve(stackResponse)).onCall(0).returns(Promise.resolve(res));
and
let mockCall = AWS.mock('CloudFormation', 'describeStacks', Promise.resolve(res));
mockCall.onCall(0).returns(Promise.resolve(res));
but neither of them worked.
I found people discussing this issue, mentioning that since aws-sdk-mock uses Sinon, it should be able to use onCall. Has anyone used it successfully?
Since I use promises, I don't know what else I can do to return a different response each time the same method is called.
First, set the AWS SDK instance to be mocked:
const sinon = require('sinon');
const AWSMock = require('aws-sdk-mock');
const AWS_SDK = require('aws-sdk');

AWSMock.setSDKInstance(AWS_SDK);
Configure the stub that will be called:
const stub = sinon.stub();
stub.onCall(0).returns(1);
stub.onCall(1).returns(2);
Mock the service method. Make sure that you're mocking the exact signature of the method:
AWSMock.mock('CloudFormation', 'describeStacks', function(params, cb) {
  cb(null, stub());
});
Our mocked method in action:
const cf = new AWS_SDK.CloudFormation();

cf.describeStacks({}, (err, data) => {
  if (err) {
    console.error(err);
  }
  console.log(data); // 1
});

cf.describeStacks({}, (err, data) => {
  if (err) {
    console.error(err);
  }
  console.log(data); // 2
});