npm run custom-tests - sendHttpRequest failed non200Response - unit-testing

I'm trying to set up some SailsJS boilerplate I found on the web. My code is below; I'm thinking the problem is due to my config in routes where I set:
'GET /.temporary/csrf/token/for/tests': { action: 'security/grant-csrf-token' }
I don't have anything in api/controllers/ for security/grant-csrf-token. Do I have to generate such a thing?
const sails = require('sails');
before(function(done) {
// Increase the Mocha timeout so that Sails has enough time to lift, even if you have a bunch of assets.
this.timeout(10000);
try {
// Note that we mix in env vars, CLI opts, and the .sailsrc file using
// the `.getRc()` method, if possible. But also note that we mix in
// a few additional overrides to remove clutter from test output, ensure
// we are working with a clean database, etc.
const configOverrides = sails.getRc();
sails.lift(
{
// Your sails app's configuration files will be loaded automatically,
// but you can also specify any other special overrides here for testing purposes.
// For example, we might want to skip the Grunt hook,
// and disable all logs except errors and warnings:
...configOverrides,
hooks: {
...configOverrides.hooks,
grunt: false
},
models: {
...configOverrides.models,
migrate: 'drop'
},
log: {
...configOverrides.log,
level: 'warn'
},
routes: {
...configOverrides.routes,
// Provide a way to get a CSRF token:
'GET /.temporary/csrf/token/for/tests': { action: 'security/grant-csrf-token' }
},
policies: {
...configOverrides.policies,
// Poke a hole in any global policies to ensure the test runner can
// actually get access to a CSRF token.
'security/grant-csrf-token': true,
},
datastores: {
...configOverrides.datastores,
default: {
...(configOverrides.datastores || {}).default
// To have the tests run against a local mysql database, for example,
// add configuration here: (e.g. uncomment the two lines below)
// adapter: 'sails-mysql',
// url: 'mysql://root@127.0.0.1:3306/pba',
}
}
},
err => {
if (err) return done(err);
// First, get a cookie and a CSRF token.
sails.helpers.http.sendHttpRequest.with({
method: 'GET',
url: '/.temporary/csrf/token/for/tests',
baseUrl: sails.config.custom.baseUrl
}).exec((err, serverResponse) => {
if (err) return done(new Error('Test runner could not fetch CSRF token.\nDetails:\n' + err.stack));
When I run npm run custom-tests I get the following error:
PS C:\Users\Mercurius\Documents\GitHub\Homie-Web> npm run custom-tests
> homie@0.0.0 custom-tests C:\Users\Mercurius\Documents\GitHub\Homie-Web
> node ./node_modules/mocha/bin/mocha test/lifecycle.test.js test/integration/**/*.test.js
- GET //.temporary/csrf/token/for/tests (15ms 404)
1) "before all" hook
0 passing (3s)
1 failing
1) "before all" hook:
Test runner could not fetch CSRF token.
Details:
Exception: `sendHttpRequest` failed ("non200Response"). A non-2xx status code was returned from the server.
Server response:
{ statusCode: 404,
headers:
{ 'x-powered-by': 'Sails <sailsjs.com>',
'content-type': 'text/plain; charset=utf-8',
'content-length': '9',
etag: 'W/"9-0gXL1ngzMqISxa6S1zx3F4wtLyg"',
'set-cookie':
[ 'sails.sid=s%3AbfUzLBhmfPZ7EsQM9kSV4Bg2y_s0lA0e.b382JvWtrDt1pZhRuafObxxkoqwQaaFlOPqXrYmzNn0; Path=/; HttpOnly' ],
date: 'Wed, 05 Dec 2018 17:26:50 GMT',
connection: 'close' },
body: 'Not Found' }
at sails.lift.err (C:\Users\Mercurius\Documents\GitHub\Homie-Web\test\lifecycle.test.js:64:48)
at whenSailsIsReady (C:\Users\Mercurius\Documents\GitHub\Homie-Web\node_modules\sails\lib\app\lift.js:127:12)
at C:\Users\Mercurius\Documents\GitHub\Homie-Web\node_modules\async\dist\async.js:3861:9
at C:\Users\Mercurius\Documents\GitHub\Homie-Web\node_modules\async\dist\async.js:421:16
at replenish (C:\Users\Mercurius\Documents\GitHub\Homie-Web\node_modules\async\dist\async.js:941:25)
at iterateeCallback (C:\Users\Mercurius\Documents\GitHub\Homie-Web\node_modules\async\dist\async.js:931:17)
at C:\Users\Mercurius\Documents\GitHub\Homie-Web\node_modules\async\dist\async.js:906:16
at C:\Users\Mercurius\Documents\GitHub\Homie-Web\node_modules\async\dist\async.js:3858:13
at C:\Users\Mercurius\Documents\GitHub\Homie-Web\node_modules\sails\lib\app\private\initialize.js:91:14
at C:\Users\Mercurius\Documents\GitHub\Homie-Web\node_modules\async\dist\async.js:421:16
at iteratorCallback (C:\Users\Mercurius\Documents\GitHub\Homie-Web\node_modules\async\dist\async.js:998:13)
at C:\Users\Mercurius\Documents\GitHub\Homie-Web\node_modules\async\dist\async.js:906:16
at expressListening (C:\Users\Mercurius\Documents\GitHub\Homie-Web\node_modules\sails\lib\hooks\http\start.js:169:14)
at C:\Users\Mercurius\Documents\GitHub\Homie-Web\node_modules\async\dist\async.js:421:16
at processQueue (C:\Users\Mercurius\Documents\GitHub\Homie-Web\node_modules\async\dist\async.js:1565:20)
at taskComplete (C:\Users\Mercurius\Documents\GitHub\Homie-Web\node_modules\async\dist\async.js:1588:9)
at C:\Users\Mercurius\Documents\GitHub\Homie-Web\node_modules\async\dist\async.js:1612:17
at C:\Users\Mercurius\Documents\GitHub\Homie-Web\node_modules\async\dist\async.js:906:16
at async.auto.verify (C:\Users\Mercurius\Documents\GitHub\Homie-Web\node_modules\sails\lib\hooks\http\start.js:160:9)
at runTask (C:\Users\Mercurius\Documents\GitHub\Homie-Web\node_modules\async\dist\async.js:1619:13)
at C:\Users\Mercurius\Documents\GitHub\Homie-Web\node_modules\async\dist\async.js:1559:13
at processQueue (C:\Users\Mercurius\Documents\GitHub\Homie-Web\node_modules\async\dist\async.js:1569:13)
at taskComplete (C:\Users\Mercurius\Documents\GitHub\Homie-Web\node_modules\async\dist\async.js:1588:9)
at C:\Users\Mercurius\Documents\GitHub\Homie-Web\node_modules\async\dist\async.js:1612:17
at C:\Users\Mercurius\Documents\GitHub\Homie-Web\node_modules\async\dist\async.js:906:16
at Server.<anonymous> (C:\Users\Mercurius\Documents\GitHub\Homie-Web\node_modules\sails\lib\hooks\http\start.js:38:20)
at Object.onceWrapper (events.js:313:30)
at emitNone (events.js:111:20)
at Server.emit (events.js:208:7)
at emitListeningNT (net.js:1387:10)
at _combinedTickCallback (internal/process/next_tick.js:136:11)
at process._tickCallback (internal/process/next_tick.js:181:9)

Your test fails when requesting GET //.temporary/csrf/token/for/tests, while you defined your route as GET /.temporary/csrf/token/for/tests.
So you are definitely calling the wrong URL. It happens while you're using the helper:
sails.helpers.http.sendHttpRequest.with({
method: 'GET',
url: '/.temporary/csrf/token/for/tests',
baseUrl: sails.config.custom.baseUrl
})
My guess is that you either have a trailing / in your baseUrl, or one is being added inside the helper.
So what you need is just to call the right URL.
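For example, a quick way to rule out the trailing slash is to normalize the base URL before handing it to the helper. A minimal sketch, assuming baseUrl lives in sails.config.custom.baseUrl as in your lifecycle.test.js:
// Strip any trailing slash so '/.temporary/...' doesn't become '//.temporary/...'.
const baseUrl = sails.config.custom.baseUrl.replace(/\/+$/, '');
sails.helpers.http.sendHttpRequest.with({
  method: 'GET',
  url: '/.temporary/csrf/token/for/tests',
  baseUrl
}).exec((err, serverResponse) => {
  if (err) return done(new Error('Test runner could not fetch CSRF token.\nDetails:\n' + err.stack));
  // ...continue with the rest of the before() hook as before.
});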

Related

How to initialize ApolloClient in SvelteKit to work on both SSR and client side

I tried the code below, but it didn't work. I got an error: Error when evaluating SSR module /node_modules/cross-fetch/dist/browser-ponyfill.js:
<script lang="ts">
import fetch from 'cross-fetch';
import { ApolloClient, InMemoryCache, HttpLink } from "@apollo/client";
const client = new ApolloClient({
ssrMode: true,
link: new HttpLink({ uri: '/graphql', fetch }),
uri: 'http://localhost:4000/graphql',
cache: new InMemoryCache()
});
</script>
With SvelteKit, the subject of CSR vs. SSR and where data fetching should happen is a bit deeper than with other somewhat "similar" solutions. The guide below should help you connect some of the dots, but a couple of things need to be stated first.
To define a server side route create a file with the .js extension anywhere in the src/routes directory tree. This .js file can have all the import statements required without the JS bundles that they reference being sent to the web browser.
The @apollo/client package is quite large, as it contains the react dependency. Instead, you might want to consider importing just @apollo/client/core, even if you're setting up the Apollo Client to be used only on the server side, as the demo below shows. @apollo/client is not an ESM package; notice how it's imported below in order for the project to build successfully with the node adapter.
Try going through the following steps.
Create a new SvelteKit app and choose the 'SvelteKit demo app' option in the first step of the setup wizard. Answer the "Use TypeScript?" question with N, as well as all of the questions after it.
npm init svelte@next demo-app
cd demo-app
Modify the package.json accordingly. Optionally check for all packages updates with npx npm-check-updates -u
{
"name": "demo-app",
"version": "0.0.1",
"scripts": {
"dev": "svelte-kit dev",
"build": "svelte-kit build --verbose",
"preview": "svelte-kit preview"
},
"devDependencies": {
"#apollo/client": "^3.3.15",
"#sveltejs/adapter-node": "next",
"#sveltejs/kit": "next",
"graphql": "^15.5.0",
"node-fetch": "^2.6.1",
"svelte": "^3.37.0"
},
"type": "module",
"dependencies": {
"#fontsource/fira-mono": "^4.2.2",
"#lukeed/uuid": "^2.0.0",
"cookie": "^0.4.1"
}
}
Modify the svelte.config.js accordingly.
import node from '@sveltejs/adapter-node';
export default {
kit: {
// By default, `npm run build` will create a standard Node app.
// You can create optimized builds for different platforms by
// specifying a different adapter
adapter: node(),
// hydrate the <div id="svelte"> element in src/app.html
target: '#svelte'
}
};
Create the src/lib/Client.js file with the following contents. This is the Apollo Client setup file.
import fetch from 'node-fetch';
import { ApolloClient, HttpLink } from '@apollo/client/core/core.cjs.js';
import { InMemoryCache } from '@apollo/client/cache/cache.cjs.js';
class Client {
constructor() {
if (Client._instance) {
return Client._instance
}
Client._instance = this;
this.client = this.setupClient();
}
setupClient() {
const link = new HttpLink({
uri: 'http://localhost:4000/graphql',
fetch
});
const client = new ApolloClient({
link,
cache: new InMemoryCache()
});
return client;
}
}
export const client = (new Client()).client;
Create the src/routes/qry/test.js file with the following contents. This is the server-side route. If your GraphQL schema doesn't have the double function, specify a different query with its own input(s) and output.
import { client } from '$lib/Client.js';
import { gql } from '@apollo/client/core/core.cjs.js';
export const post = async request => {
const { num } = request.body;
try {
const query = gql`
query Doubled($x: Int) {
double(number: $x)
}
`;
const result = await client.query({
query,
variables: { x: num }
});
return {
status: 200,
body: {
nodes: result.data.double
}
}
} catch (err) {
return {
status: 500,
error: 'Error retrieving data'
}
}
}
Add the following to the load function of the routes/todos/index.svelte file, within the <script context="module">...</script> tag (a fuller sketch of where this sits follows the snippet).
try {
const res = await fetch('/qry/test', {
method: 'POST',
credentials: 'same-origin',
headers: {
'Content-Type': 'application/json'
},
body: JSON.stringify({
num: 19
})
});
const data = await res.json();
console.log(data);
} catch (err) {
console.error(err);
}
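For reference, here is roughly how that snippet can sit inside the module script of routes/todos/index.svelte. The load signature and the returned object are assumptions based on the SvelteKit version used in this demo, so adapt them to whatever your load function already returns:
<script context="module">
  export async function load({ fetch }) {
    try {
      const res = await fetch('/qry/test', {
        method: 'POST',
        credentials: 'same-origin',
        headers: { 'Content-Type': 'application/json' },
        body: JSON.stringify({ num: 19 })
      });
      const data = await res.json();
      console.log(data);
    } catch (err) {
      console.error(err);
    }
    // Keep returning whatever the demo page already returns (e.g. its props).
    return {};
  }
</script>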
Finally, execute the npm install and npm run dev commands. Load the site in your web browser and watch the server-side route being queried from the client whenever you hover over the TODOS link in the navbar. In the console's network tab, notice how much quicker the response from the test route is on every second and subsequent request, thanks to the Apollo Client instance being a singleton.
Two things to keep in mind when using phaleth's solution above: caching and authenticated requests.
Since the client is used in the endpoint /qry/test.js, the singleton pattern combined with the caching behavior makes your server stateful. So if A and then B make the same query, B could end up seeing some of A's data.
The same problem arises if you need authorization headers in your query. You would need to set this up in the setupClient method, like so:
setupClient(sometoken) {
...
const authLink = setContext((_, { headers }) => {
return {
headers: {
...headers,
authorization: `Bearer ${sometoken}`
}
};
});
const client = new ApolloClient({
credentials: 'include',
link: authLink.concat(link),
cache: new InMemoryCache()
});
}
But then with the singleton pattern this becomes problematic if you have multiple users.
To keep your server stateless, a workaround is to avoid the singleton pattern and create a new Client(sometoken) in the endpoint.
This is not an optimal solution: it recreates the client on each request and essentially throws away the cache. But it solves the caching and authorization concerns when you have multiple users.
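A rough sketch of that stateless variant, reusing the imports from Client.js above; sometoken, and how you obtain it in the endpoint, are assumptions:
import fetch from 'node-fetch';
import { ApolloClient, HttpLink } from '@apollo/client/core/core.cjs.js';
import { InMemoryCache } from '@apollo/client/cache/cache.cjs.js';

// Build a fresh, request-scoped client instead of sharing a singleton.
export const makeClient = (sometoken) => {
  const link = new HttpLink({
    uri: 'http://localhost:4000/graphql',
    fetch,
    // Forward the caller's token so the GraphQL API sees the right user.
    headers: sometoken ? { authorization: `Bearer ${sometoken}` } : {}
  });
  // A per-request InMemoryCache: nothing is shared between users.
  return new ApolloClient({ link, cache: new InMemoryCache() });
};

// In src/routes/qry/test.js, something along these lines:
// const client = makeClient(request.headers.authorization);
// const result = await client.query({ query, variables: { x: num } });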

Why is AWS Lambda, API Gateway returning a CORS error

I know this issue has been covered in many posts all over the web and I think I've tried them all, but I'm still getting a 403 CORS error in my local React app.
Here are in part, the Headers from Dev Tools:
#GENERAL:
Request URL: https://<myGatewayApiUrl>.amazonaws.com/dev/api/byid/1/129
Request Method: OPTIONS
Status Code: 403
#RESPONSE HEADERS
access-control-allow-headers: Content-Type,X-Amz-Date,Authorization,X-Api-Key,X-Amz-Security-Token
access-control-allow-methods: GET,OPTIONS
access-control-allow-origin: *
content-length: 42
content-type: application/json
I've been working in the API Gateway console with Enable CORS, but for one GET method I get the error "Add Access-Control-Allow-Origin Integration Response Header Mapping to GET method -> invalid response status code specified". Still, the OPTIONS headers do get set, and the GET method's Access-Control-Allow-Origin header is set.
I am using express and cors packages, here's a snippet from my API index.js file:
const app = express();
app.use(cors());
app.options('*', cors());
Here is the request code from React app:
export const getRecordById = async (userId, id, token) => {
try {
const response = await axios.get(
process.env.REACT_APP_API_URL + `/byid/${userId}/${id}`,
{
headers: {
Authorization: `Bearer ${token}`,
'Content-Type': 'application/json',
},
}
);
return response.data;
} catch (error) {
console.log('ERROR', error);
return error;
}
};
Here is my response code from the Lambda API:
getById: asyncHandler(async (req, res, next) => {
const { user, id } = req.params;
const result = await recordsService.getRecordById(user, id);
res.set({
'content-type': 'application/json',
'Access-Control-Allow-Origin': '*',
});
if (!result) {
return res.status(400).json({
error: true,
message: 'get record by ID action failed',
data: {},
});
}
res.status(200).json({
error: false,
message: 'successful record retrieval',
data: {
record: result,
},
});
}),
Also, I have the http events in my serverless.yml file set as follows (from what I understand, cors: true should handle the preflight requests):
- http:
    path: /api/records/byid/{user}/{id}
    method: GET
    cors: true
I've spent way too much time trying to figure this out. It must be something simple and dumb. Am I using res.set() properly? Everything looks correct, but I know I'm missing something. Thanks.
By default, API Gateway will reject the call when the URL is not found, and the browser reports that rejection as a CORS error.
It looks like the Axios request URL is missing the /records segment.
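In other words, assuming REACT_APP_API_URL points at https://<myGatewayApiUrl>.amazonaws.com/dev/api, the call in getRecordById would need the /records segment so it matches the serverless.yml path:
// Hypothetical fix: add /records so the URL matches /api/records/byid/{user}/{id}.
const response = await axios.get(
  process.env.REACT_APP_API_URL + `/records/byid/${userId}/${id}`,
  {
    headers: {
      Authorization: `Bearer ${token}`,
      'Content-Type': 'application/json',
    },
  }
);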

gatsby-source-graphql does not seem to pass cookie header, how to solve?

How to pass Cookie headers from gatsby-source-graphql?
I'm using gatsby-source-graphql (https://github.com/gatsbyjs/gatsby/tree/master/packages/gatsby-source-graphql) and recently had to implement AWS CloudFront Signed Cookies to authorise users to access a private staging environment. For this reason, the requests to the GraphQL endpoint, handled by the plugin, need to have the cookie in the request header, which I do by:
{
resolve: 'gatsby-source-graphql',
options: {
cookie: 'var1=val1; var2=val2; '
}
}
The above fails with:
ServerParseError: Unexpected token < in JSON at position 0
If disabling Signed Cookies and making the endpoint public, it works.
And, if I keep it private again and test with curl, works:
curl --cookie 'var1=val1; var2=val2; ' graphql_endpoint.com
I tried to figure out why the Cookie header is not passed, but it seems the problem is in a different plugin, 'apollo-link-http', which the plugin above uses (https://github.com/gatsbyjs/gatsby/blob/master/packages/gatsby-source-graphql/src/gatsby-node.js).
Meanwhile, looking at apollo-http-link (https://www.apollographql.com/docs/link/links/http/) and an issue reported here (https://github.com/apollographql/apollo-client/issues/4455), I tried:
{
resolve: 'gatsby-source-graphql',
options: {
typeName: 'FOOBAR',
fieldName: 'foobar',
createLink: (pluginOptions) => {
return createHttpLink({
uri: process.env.GATSBY_GRAPHQL_API_URL,
credentials: 'include',
headers: {
cookie: "CloudFront-Policy=xxxxx_; CloudFront-Key-Pair-Id=xxxxx; CloudFront-Signature=xxxxxxxxxx; path=/;",
},
fetch,
})
},
}
},
Without success, the same error as before.
I also tried to use the fetch options for node-fetch:
{
resolve: 'gatsby-source-graphql',
options: {
typeName: 'FOOBAR',
fieldName: 'foobar',
url: process.env.GATSBY_GRAPHQL_API_URL,
fetchOptions: {
credentials: 'include',
headers: {
cookie: "CloudFront-Policy=xxxxx_; CloudFront-Key-Pair-Id=xxxxx; CloudFront-Signature=xxxxxxxxxx; path=/;",
},
},
}
},
You can see fetchOptions in use here (https://github.com/gatsbyjs/gatsby/blob/master/packages/gatsby-source-graphql/src/gatsby-node.js).
No success! This is probably a bug.
After spending a lot of time looking at the docs and other reports, I found a solution based on the attempts I originally posted.
I started by looking at the browser version and checking the cookie header property name to avoid any typos. I determined it should be "Cookie", whereas most examples I found mention '.cookie', etc.
With that said, I've checked the documentation for all the related packages and source code:
https://github.com/gatsbyjs/gatsby/tree/master/packages/gatsby-source-graphql
https://github.com/gatsbyjs/gatsby/blob/master/packages/gatsby-source-graphql/src/gatsby-node.js
https://www.apollographql.com/docs/link/links/http/
https://github.com/apollographql/apollo-client/issues/4455
Finally, I declared the cookie inside the headers parameter and, in a separate property, the options for the node-fetch package:
https://github.com/bitinn/node-fetch
The result:
{
resolve: 'gatsby-source-graphql',
options: {
typeName: 'FOOBAR',
fieldName: 'foobar',
url: process.env.GATSBY_GRAPHQL_API_URL,
headers: {
Cookie: 'CloudFront-Policy=xxxxx_; CloudFront-Key-Pair-Id=xxxxx; CloudFront-Signature=xxxxxxxxxx; path=/;'
},
credentials: 'include',
}
},
What happens above is that credentials: 'include' allows cross-origin requests and enables cookies (https://www.apollographql.com/docs/react/networking/authentication/#cookie).
Hope that this helps someone else in the future, as it's not trivial.

Found @client directives in a query but no ApolloClient resolvers were specified

OS: Windows 10 Pro
apollo-client: 2.6.3
apollo-boost: 0.1.16
Can anyone explain why I'm getting the following error message?:
Found @client directives in a query but no ApolloClient resolvers were
specified. This means ApolloClient local resolver handling has been
disabled, and @client directives will be passed through to your link
chain.
when I've defined my ApolloClient as follows:
return new ApolloClient({
uri: process.env.NODE_ENV === 'development' ? endpoint : prodEndpoint,
request: operation => {
operation.setContext({
fetchOptions: {
credentials: 'include',
},
headers: { cookie: headers && headers.cookie },
});
},
// local data
clientState: {
resolvers: {
Mutation: {
toggleCart(_, variables, { cache }) {
// Read the cartOpen value from the cache
const { cartOpen } = cache.readQuery({
query: LOCAL_STATE_QUERY,
});
// Write the cart State to the opposite
const data = {
data: { cartOpen: !cartOpen },
};
cache.writeData(data);
return data;
},
},
},
defaults: {
cartOpen: false,
},
},
});
From the docs:
If you're interested in integrating local state handling capabilities with Apollo Client < 2.5, please refer to our (now deprecated) apollo-link-state project. As of Apollo Client 2.5, local state handling is baked into the core, which means it is no longer necessary to use apollo-link-state
The clientState config option was only used with apollo-link-state. You need to add the resolvers directly to the config as shown in the docs:
new ApolloClient({
uri: '/graphql',
resolvers: { ... },
})
Also note that there is no defaults option anymore -- the cache should be initialized by calling writeData directly on the cache instance (see here).
I would suggest going through the latest docs and avoiding any examples from external sources (like existing repos or tutorials) since these may be outdated.
Note: As of version 3.0, writeData was removed in favor of writeFragment and writeQuery.
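Putting those notes together, a minimal sketch for apollo-client 2.6 (the version in the question) wired with the standalone packages rather than apollo-boost. LOCAL_STATE_QUERY is the same query used in the original resolver, and the boost-style request option for headers is omitted here:
import ApolloClient from 'apollo-client';
import { InMemoryCache } from 'apollo-cache-inmemory';
import { HttpLink } from 'apollo-link-http';

const cache = new InMemoryCache();

const client = new ApolloClient({
  link: new HttpLink({ uri: '/graphql', credentials: 'include' }),
  cache,
  // Resolvers now live directly on the client config, with no clientState wrapper.
  resolvers: {
    Mutation: {
      toggleCart(_root, _variables, { cache }) {
        const { cartOpen } = cache.readQuery({ query: LOCAL_STATE_QUERY });
        const data = { data: { cartOpen: !cartOpen } };
        cache.writeData(data);
        return data;
      },
    },
  },
});

// Replace the old `defaults` by writing the initial state straight to the cache.
// (On Apollo Client 3 you would use cache.writeQuery instead of writeData.)
cache.writeData({ data: { cartOpen: false } });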

Google Cloud Functions enable CORS?

I just finished the Hello World Google Cloud Functions tutorial and received the following response headers:
Connection → keep-alive
Content-Length → 14
Content-Type → text/plain; charset=utf-8
Date → Mon, 29 Feb 2016 07:02:37 GMT
Execution-Id → XbT-WC9lXKL-0
Server → nginx
How can I add the CORS headers to be able to call my function from my website?
here we go:
exports.helloWorld = function helloWorld(req, res) {
res.set('Access-Control-Allow-Origin', "*")
res.set('Access-Control-Allow-Methods', 'GET, POST');
if (req.method === "OPTIONS") {
// stop preflight requests here
res.status(204).send('');
return;
}
// handle full requests
res.status(200).send('weeee!');
};
then you can jquery/whatever it as usual:
$.get(myUrl, (r) => console.log(r))
I'm the product manager for Google Cloud Functions. Thanks for your question, this has been a popular request.
We don't have anything to announce just yet, but we're aware of several enhancements that need to be made to the HTTP invocation capabilities of Cloud Functions and we'll be rolling out improvements to this and many other areas in future iterations.
UPDATE:
We've improved the way you deal with HTTP in Cloud Functions. You now have full access to the HTTP Request/Response objects so you can set the appropriate CORS headers and respond to pre-flight OPTIONS requests (https://cloud.google.com/functions/docs/writing/http)
UPDATE (2022):
Just noticed there was a question about docs, and our docs have moved. Updated docs for CORS are here:
https://cloud.google.com/functions/docs/samples/functions-http-cors
You can use the CORS express middleware.
package.json
npm install express --save
npm install cors --save
index.js
'use strict';
const functions = require('firebase-functions');
const express = require('express');
const cors = require('cors')({origin: true});
const app = express();
app.use(cors);
app.get('*', (req, res) => {
res.send(`Hello, world`);
});
exports.hello = functions.https.onRequest(app);
I've just created webfunc. It's a lightweight HTTP server that supports CORS as well as routing for Google Cloud Functions. Example:
const { serveHttp, app } = require('webfunc')
exports.yourapp = serveHttp([
app.get('/', (req, res) => res.status(200).send('Hello World')),
app.get('/users/{userId}', (req, res, params) => res.status(200).send(`Hello user ${params.userId}`)),
app.get('/users/{userId}/document/{docName}', (req, res, params) => res.status(200).send(`Hello user ${params.userId}. I like your document ${params.docName}`)),
])
In your project's root, simply add an appconfig.json that looks like this:
{
"headers": {
"Access-Control-Allow-Methods": "GET, HEAD, OPTIONS, POST",
"Access-Control-Allow-Headers": "Origin, X-Requested-With, Content-Type, Accept",
"Access-Control-Allow-Origin": "*",
"Access-Control-Max-Age": "1296000"
}
}
Hope this helps.
In the python environment, you can use the flask request object to manage CORS requests.
def cors_enabled_function(request):
if request.method == 'OPTIONS':
# Allows GET requests from any origin with the Content-Type
# header and caches preflight response for 3600s
headers = {
'Access-Control-Allow-Origin': '*',
'Access-Control-Allow-Methods': 'GET',
'Access-Control-Allow-Headers': 'Content-Type',
'Access-Control-Max-Age': '3600'
}
return ('', 204, headers)
# Set CORS headers for the main request
headers = {
'Access-Control-Allow-Origin': '*'
}
return ('Hello World!', 200, headers)
See the gcloud docs for more.
You need to respond to the 'OPTIONS' (preflight) request by setting its headers as follows:
if (req.method === 'OPTIONS') {
res.set('Access-Control-Allow-Methods', '*');
res.set('Access-Control-Allow-Headers', '*');
res.status(204).send('');
}
Runtime: NodeJS 10
If you tried the accepted answer but encountered a preflight error, the docs offer examples of handling it in multiple languages, with the caveat that this only works on public functions, i.e. those deployed with --allow-unauthenticated:
exports.corsEnabledFunction = (req, res) => {
res.set("Access-Control-Allow-Origin", "*");
if (req.method === "OPTIONS") {
/* handle preflight OPTIONS request */
res.set("Access-Control-Allow-Methods", "GET, POST");
res.set("Access-Control-Allow-Headers", "Content-Type");
// cache preflight response for 3600 sec
res.set("Access-Control-Max-Age", "3600");
return res.sendStatus(204);
}
// handle the main request
res.send("main response");
};
Another option is to use Express as shown in this post, complete with cross-origin enabled.
You must enable CORS within each of your functions, for example the hello function:
index.js
const cors = require('cors')();
// My Hello Function
function hello(req, res) {
res.status(200)
.send('Hello, Functions');
};
// CORS and Cloud Functions export
exports.hello = (req, res) => {
cors(req, res, () => {
hello(req, res);
});
}
Don't forget about package.json
package.json
{
"name": "function-hello",
"version": "0.1.0",
"private": true,
"dependencies": {
"cors": "^2.8.5"
}
}
After applying your favourite answer from here, if you're still getting this error, check for uncaught errors in your cloud function. An uncaught error can result in the browser receiving a CORS error, even when the underlying failure has nothing to do with CORS.
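One way to guard against that: set the CORS headers first and wrap the rest of the handler in a try/catch, so failures still come back with proper headers. A rough sketch (the function name and response bodies are placeholders):
exports.corsSafeFunction = async (req, res) => {
  // Set CORS headers up front so even error responses carry them.
  res.set('Access-Control-Allow-Origin', '*');
  if (req.method === 'OPTIONS') {
    res.set('Access-Control-Allow-Methods', 'GET, POST');
    res.set('Access-Control-Allow-Headers', 'Content-Type');
    return res.status(204).send('');
  }
  try {
    // ...your actual logic here...
    res.status(200).send('ok');
  } catch (err) {
    // Without this catch, an uncaught throw can surface in the browser as a CORS error.
    console.error(err);
    res.status(500).send('Internal error');
  }
};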
After CORS is enabled, if you send a POST request to your function, also check your request's Content-Type header. Mine was set to "text/plain" and my browser kept triggering CORS errors; after setting the header to "application/json", everything worked properly.
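For example, the client-side request described above could look something like this (the function URL is a placeholder):
// Send JSON with an explicit Content-Type header instead of 'text/plain'.
fetch('https://REGION-PROJECT.cloudfunctions.net/hello', {
  method: 'POST',
  headers: { 'Content-Type': 'application/json' },
  body: JSON.stringify({ name: 'world' })
})
  .then((r) => r.json())
  .then(console.log)
  .catch(console.error);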