How to add GET request parameters in AWS canaries

I'm new to CloudWatch Synthetics. I want to test my API with a GET request that takes parameters. Please suggest the configuration for adding parameters, or point me to blueprint scripts that use parameters in a GET request.
Thanks

Try this, based on the API canary blueprint:
var synthetics = require('Synthetics');
const log = require('SyntheticsLogger');

const apiCanaryBlueprint = async function () {
    // Handle validation for the positive scenario
    const validatePositiveCase = async function (res) {
        return new Promise((resolve, reject) => {
            if (res.statusCode < 200 || res.statusCode > 299) {
                throw res.statusCode + ' ' + res.statusMessage;
            }
            let responseBody = '';
            res.on('data', (d) => {
                responseBody += d;
            });
            res.on('end', () => {
                // Add validation on 'responseBody' here if required. For example, your status code may be 200 but the data might be empty.
                resolve();
            });
        });
    };

    // Handle validation for the negative scenario
    const validateNegativeCase = async function (res) {
        return new Promise((resolve, reject) => {
            if (res.statusCode < 400) {
                throw res.statusCode + ' ' + res.statusMessage;
            }
            resolve(); // Settle the promise so the step completes (it was never resolved in the original).
        });
    };

    let requestOptionsStep1 = {
        'hostname': 'myproductsEndpoint.com',
        'method': 'GET',
        'path': '/test/product/validProductName',
        'port': 443,
        'protocol': 'https:'
    };
    let headers = {};
    // Prepend the canary user agent; filter(Boolean) avoids joining in an undefined value.
    headers['User-Agent'] = [synthetics.getCanaryUserAgentString(), headers['User-Agent']].filter(Boolean).join(' ');
    requestOptionsStep1['headers'] = headers;

    // By default, headers, post data and response body are not included in the report for security reasons.
    // Change the configuration at the global level, or add it as step configuration for individual steps.
    let stepConfig = {
        includeRequestHeaders: true,
        includeResponseHeaders: true,
        restrictedHeaders: ['X-Amz-Security-Token', 'Authorization'], // Restricted header values do not appear in the generated report.
        includeRequestBody: true,
        includeResponseBody: true
    };

    await synthetics.executeHttpStep('Verify GET products API with valid name', requestOptionsStep1, validatePositiveCase, stepConfig);

    let requestOptionsStep2 = {
        'hostname': 'myproductsEndpoint.com',
        'method': 'GET',
        'path': '/test/canary/InvalidName(',
        'port': 443,
        'protocol': 'https:'
    };
    headers = {};
    headers['User-Agent'] = [synthetics.getCanaryUserAgentString(), headers['User-Agent']].filter(Boolean).join(' ');
    requestOptionsStep2['headers'] = headers;

    stepConfig = {
        includeRequestHeaders: true,
        includeResponseHeaders: true,
        restrictedHeaders: ['X-Amz-Security-Token', 'Authorization'],
        includeRequestBody: true,
        includeResponseBody: true
    };

    await synthetics.executeHttpStep('Verify GET products API with invalid name', requestOptionsStep2, validateNegativeCase, stepConfig);
};

exports.handler = async () => {
    return await apiCanaryBlueprint();
};
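The blueprint above passes the parameter as a path segment (/test/product/validProductName). If you want classic query-string parameters instead, append them to the path of the request options. A minimal sketch to drop inside apiCanaryBlueprint, reusing validatePositiveCase and stepConfig from above (the endpoint and parameter names are placeholders):

// Build the query string safely; URLSearchParams handles URL encoding.
const params = new URLSearchParams({ name: 'validProductName', limit: '10' });
let requestOptionsWithQuery = {
    'hostname': 'myproductsEndpoint.com', // placeholder endpoint
    'method': 'GET',
    'path': '/test/product?' + params.toString(), // -> /test/product?name=validProductName&limit=10
    'port': 443,
    'protocol': 'https:',
    'headers': { 'User-Agent': synthetics.getCanaryUserAgentString() }
};
await synthetics.executeHttpStep('Verify GET products API with query parameters', requestOptionsWithQuery, validatePositiveCase, stepConfig);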

Related

Handling multipart/form-data in aws lambda

I'm trying to send a request with an image to a Lambda function through API Gateway.
I'm using this piece of code to parse the form-data object received by my Lambda function. I then upload the image to S3, but when downloading and opening the image from S3, I see that it's corrupt.
I have tried the following npm packages:
parse-multipart
parse-multipart-data
Neither works: I get an empty parts array. The piece of code I use does get results in the array, but the buffers seem to be corrupt.
The problem seems to be in this line of code:
Buffer.from(item.slice(item.search(/Content-Type:\s.+/g) + item.match(/Content-Type:\s.+/g)[0].length + 4, -4), 'binary')
Does anyone have a solution for me?
You can use busboy to parse the multipart form data before uploading to S3 as shown below:
// This code is written in ECMAScript module (ESM) syntax, not CommonJS.
// So make sure you add "type": "module" to your package.json.
import {S3Client, PutObjectCommand} from '@aws-sdk/client-s3';
import Busboy from 'busboy';

// Initialize the client outside the function handler to take advantage of execution environment reuse.
const s3Client = new S3Client({region: process.env.AWS_REGION});

// Function handler.
export async function handler(event) {
    const {
        ContentType: contentType = '',
        File: file = '',
    } = await FORM.parse(event['body'], event['headers']);
    try {
        // Adds an object to a bucket. We must have `WRITE` permissions on the bucket to add an object to it.
        await s3Client.send(new PutObjectCommand({
            Body: file,
            Bucket: 'BUCKET_NAME',
            ContentType: contentType,
            Key: 'SOME_KEY',
        }));
        return {
            isBase64Encoded: false,
            statusCode: 200,
            body: JSON.stringify({
                message: 'Everything is gonna be alright.',
            }),
        };
    } catch (e) {
        return {
            isBase64Encoded: false,
            statusCode: 404,
            body: JSON.stringify(e),
        };
    }
}

const FORM = {
    parse(body, headers) {
        return new Promise((resolve, reject) => {
            const data = {};
            const buffer = Buffer.from(body, 'base64');
            const bb = Busboy({
                headers: Object.keys(headers).reduce((newHeaders, key) => {
                    // busboy expects lower-case headers.
                    newHeaders[key.toLowerCase()] = headers[key];
                    return newHeaders;
                }, {}),
                limits: {
                    fileSize: 10485760, // Set as desired.
                    files: 1,
                },
            });
            bb.on('file', (name, stream, info) => {
                const chunks = [];
                stream.on('data', (chunk) => {
                    if (name === 'File') {
                        chunks.push(chunk);
                    }
                }).on('limit', () => {
                    reject(new Error('File size limit has been reached.'));
                }).on('close', () => {
                    if (name === 'File') {
                        data[name] = Buffer.concat(chunks);
                        data['ContentType'] = info.mimeType;
                    }
                });
            });
            bb.on('error', (err) => {
                reject(err);
            });
            bb.on('close', () => {
                resolve(data);
            });
            bb.end(buffer);
        });
    }
};
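Note that FORM.parse decodes event['body'] from base64, so the payload must reach Lambda base64-encoded; with an API Gateway REST API that means adding multipart/form-data to the API's binary media types. A small guard you could place at the top of the handler, as a sketch (the message text is just an example):

// Guard against a non-base64 body; the parser above assumes base64 input.
if (!event.isBase64Encoded) {
    return {
        isBase64Encoded: false,
        statusCode: 400,
        body: JSON.stringify({message: 'Expected a base64-encoded multipart body.'}),
    };
}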

Modify Cloudfront origin response with Lambda - read-only headers

I have a CloudFront distribution with a single React site, hosted in S3. The origin is connected via the REST API endpoint. To properly handle queries, I use custom error responses that map 403 and 404 statuses to 200 and route them to the root. The root object is index.html and everything seems to be fine.
Now I have a task to add another site to the distribution, which should be accessible through a subdirectory.
To do this I have to set a root object for the subdirectory, catch 404 and 403 responses, and transfer them to that root object. I've already set up the origin and behaviour.
I tried to use these manuals:
example
source
but it seems that something went wrong.
The first approach (CloudFrontSubdirectoryIndex) seems not to work at all (the function is not invoked and no rewrite happens), so I tried a CloudFront Function and it seems to work fine.
The last step is to handle 404 and 403 responses.
Here is the function from the manual:
'use strict';

const https = require('https');
const indexPage = 'index.html';

exports.handler = async (event, context, callback) => {
    const cf = event.Records[0].cf;
    const request = cf.request;
    const response = cf.response;
    const statusCode = response.status;

    // Only replace 403 and 404 responses, typically received
    // when loading a page for a SPA that uses client-side routing.
    const doReplace = request.method === 'GET'
        && (statusCode == '403' || statusCode == '404');

    const result = doReplace
        ? await generateResponseAndLog(cf, request, indexPage)
        : response;

    callback(null, result);
};

async function generateResponseAndLog(cf, request, indexPage) {
    const domain = cf.config.distributionDomainName;
    const appPath = getAppPath(request.uri);
    const indexPath = `/${appPath}/${indexPage}`;
    const response = await generateResponse(domain, indexPath);
    console.log('response: ' + JSON.stringify(response));
    return response;
}

async function generateResponse(domain, path) {
    try {
        // Load the HTML index from the CloudFront cache.
        const s3Response = await httpGet({ hostname: domain, path: path });
        const headers = s3Response.headers ||
            {
                'content-type': [{ value: 'text/html;charset=UTF-8' }]
            };
        return {
            status: '200',
            headers: wrapAndFilterHeaders(headers),
            body: s3Response.body
        };
    } catch (error) {
        return {
            status: '500',
            headers: {
                'content-type': [{ value: 'text/plain' }]
            },
            body: 'An error occurred loading the page'
        };
    }
}

function httpGet(params) {
    return new Promise((resolve, reject) => {
        https.get(params, (resp) => {
            console.log(`Fetching ${params.hostname}${params.path}, status code : ${resp.statusCode}`);
            let result = {
                headers: resp.headers,
                body: ''
            };
            resp.on('data', (chunk) => { result.body += chunk; });
            resp.on('end', () => { resolve(result); });
        }).on('error', (err) => {
            console.log(`Couldn't fetch ${params.hostname}${params.path} : ${err.message}`);
            reject(err);
        });
    });
}

// Get the app path segment, e.g. candidates.app, employers.client etc.
function getAppPath(path) {
    if (!path) {
        return '';
    }
    if (path[0] === '/') {
        path = path.slice(1);
    }
    const segments = path.split('/');
    // Will always have at least one segment (may be empty).
    return segments[0];
}

// CloudFront requires header values to be wrapped in an array.
function wrapAndFilterHeaders(headers) {
    const allowedHeaders = [
        'content-type',
        'content-length',
        'last-modified',
        'date',
        'etag'
    ];
    const responseHeaders = {};
    if (!headers) {
        return responseHeaders;
    }
    for (var propName in headers) {
        // Only include allowed headers.
        if (allowedHeaders.includes(propName.toLowerCase())) {
            var header = headers[propName];
            if (Array.isArray(header)) {
                // Assume it is already in the 'wrapped' format.
                responseHeaders[propName] = header;
            } else {
                // Fix to the required format.
                responseHeaders[propName] = [{ value: header }];
            }
        }
    }
    return responseHeaders;
}
When I try to implement this solution (attach the function to the origin response event) I get:
The Lambda function result failed validation: The function tried to add, delete, or change a read-only header.
Here is a list of restricted headers, but I'm not modifying any of them.
If I don't attach any headers to the response at all, the message is the same.
If I attach all headers, CloudFront says that I'm modifying a blacklisted header.
Objects in the bucket have only one piece of customized metadata, Cache-Control: no-cache.
It seemed like a quick task, but I've been stuck on it for two days already.
Any help will be appreciated.
UPD: I've searched the logs and found:
ERROR Validation error: Lambda function result failed validation, the function tried to delete read-only header, headerName : Transfer-Encoding.
I'm a little confused. This header is not present in the origin response, but CloudFront is telling me that I deleted it...
I tried to find the value of the Transfer-Encoding header that should come from the origin (S3), but it seems to have disappeared, and CloudFront says this header is essential.
So I've just hard-coded it, and everything works:
'use strict';

const https = require('https');
const indexPage = 'index.html';

exports.handler = async (event, context, callback) => {
    const cf = event.Records[0].cf;
    const request = cf.request;
    const response = cf.response;
    const statusCode = response.status;

    // Only replace 403 and 404 responses, typically received
    // when loading a page for a SPA that uses client-side routing.
    const doReplace = request.method === 'GET'
        && (statusCode == '403' || statusCode == '404');

    const result = doReplace
        ? await generateResponseAndLog(cf, request, indexPage)
        : response;

    callback(null, result);
};

async function generateResponseAndLog(cf, request, indexPage) {
    const domain = cf.config.distributionDomainName;
    const appPath = getAppPath(request.uri);
    const indexPath = `/${appPath}/${indexPage}`;
    const response = await generateResponse(domain, indexPath);
    console.log('response: ' + JSON.stringify(response));
    return response;
}

async function generateResponse(domain, path) {
    try {
        // Load the HTML index from the CloudFront cache.
        const s3Response = await httpGet({ hostname: domain, path: path });
        const headers = s3Response.headers ||
            {
                'content-type': [{ value: 'text/html;charset=UTF-8' }]
            };
        // Hard-code the header CloudFront insists on (see the note above).
        headers['transfer-encoding'] = 'chunked';
        return {
            status: '200',
            headers: wrapAndFilterHeaders(headers),
            body: s3Response.body
        };
    } catch (error) {
        return {
            status: '500',
            headers: {
                'content-type': [{ value: 'text/plain' }]
            },
            body: 'An error occurred loading the page'
        };
    }
}

function httpGet(params) {
    return new Promise((resolve, reject) => {
        https.get(params, (resp) => {
            console.log(`Fetching ${params.hostname}${params.path}, status code : ${resp.statusCode}`);
            let result = {
                headers: resp.headers,
                body: ''
            };
            resp.on('data', (chunk) => { result.body += chunk; });
            resp.on('end', () => { resolve(result); });
        }).on('error', (err) => {
            console.log(`Couldn't fetch ${params.hostname}${params.path} : ${err.message}`);
            reject(err);
        });
    });
}

// Get the app path segment, e.g. candidates.app, employers.client etc.
function getAppPath(path) {
    if (!path) {
        return '';
    }
    if (path[0] === '/') {
        path = path.slice(1);
    }
    const segments = path.split('/');
    // Will always have at least one segment (may be empty).
    return segments[0];
}

// CloudFront requires header values to be wrapped in an array.
function wrapAndFilterHeaders(headers) {
    const allowedHeaders = [
        'content-type',
        'content-length',
        'content-encoding',
        'transfer-encoding',
        'last-modified',
        'date',
        'etag'
    ];
    const responseHeaders = {};
    if (!headers) {
        return responseHeaders;
    }
    for (var propName in headers) {
        // Only include allowed headers.
        if (allowedHeaders.includes(propName.toLowerCase())) {
            var header = headers[propName];
            if (Array.isArray(header)) {
                // Assume it is already in the 'wrapped' format.
                responseHeaders[propName] = header;
            } else {
                // Fix to the required format.
                responseHeaders[propName] = [{ value: header }];
            }
        }
    }
    return responseHeaders;
}
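For reference, the working version differs from the first function in only two places; condensed:

// In generateResponse(): hard-code the header CloudFront refuses to see deleted.
headers['transfer-encoding'] = 'chunked';

// In wrapAndFilterHeaders(): extend the allow-list so the header survives filtering.
const allowedHeaders = [
    'content-type',
    'content-length',
    'content-encoding',   // added
    'transfer-encoding',  // added
    'last-modified',
    'date',
    'etag'
];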

output http request node.js in aws lambda

exports.handler = async (event) => {
    // TODO implement
    const https = require('https');
    https.get('https://postman-echo.com/get?' +
        'username =' +
        '&password =' +
        '&date=' +
        '&cashSales=' + +
        '&creditCardVisa=' +
        '&creditCardMaster=' + +
        '&creditCardAmex=' +
        '&creditCardOthers=0',
        res => {
            //console.log(res.statusCode);
            //console.log(res.headers);
            let body = '';
            res.on('data', data => {
                body += data;
            });
            res.on('end', () => console.log(body));
        });
    const response = {
        statusCode: 200,
        body: JSON.stringify(https.get),
    };
    return response;
};
I can't seem to output the HTTP response using this function. It runs in Node.js but not in AWS Lambda, even after putting it in the response object.
I have restructured the code a bit and wrapped it in a promise. You were not returning anything from the function; see here.
const https = require("https");

exports.handler = event => {
    return new Promise((resolve, reject) => {
        https.get(
            "https://postman-echo.com/get?" +
                "username=" +
                "&password=" +
                "&date=" +
                "&cashSales=" +
                "&creditCardVisa=" +
                "&creditCardMaster=" +
                "&creditCardAmex=" +
                "&creditCardOthers=0",
            resp => {
                let data = "";
                // A chunk of data has been received.
                resp.on("data", chunk => {
                    data += chunk;
                });
                // The whole response has been received. Resolve with the result.
                resp.on("end", () => {
                    resolve(data);
                });
                resp.on("error", error => {
                    reject(error);
                });
            }
        );
    });
};
You were close, but I found two issues:
Your Lambda function was not waiting for the HTTP GET to finish.
You were returning the incorrect value, https.get; you should return the response payload of the https.get call.
I have fixed both issues below.
const https = require("https");

exports.handler = async (event) => {
    const httpResponse = await new Promise((resolve, reject) => {
        https.get(
            "https://postman-echo.com/get?" +
                "username=" +
                "&password=" +
                "&date=" +
                "&cashSales=" +
                "&creditCardVisa=" +
                "&creditCardMaster=" +
                "&creditCardAmex=" +
                "&creditCardOthers=0",
            resp => {
                let body = '';
                // A chunk of data has been received.
                resp.on("data", chunk => {
                    body += chunk;
                });
                // The whole response has been received. Resolve with the result.
                resp.on("end", () => {
                    resolve(body);
                });
                resp.on("error", error => {
                    reject(error);
                });
            }
        );
    });
    const response = {
        statusCode: 200,
        body: JSON.stringify(httpResponse),
    };
    return response;
};
Hope this helps.
I think others have already pointed out the problem in your code: you are not waiting for the HTTP GET call to complete before returning. Read this article, which explains incorrect handling of Promises in AWS Lambda functions and the solutions for it.
If you use the request-promise library (a much more popular one on npm) instead of https, together with the async/await syntax, your code becomes very simple.
exports.handler = async function(event) {
    const request = require('request-promise');
    // request-promise takes query-string parameters via the `qs` option.
    const res = await request.get('https://postman-echo.com/get', {
        qs: {
            username: '',
            password: '',
            date: '',
            cashSales: '',
            creditCardVisa: '',
            creditCardMaster: '',
            creditCardAmex: '',
            creditCardOthers: '0'
        }
    });
    const response = {
        statusCode: 200,
        body: JSON.stringify(res)
    };
    return response;
};
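If you'd rather avoid the extra dependency, the same query string can be built with Node's built-in URLSearchParams; a sketch under that assumption (the parameter values are placeholders, as in the original):

const https = require('https');

exports.handler = async (event) => {
    // Build the query string safely; values are URL-encoded automatically.
    const params = new URLSearchParams({
        username: '',
        password: '',
        date: '',
        cashSales: '',
        creditCardVisa: '',
        creditCardMaster: '',
        creditCardAmex: '',
        creditCardOthers: '0',
    });
    const body = await new Promise((resolve, reject) => {
        https.get(`https://postman-echo.com/get?${params}`, (resp) => {
            let data = '';
            resp.on('data', (chunk) => { data += chunk; });
            resp.on('end', () => resolve(data));
        }).on('error', reject);
    });
    return { statusCode: 200, body };
};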

How to execute an async fetch request and then retry last failed request?

Apollo Link offers an error handler, onError.
Issue:
Currently, we wish to refresh OAuth tokens when they expire during an Apollo call, and we are unable to execute an async fetch request inside onError properly.
Code:
initApolloClient.js
import { ApolloClient } from 'apollo-client';
import { onError } from 'apollo-link-error';
import { createHttpLink } from 'apollo-link-http';
import { ApolloLink, fromPromise } from 'apollo-link';

// Define the HTTP link
const httpLink = createHttpLink({
    uri: '/my-graphql-endpoint',
    credentials: 'include'
});

// Add the on-error handler to the Apollo link chain
return new ApolloClient({
    link: ApolloLink.from([
        onError(({ graphQLErrors, networkError, operation, forward }) => {
            if (graphQLErrors) {
                // User access token has expired
                if (graphQLErrors[0].message === 'Unauthorized') {
                    // We assume we have both tokens needed to run the async request
                    if (refreshToken && clientToken) {
                        // Let's refresh the token through an async request
                        return fromPromise(
                            authAPI.requestRefreshToken(refreshToken, clientToken)
                                .then((refreshResponse) => {
                                    let headers = {
                                        // Re-add old headers
                                        ...operation.getContext().headers,
                                        // Switch out the old access token for the new one
                                        authorization: `Bearer ${refreshResponse.access_token}`,
                                    };
                                    operation.setContext({
                                        headers
                                    });
                                    // Retry the last failed request
                                    return forward(operation);
                                })
                                .catch(function (error) {
                                    // No refresh or client token available; we force the user to log in
                                    return error;
                                })
                        );
                    }
                }
            }
        }),
        httpLink
    ])
});
What happens is:
1. The initial GraphQL query runs and fails due to unauthorization.
2. The onError function of ApolloLink is executed.
3. The promise to refresh the token is executed.
4. The onError function of ApolloLink is executed again??
5. The promise to refresh the token is completed.
6. The initial GraphQL query result is returned and its data is undefined.
Between steps 5 and 6, Apollo doesn't re-run the initial failed GraphQL query, and hence the result is undefined.
Errors from console:
Uncaught (in promise) Error: Network error: Error writing result to store for query:
query UserProfile($id: ID!) {
UserProfile(id: $id) {
id
email
first_name
last_name
}
__typename
}
}
The solution should allow us to:
Run an async request when an operation fails
Wait for the result of the request
Retry the failed operation with data from the request's result
Let the operation succeed and return its intended result
I'm refreshing the token this way (updated from the OP's code):
import { ApolloClient } from 'apollo-client';
import { onError } from 'apollo-link-error';
import { createHttpLink } from 'apollo-link-http';
import { ApolloLink, Observable } from 'apollo-link'; // add Observable

// Define the HTTP link
const httpLink = createHttpLink({
    uri: '/my-graphql-endpoint',
    credentials: 'include'
});

// Add the on-error handler to the Apollo link chain
return new ApolloClient({
    link: ApolloLink.from([
        onError(({ graphQLErrors, networkError, operation, forward }) => {
            // User access token has expired
            if (graphQLErrors && graphQLErrors[0].message === 'Unauthorized') {
                // We assume we have both tokens needed to run the async request
                if (refreshToken && clientToken) {
                    // Let's refresh the token through an async request
                    return new Observable(observer => {
                        authAPI.requestRefreshToken(refreshToken, clientToken)
                            .then(refreshResponse => {
                                operation.setContext(({ headers = {} }) => ({
                                    headers: {
                                        // Re-add old headers
                                        ...headers,
                                        // Switch out the old access token for the new one
                                        authorization: refreshResponse.access_token ? `Bearer ${refreshResponse.access_token}` : null,
                                    }
                                }));
                            })
                            .then(() => {
                                const subscriber = {
                                    next: observer.next.bind(observer),
                                    error: observer.error.bind(observer),
                                    complete: observer.complete.bind(observer)
                                };
                                // Retry the last failed request
                                forward(operation).subscribe(subscriber);
                            })
                            .catch(error => {
                                // No refresh or client token available; we force the user to log in
                                observer.error(error);
                            });
                    });
                }
            }
        }),
        httpLink // terminate the chain with the HTTP link
    ])
});
The accepted answer is quite good, but it wouldn't work with two or more concurrent requests. I've crafted the one below, after testing different cases with my token-renewal workflow, to fit my needs.
It's necessary to set errorLink before authLink in the link pipeline.
client.ts
import { ApolloClient, from, HttpLink } from '@apollo/client'
import errorLink from './errorLink'
import authLink from './authLink'
import cache from './cache'

const httpLink = new HttpLink({
  uri: process.env.REACT_APP_API_URL,
})

const apiClient = new ApolloClient({
  link: from([errorLink, authLink, httpLink]),
  cache,
  credentials: 'include',
})

export default apiClient
Cache shared between 2 apollo client instances for setting user query when my renewal token is expired
cache.ts
import { InMemoryCache } from '@apollo/client'

const cache = new InMemoryCache()

export default cache
authLink.ts
import { ApolloLink } from '@apollo/client'

type Headers = {
  authorization?: string
}

const authLink = new ApolloLink((operation, forward) => {
  const accessToken = localStorage.getItem('accessToken')
  operation.setContext(({ headers }: { headers: Headers }) => ({
    headers: {
      ...headers,
      authorization: accessToken,
    },
  }))
  return forward(operation)
})

export default authLink
errorLink.ts
import { ApolloClient, createHttpLink, fromPromise } from '@apollo/client'
import { onError } from '@apollo/client/link/error'
import { GET_CURRENT_USER } from 'queries'
import { RENEW_TOKEN } from 'mutations'
import cache from './cache'

let isRefreshing = false
let pendingRequests: Function[] = []

const setIsRefreshing = (value: boolean) => {
  isRefreshing = value
}

const addPendingRequest = (pendingRequest: Function) => {
  pendingRequests.push(pendingRequest)
}

const renewTokenApiClient = new ApolloClient({
  link: createHttpLink({ uri: process.env.REACT_APP_API_URL }),
  cache,
  credentials: 'include',
})

const resolvePendingRequests = () => {
  pendingRequests.map((callback) => callback())
  pendingRequests = []
}

const getNewToken = async () => {
  const oldRenewalToken = localStorage.getItem('renewalToken')
  const {
    data: {
      renewToken: {
        session: { renewalToken, accessToken },
      },
    },
  } = await renewTokenApiClient.mutate({
    mutation: RENEW_TOKEN,
    variables: { input: { renewalToken: oldRenewalToken } },
  })!
  localStorage.setItem('renewalToken', renewalToken)
  localStorage.setItem('accessToken', accessToken)
}

const errorLink = onError(({ graphQLErrors, operation, forward }) => {
  if (graphQLErrors) {
    for (const err of graphQLErrors) {
      switch (err?.message) {
        case 'expired':
          if (!isRefreshing) {
            setIsRefreshing(true)
            return fromPromise(
              getNewToken().catch(() => {
                resolvePendingRequests()
                setIsRefreshing(false)
                localStorage.clear()
                // Cache shared with the main client instance
                renewTokenApiClient!.writeQuery({
                  query: GET_CURRENT_USER,
                  data: { currentUser: null },
                })
                return forward(operation)
              }),
            ).flatMap(() => {
              resolvePendingRequests()
              setIsRefreshing(false)
              return forward(operation)
            })
          } else {
            return fromPromise(
              new Promise<void>((resolve) => {
                addPendingRequest(() => resolve())
              }),
            ).flatMap(() => {
              return forward(operation)
            })
          }
      }
    }
  }
})

export default errorLink
We just had the same issue, and after a very complicated solution with lots of Observables we ended up with a simple solution using promises, which are wrapped as an Observable in the end.
let tokenRefreshPromise: Promise<any> = Promise.resolve()
let isRefreshing: boolean

function createErrorLink (store): ApolloLink {
  return onError(({ graphQLErrors, networkError, operation, forward }) => {
    if (graphQLErrors) {
      // This is a helper method where we are checking the error message.
      if (isExpiredLogin(graphQLErrors) && !isRefreshing) {
        isRefreshing = true
        tokenRefreshPromise = store.dispatch('authentication/refreshToken')
        tokenRefreshPromise.then(() => isRefreshing = false)
      }
      return fromPromise(tokenRefreshPromise).flatMap(() => forward(operation))
    }
    if (networkError) {
      handleNetworkError(displayErrorMessage)
    }
  })
}
All pending requests wait for the tokenRefreshPromise and are then forwarded.
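For completeness, a minimal sketch of wiring the factory into a client; the store, authLink, and httpLink objects here are assumptions carried over from the snippets above:

import { ApolloClient, InMemoryCache, from } from '@apollo/client'

// Hypothetical wiring: createErrorLink(store) is the factory defined above,
// placed before the auth and http links as the earlier answer recommends.
const client = new ApolloClient({
  link: from([createErrorLink(store), authLink, httpLink]),
  cache: new InMemoryCache(),
})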

How to POST data for evaluation in middleware in loopback?

I want to use a custom API to evaluate data which is posted by applications, but remote methods are not accepted in middleware in LoopBack.
module.exports = function (verification) { // assumption: the model is passed in, as in a LoopBack model script
    const http = require('https');
    var request = require('request');
    var { Lib } = require('Lib');
    var lib = new Lib;

    verification.checkID = function (ID, cb) {
        cb(null, 'ID is :' + ID);
    };

    verification.remoteMethod('greet', {
        accepts: {
            arg: 'ID',
            type: 'string'
        },
        returns: {
            arg: 'OK',
            type: 'string'
        }
    });
};
module.exports = function (verification) {
    const http = require('https');
    var request = require('request');
    var { Lib } = require('Lib');
    var lib = new Lib;

    verification.checkID = function (ID, cb) {
        cb(null, 'ID is :' + ID);
    };

    verification.remoteMethod('greet', {
        'http': { // add the verb here
            'path': '/greet',
            'verb': 'post'
        },
        accepts: {
            arg: 'ID',
            type: 'string'
        },
        returns: {
            arg: 'OK',
            type: 'string'
        }
    });
};
Update
module.exports = function (server) {
    // Install a `/` route that returns server status
    var router = server.loopback.Router();
    router.get('/', server.loopback.status());
    router.get('/ping', function (req, res) { // your middleware function; call next() here if the request should fall through
        res.send('pong');
    });
    server.use(router);
};
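Note that boot scripts like the one above live in server/boot and run at startup, whereas a standalone middleware function (like the solution below) is registered in server/middleware.json. A sketch, assuming the file is saved as server/middleware/verification.js:

{
  "routes:before": {
    "./middleware/verification": {}
  }
}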
How to evaluate the posted data is the part I am not getting; please check this link too: Intercepting error handling with loopback
Regarding the following question, How to make a simple API for a POST method?, I found my solution in the following way:
module.exports = function (server) {
    const https = require('https');
    var request = require('request');

    return function verification(req, res, next) {
        res.setHeader('Access-Control-Allow-Origin', '*');
        res.setHeader('Access-Control-Allow-Methods', 'GET, POST, PUT, DELETE');
        res.setHeader('Access-Control-Allow-Headers', 'Content-Type');
        res.setHeader('Access-Control-Allow-Credentials', true);

        var response;
        var body = '';

        // When a chunk of data arrives.
        req.on('data', function (chunk) {
            // Append it.
            body += chunk;
        });

        // When finished with the data.
        req.on('end', function () {
            // Show what just arrived if POST.
            if (req.method === 'POST') {
                console.log(body);
            }
            // Which method?
            switch (req.method) {
                case 'GET':
                    // Verify the URL and respond with appropriate data.
                    // (handleGet is not shown in this snippet.)
                    handleGet(req, res);
                    // Response has already been sent.
                    response = '';
                    break;
                case 'POST':
                    // Verify the JSON request and respond with a stringified JSON response.
                    response = handlePost(body);
                    break;
                default:
                    response = JSON.stringify({ 'error': 'Not A POST' });
                    break;
            }
            // Send the response if not empty.
            if (response.length !== 0) {
                res.write(response);
                res.end();
            }
            // Paranoid clear of the 'body'. Seems to work without
            // this, but I don't trust it...
            body = '';
        });

        // If error.
        req.on('error', function (err) {
            res.write(JSON.stringify({ 'error': err.message }));
            res.end();
        });
    };

    function handlePost(body) {
        var response = '';
        var obj = JSON.parse(body);

        // Error if no 'fcn' property.
        if (typeof obj['fcn'] === 'undefined') {
            return JSON.stringify({ 'error': 'Request method missing' });
        }

        // Which function?
        switch (obj['fcn']) {
            // verification() requires 3 arguments.
            case 'verification':
                // Error if no arguments.
                if ((typeof obj['arg'] === 'undefined') || (obj['arg'].length !== 3)) {
                    response = JSON.stringify({ 'error': 'Arguments missing' });
                    break;
                }
                // Return with the response from the method.
                response = verification(obj['arg']);
                break;
            default:
                response = JSON.stringify({ 'error': 'Unknown function' });
                break;
        }
        return response;
    }

    function verification(arg) {
        var n1 = Number(arg[0]);
        var n2 = Number(arg[1]);
        var n3 = Number(arg[2]);
        var result;

        // Add them up.
        result = n1 + n2 + n3;

        // Return with a JSON string.
        return JSON.stringify({ 'result': result });
    }
};
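To exercise the middleware, the POST body must be JSON carrying an fcn name and a three-element arg array, as handlePost expects. A hypothetical test request (host, port, and path depend on where the middleware is mounted):

const http = require('http');

const payload = JSON.stringify({ fcn: 'verification', arg: ['1', '2', '3'] });
const req = http.request({
    hostname: 'localhost', // assumption: local test server
    port: 3000,
    path: '/',
    method: 'POST',
    headers: {
        'Content-Type': 'application/json',
        'Content-Length': Buffer.byteLength(payload)
    },
}, (res) => {
    let body = '';
    res.on('data', (chunk) => { body += chunk; });
    res.on('end', () => console.log(body)); // Expected: {"result":6}
});
req.end(payload);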