Unable to access XML response in Postman pre-request script

I'm having trouble extracting the XML response from within the Postman pre-request script. My pre-request script looks like the following:
pm.sendRequest({
    url: "https://login.salesforce.com/services/Soap/u/40.0",
    method: 'POST',
    header: {
        'soapaction': "Required",
        'Content-Type': 'text/xml',
    },
    body: {
        mode: 'raw',
        raw: "<se:Envelope xmlns:se=\"http://schemas.xmlsoap.org/soap/envelope/\"> \
            <se:Header/> \
            <se:Body> \
            <login xmlns=\"urn:partner.soap.sforce.com\"> \
            <username>user</username> \
            <password>password</password> \
            </login> \
            </se:Body> \
            </se:Envelope>"
    }
}, function (err, responseBody) {
    console.log(pm.request.body);
    var xmlTree = xml2Json(responseBody);
    console.log(xmlTree);
    console.log("sessionid: " + xmlTree['soapenv:Envelope']['soapenv:Body'].loginResponse.result.sessionId);
    //pm.environment.set("sys_id", response.json().result.sys_id);
    postman.setEnvironmentVariable("sessionid", xmlTree['soapenv:Envelope']['soapenv:Body'].loginResponse.result.sessionId);
});
When I look at the response in the Postman console I can see the correct response; however, when I try to access it via "pm.request.body" or simply "responseBody" and log it, I get an empty/null value.
Any thoughts?
Additionally, I'm able to do the exact same thing with a URL that responds with a JSON payload: using the same method, I can extract the response and work with the values. Does anyone know if there's something different about XML responses, or if they should be handled differently?

It turned out to be a super easy solution: after digging around some more I found that the response was a "Stream" object, which needed to be deserialized with the text() function.
pm.sendRequest(loginRequest, function (err, response) {
    var xmlTree = xml2Json(response.text());
    var sessionid = xmlTree['soapenv:Envelope']['soapenv:Body'].loginResponse.result.sessionId;
    console.log("sessionid: " + sessionid);
    pm.environment.set("sessionid", sessionid);
});
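Here loginRequest is just the same request options the original script passed inline, pulled out into a variable; a minimal sketch, with the same SOAP envelope and placeholder credentials as above:
// Sketch of the loginRequest referenced above, assembled from the original inline request.
const loginRequest = {
    url: "https://login.salesforce.com/services/Soap/u/40.0",
    method: 'POST',
    header: {
        'soapaction': "Required",
        'Content-Type': 'text/xml'
    },
    body: {
        mode: 'raw',
        raw: '<se:Envelope xmlns:se="http://schemas.xmlsoap.org/soap/envelope/">' +
             '<se:Header/><se:Body>' +
             '<login xmlns="urn:partner.soap.sforce.com">' +
             '<username>user</username><password>password</password>' +
             '</login></se:Body></se:Envelope>'
    }
};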

Related

Postman test script - how to call an api twice to simulate 409 error

I am trying to run a few automated tests using the Postman tool. For regular scenarios, I understand how to write pre-request and test scripts. What I do not know (and am trying to understand) is how to write scripts for checking a 409 error (let us call it a duplicate resource check).
I want to run a create-resource API like the one below, then run it again and ensure that the second invocation really returns a 409 error.
POST /myservice/books
Is there a way to run the same API twice and check the return value of the second invocation? If yes, how do I do that? One crude way of achieving this could be to create a dependency between two tests, where the first one creates a resource and the second one uses the same payload once again to create the same resource. I am looking for a single test that does the end-to-end check.
Postman doesn't really provide a standard way to do this, but it is still flexible. I realized that we have to write JavaScript code in the Pre-request Script tab to send our own HTTP request (using the sendRequest method) and store the resulting data in environment variables for use by the main API call.
Here is a sample:
var phone = pm.variables.replaceIn("{{$randomPhoneNumber}}");
console.log("phone:", phone);
var baseURL = pm.variables.replaceIn("{{ROG_SERVER}}:{{ROG_PORT}}{{ROG_BASE_URL}}");
var usersURL = pm.variables.replaceIn("{{ROG_SERVICE}}/users");
var otpURL = `${baseURL}/${phone}/_otp_x`;

// Payload for partner creation
const payload = {
    "name": pm.variables.replaceIn("{{username}}"),
    "phone": phone,
    "password": pm.variables.replaceIn("{{$randomPassword}}"),
};
console.log("user payload:", payload);

function getOTP (a, callback) {
    // Get an OTP
    pm.sendRequest(otpURL, function (err, response) {
        if (err) throw err;
        var jsonData = response.json();
        pm.expect(jsonData).to.haveOwnProperty('otp');
        pm.environment.set("otp", jsonData.otp);
        pm.environment.set("phone", phone);
        pm.environment.set("username", "{{$randomUserName}}");
        if (callback) callback(jsonData.otp);
    });
}

// Get an OTP
getOTP("a", otp => {
    console.log("OTP received:", otp);
    payload.partnerRef = pm.variables.replaceIn("{{$randomPassword}}");
    payload.otp = otp;
    // Create a partner user with the OTP.
    let reqOpts = {
        url: usersURL,
        method: 'POST',
        headers: { 'Content-Type': 'application/json' },
        body: JSON.stringify(payload)
    };
    pm.sendRequest(reqOpts, (err, response) => {
        console.log("response?", response);
        pm.expect(response).to.have.property('code', 201);
    });
    // Get a new OTP for the main request to be executed.
    getOTP();
});
I did it in my test block: create your normal request as you would send it, then, in your tests, validate that the original works, and then send the second command and validate its response.
You can also use pre-request and test scripts to do something similar, or have one test after the other in the collection (they run sequentially) to do the same testing.
For instance, I sent an API call here to create records. As I need the Key_ value to delete them, I can make a call to GET /foo on my API:
pm.test("Response should be 200", function () {
pm.response.to.be.ok;
pm.response.to.have.status(200);
});
pm.test("Parse Key_ values and send DELETE from original request response", function () {
var jsonData = JSON.parse(responseBody);
jsonData.forEach(function (TimeEntryRecord) {
console.log(TimeEntryRecord.Key_);
const DeleteURL = pm.variables.get('APIHost') + '/bar/' + TimeEntryRecord.Key_;
pm.sendRequest({
url: DeleteURL,
method: 'DELETE',
header: { 'Content-Type': 'application/json' },
body: { TimeEntryRecord }
}, function (err, res) {
console.log("Sent Delete: " + DeleteURL );
});
});
});
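Applying the same idea to the original 409 scenario, a minimal sketch for the Tests tab of the create request could look like the following. The {{baseUrl}} variable and the 201 status of the first call are assumptions; the /myservice/books path is the one from the question:
pm.test("First create succeeds", function () {
    pm.response.to.have.status(201);   // assumed success code for the create call
});

pm.test("Second create with the same payload returns 409", function (done) {
    pm.sendRequest({
        url: pm.variables.replaceIn("{{baseUrl}}") + "/myservice/books",  // hypothetical base URL variable
        method: 'POST',
        header: { 'Content-Type': 'application/json' },
        body: {
            mode: 'raw',
            raw: pm.request.body.raw   // re-use the exact payload of the original request
        }
    }, function (err, res) {
        pm.expect(err).to.be.null;
        pm.expect(res).to.have.property('code', 409);
        done();
    });
});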

Postman access refresh_token?

I configured Postman to use the authorization code flow. The problem is that our tokens expire pretty fast and I need to re-run the flow every time one expires. So I was thinking of implementing a refresh_token flow in the pre-request script (unless there is a Postman-native way to do it).
Now my question is where can I find the refresh_token? Is there any way to access it or is it 'thrown away' and only the access_token is used?
Add the following code to the collection's Pre-request Script. (Edit it to use your own URL and body.)
// Set refresh and access tokens
// Set refresh and access tokens
const loginRequest = {
    url: pm.environment.get("mainUrl") + "/authenticate/login", // the URL that returns the tokens
    method: 'POST',
    header: {
        'content-type': 'application/json',
        'Accept': "*/*"
    },
    body: {
        mode: 'raw',
        raw: JSON.stringify({ // your body
            "username": pm.environment.get("username"),
            "password": pm.environment.get("password")
        })
    }
};
pm.sendRequest(loginRequest, function (err, res) {
    // Store the tokens returned in the response in environment variables
    pm.environment.set("accessToken", res.json().accessToken);
    pm.environment.set("refreshToken", res.json().refreshToken);
});
This script runs before every request in the collection.
Finally, in the "Token" field on the "Authorization" tab of your requests, reference the accessToken value from the environment:
{{accessToken}}
Each time a request runs, the script will refresh the token value, and the request will use it.
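If you would rather use the stored refreshToken to renew only the access token (instead of logging in with the username and password on every run), the same pattern applies. The /authenticate/refresh endpoint and its body below are assumptions; adapt them to whatever your API actually exposes:
// Hypothetical refresh call: exchange the stored refreshToken for a new accessToken.
const refreshRequest = {
    url: pm.environment.get("mainUrl") + "/authenticate/refresh", // assumed endpoint
    method: 'POST',
    header: {
        'content-type': 'application/json',
        'Accept': "*/*"
    },
    body: {
        mode: 'raw',
        raw: JSON.stringify({
            "refreshToken": pm.environment.get("refreshToken") // assumed body shape
        })
    }
};

pm.sendRequest(refreshRequest, function (err, res) {
    if (err) { console.log(err); return; }
    pm.environment.set("accessToken", res.json().accessToken);
    pm.environment.set("refreshToken", res.json().refreshToken);
});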

AWS Lambda / API Gateway Not passing through encoding

I'm trying to convert a working Lumen API service to AWS, and am stumped on getting an external REST API service to work. The service returns compressed data, but this fact isn't being passed back properly to the app (Vue) in the browser. I tried adding the headers to the response, as shown below, but it still isn't working. I can see the headers on the response in the browser console, but the browser still isn't interpreting them, so the data still looks like garbage. Any clues as to how to make this work?
var req = require('request');

exports.handler = function (event, context, callback) {
    const params = {
        url: 'http://api.service',
        headers: {
            'Authorization': 'code',
            'Accept-Encoding': 'gzip,deflate',
            'Content-Type': 'application/json'
        },
        json: {
            'criteria': {
                'checkInDate': '2019-10-22',
                'checkOutDate': '2019-10-25',
                'additional': { 'minimumStarRating': 0 },
                'cityId': 11774
            }
        }
    };
    req.post(params, function (err, res, body) {
        if (err) {
            callback(err, null);
        } else {
            callback(null, {
                "statusCode": 200,
                "headers": {
                    "Content-Type": "application/json",
                    "Content-Encoding": "gzip"
                },
                "body": body
            });
        }
    });
};
If you are seeing scrambled characters, chances are you have not yet configured API Gateway to treat your Lambda response as binary (since it comes out of your Lambda gzipped).
Take a look at the documentation:
https://docs.aws.amazon.com/apigateway/latest/developerguide/api-gateway-payload-encodings-configure-with-console.html
And this article:
"Unfortunately, the API Gateway is currently oblivious of gzip. If we're using a HTTP proxy, and the other HTTP endpoint returns a gzipped response, it'll try to reencode it, garbling the response. We'll have to tell the API Gateway to treat our responses as binary files — not touching it in any way."
https://techblog.commercetools.com/gzip-on-aws-lambda-and-api-gateway-5170bb02b543
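A related workaround, if you control the Lambda, is to produce the gzipped body yourself and return it base64-encoded with isBase64Encoded set, so that API Gateway (with binary media types configured, e.g. */*) passes the compressed bytes through untouched. This is a sketch under those assumptions, not the configuration-only fix described above:
// Hypothetical handler: gzip the JSON payload in the Lambda and let API Gateway
// forward it as binary. Requires binary media types to be enabled on the API.
const zlib = require('zlib');

exports.handler = function (event, context, callback) {
    const payload = JSON.stringify({ message: 'example response' }); // placeholder payload
    const gzipped = zlib.gzipSync(payload);

    callback(null, {
        statusCode: 200,
        headers: {
            'Content-Type': 'application/json',
            'Content-Encoding': 'gzip'
        },
        isBase64Encoded: true,              // tells API Gateway the body is base64-encoded binary
        body: gzipped.toString('base64')
    });
};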

Is it theoretically possible to use `putRecord` in Kinesis via singular http POST request?

I've been experiencing some issues with AWS Kinesis, inasmuch as I have a stream set up and I want to use a standard HTTP POST request to invoke a Kinesis PutRecord call on my stream. I'm doing this because the bundle size of my resulting JavaScript application matters, and I'd rather not import the aws-sdk to accomplish something that should (on paper) be possible.
Just so you know, I've looked at another Stack Overflow question about the same thing, and it was... sort of informational.
Now, I already have a method to SigV4-sign a request using an access key, secret key, and session token, but when I finally get the result of signing the request and send it using the in-browser fetch API, the service tanks with an internal failure (or with a JSON object citing the same thing, depending on my Content-Type header, I guess) as the result.
Here's the code I'm working with:
// There is a global function "sign" that does sigv4 signing
// ...
// There is a global function "sign" that does sigv4 signing
// ...
var payload = {
    Data: { task: "Get something working in kinesis" },
    PartitionKey: "1",
    StreamName: "MyKinesisStream"
};

var credentials = {
    "accessKeyId": "<access.key>",
    "secretAccessKey": "<secret.key>",
    "sessionToken": "<session.token>",
    "expiration": 1528922673000
};

function signer({ url, method, data }) {
    // Wrapping with URL for piecemeal picking of parsed pieces
    const parsed = new URL(url);
    const [ service, region ] = parsed.host.split(".");
    const signed = sign({
        method,
        service,
        region,
        url,
        // Hardcoded
        headers : {
            Host : parsed.host,
            "Content-Type" : "application/json; charset=UTF-8",
            "X-Amz-Target" : "Kinesis_20131202.PutRecord"
        },
        body : JSON.stringify(data),
    }, credentials);
    return signed;
}

// Specify method, url, data body
var signed = signer({
    method: "POST",
    url: "https://kinesis.us-west-2.amazonaws.com",
    data : JSON.stringify(payload)
});

var request = fetch(signed.url, signed);
When I look at the result of the request, I get this:
{
    Output: {
        __type: "com.amazon.coral.service#InternalFailure"
    },
    Version: "1.0"
}
Now I'm unsure whether Kinesis is actually failing here, or whether my input is malformed.
Here's what the signed request looks like:
{
    "method": "POST",
    "service": "kinesis",
    "region": "us-west-2",
    "url": "https://kinesis.us-west-2.amazonaws.com",
    "headers": {
        "Host": "kinesis.us-west-2.amazonaws.com",
        "Content-Type": "application/json; charset=UTF-8",
        "X-Amz-Target": "Kinesis_20131202.PutRecord",
        "X-Amz-Date": "20180613T203123Z",
        "X-Amz-Security-Token": "<session.token>",
        "Authorization": "AWS4-HMAC-SHA256 Credential=<access.key>/20180613/us-west-2/kinesis/aws4_request, SignedHeaders=content-type;host;x-amz-target, Signature=ba20abb21763e5c8e913527c95a0c7efba590cf5ff1df3b770d4d9b945a10481"
    },
    "body": "\"{\\\"Data\\\":{\\\"task\\\":\\\"Get something working in kinesis\\\"},\\\"PartitionKey\\\":\\\"1\\\",\\\"StreamName\\\":\\\"MyKinesisStream\\\"}\"",
    "test": {
        "canonical": "POST\n/\n\ncontent-type:application/json; charset=UTF-8\nhost:kinesis.us-west-2.amazonaws.com\nx-amz-target:Kinesis_20131202.PutRecord\n\ncontent-type;host;x-amz-target\n508d2454044bffc25250f554c7b4c8f2e0c87c2d194676c8787867662633652a",
        "sts": "AWS4-HMAC-SHA256\n20180613T203123Z\n20180613/us-west-2/kinesis/aws4_request\n46a252f4eef52991c4a0903ab63bca86ec1aba09d4275dd8f5eb6fcc8d761211",
        "auth": "AWS4-HMAC-SHA256 Credential=<access.key>/20180613/us-west-2/kinesis/aws4_request, SignedHeaders=content-type;host;x-amz-target, Signature=ba20abb21763e5c8e913527c95a0c7efba590cf5ff1df3b770d4d9b945a10481"
    }
}
(The test key is used by the library that generates the signature, so ignore that.)
(Also, there are probably extra slashes in the body because I pretty-printed the response object using JSON.stringify.)
My question: is there something I'm missing? Does Kinesis require headers a, b, and c, and I'm only generating two of them? Or is this internal error an actual failure? I'm lost, because the response suggests nothing I can do on my end.
I appreciate any help!
Edit: As a secondary question, am I using the X-Amz-Target header correctly? That is how you reference the service operation you're calling as long as you're hitting that service's endpoint, no?
Update: Following Michael's comments, I've gotten somewhere, but I still haven't solved the problem. Here's what I did:
I made sure that in my payload I'm only running JSON.stringify on the Data property.
I also modified the Content-Type header to be "Content-Type" : "application/x-amz-json-1.1", and as a result I'm getting slightly more useful error messages back.
Now, my payload is still mostly the same:
var payload = {
    Data: JSON.stringify({ task: "Get something working in kinesis" }),
    PartitionKey: "1",
    StreamName: "MyKinesisStream"
}
and my signer function body looks like this:
function signer({ url, method, data }) {
    // Wrapping with URL for piecemeal picking of parsed pieces
    const parsed = new URL(url);
    const [ service, region ] = parsed.host.split(".");
    const signed = sign({
        method,
        service,
        region,
        url,
        // Hardcoded
        headers : {
            Host : parsed.host,
            "Content-Type" : "application/x-amz-json-1.1",
            "X-Amz-Target" : "Kinesis_20131202.PutRecord"
        },
        body : data,
    }, credentials);
    return signed;
}
So I'm passing in an object that is partially serialized (at least Data is) and when I send this to the service, I get a response of:
{"__type":"SerializationException"}
which is at least marginally helpful because it tells me that my input is technically incorrect. However, I've done a few things in an attempt to correct this:
I've run JSON.stringify on the entire payload
I've changed my Data key to just be a string value to see if it would go through
I've tried running JSON.stringify on Data and then running btoa because I read on another post that that worked for someone.
But I'm still getting the same error. I feel like I'm so close. Can you spot anything I might be missing or something I haven't tried? I've gotten sporadic UnknownOperationException errors, but right now this SerializationException has me stumped.
Edit 2:
As it turns out, Kinesis will only accept a base64-encoded string for Data. This is probably a nicety that the aws-sdk handles for you, but essentially all it took was Data: btoa(JSON.stringify({ task: "data" })) in the payload to get it working.
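Putting the pieces from this thread together, the working request presumably ends up looking roughly like this (a sketch based on the snippets above, not an exact copy of my final code):
// Data is the base64 of the JSON string; the whole payload is stringified exactly once.
var payload = {
    Data: btoa(JSON.stringify({ task: "Get something working in kinesis" })),
    PartitionKey: "1",
    StreamName: "MyKinesisStream"
};

// The signer keeps Content-Type "application/x-amz-json-1.1" and uses the string as-is
// (body: data), so nothing gets double-encoded.
var signed = signer({
    method: "POST",
    url: "https://kinesis.us-west-2.amazonaws.com",
    data: JSON.stringify(payload)
});

fetch(signed.url, signed);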
While I'm not certain this is the only issue, it seems like you are sending a request body that contains an incorrectly serialized (double-encoded) payload.
var obj = { foo: 'bar'};
JSON.stringify(obj) returns a string...
'{"foo": "bar"}' // the ' are not part of the string, I'm using them to illustrate that this is a thing of type string.
...and when parsed with a JSON parser, this returns an object.
{ foo: 'bar' }
However, JSON.stringify(JSON.stringify(obj)) returns a different string...
'"{\"foo\": \"bar\"}"'
...but when parsed, this returns a string.
'{"foo": "bar"}'
The service endpoint expects to parse the body and get an object, not a string... so, parsing the request body (from the service's perspective) doesn't return the correct type. The error seems to be a failure of the service to parse your request at a very low level.
In your code, body: JSON.stringify(data) should just be body: data, because earlier you already created a JSON string with data: JSON.stringify(payload).
As written, you are effectively setting body to JSON.stringify(JSON.stringify(payload)).
Not sure if you ever figured this out, but this question pops up on Google when searching for how to do this. The one piece I think you are missing is that the record's Data field must be base64-encoded. Here's a chunk of Node.js code that will do this (using PutRecords).
And for anyone asking "why not just use the SDK?": I currently must stream data from a cluster that cannot be updated to a Node.js version that the SDK requires, due to other dependencies. Yay.
const https = require('https')
const aws4 = require('aws4')

const request = function (o) {
    https.request(o, function (res) { res.pipe(process.stdout) }).end(o.body || '')
}

const _publish_kinesis = function (logs) {
    const kin_logs = logs.map(function (l) {
        let blob = JSON.stringify(l) + '\n'
        let buff = Buffer.from(blob, 'binary');
        let base64data = buff.toString('base64');
        return {
            Data: base64data,
            PartitionKey: '0000'
        }
    })

    while (kin_logs.length > 0) {
        let data = JSON.stringify({
            Records: kin_logs.splice(0, 250),
            StreamName: 'your-streamname'
        })

        let _request = aws4.sign({
            hostname: 'kinesis.us-west-2.amazonaws.com',
            method: 'POST',
            body: data,
            path: '/?Action=PutRecords',
            headers: {
                'Content-Type': 'application/x-amz-json-1.1',
                'X-Amz-Target': 'Kinesis_20131202.PutRecords'
            },
        }, {
            secretAccessKey: "****",
            accessKeyId: "****"
            // sessionToken: "<your-session-token>"
        })

        request(_request)
    }
}

var logs = [{
    'timeStamp': new Date().toISOString(),
    'value': 'test02',
}, {
    'timeStamp': new Date().toISOString(),
    'value': 'test01',
}]

_publish_kinesis(logs)

Postman - Using complete response from one request as body of another request

I have a case where the exact response from one request needs to be the body of another request.
Is there an easy way to store the response from one request to reuse as the body of another request in Postman?
I tried storing the response body in a global variable customerData and then having the body of my other request be {{customerData}} but this does not work.
Thanks for the help.
You could achieve this by using the pm.sendRequest() function in the Tests tab of your first GET request. This will run once the first request has completed and will then POST that same response data to another endpoint.
This is a very basic example that can be added to the Tests tab; it can be changed/adapted to your own context:
pm.sendRequest({
    url: 'localhost:3000/post',
    method: 'POST',
    headers: {
        "Content-Type": "application/json"
    },
    body: {
        mode: 'raw',
        raw: JSON.stringify(pm.response.json())
    }
}, (err, res) => {
    console.log(res)
})
In Postman, I sent a basic request to the /get route and, in the Tests tab, used that response data as the payload for the POST request by passing in pm.response.json(). The request body for the /post route is then taken from the first request.
API-1 - Test Tab
// Get the response of the first API
var jsonData = JSON.parse(responseBody);
// Convert the JSON response to a string
var jsonString = JSON.stringify(jsonData);
// Store it in an environment variable
pm.environment.set("MyPayLoad", jsonString);
API-2 - Body
{{MyPayLoad}}
This way the response from API-1 is passed as the payload to API-2.