Cannot POST when sending request in Postman

I am having a small issue. I am trying to send a request in Postman, but I receive "Cannot POST /api/logTemp/1/25".
Here is my app.js:
const express = require('express')
const bodyParser = require('body-parser')
const cors = require('cors')
const fs = require('fs')
const path = require('path')
const morgan = require('morgan')
const router = require('./routes/route')

const app = express()
app.use(cors())

// parse application/x-www-form-urlencoded
app.use(bodyParser.urlencoded({ extended: false }))
// parse application/json
app.use(bodyParser.json())
app.use(morgan('dev'))

// create a write stream (in append mode)
var accessLogStream = fs.createWriteStream(path.join(__dirname, '/logs/access.log'), { flags: 'a' })
// setup the logger
app.use(morgan('combined', { stream: accessLogStream }))

app.use(router)

app.get('/', (req, res) => {
    res.send('Hello World!')
})

const port = 3000
//app.listen(process.env.PORT || port, (err) => {
app.listen(port, () => {
    console.log('Server started running on :' + port)
})
And here is my controller file:
const { getEnabledCategories } = require('trace_events');
const mysql = require('../database/db')

class MainController {
    async logTemp(req, res) {
        console.log(req.params.temperature)
        console.log(req.params.deviceID)
        if (req.params.deviceID != null && req.params.temperature != null) {
            let deviceID = req.params.deviceID
            let temperature = req.params.temperature;
            var sql = `insert into log_temp (log_date, device_id, temp) values (now(), ${deviceID}, ${temperature});`
            mysql.query(sql, (error, data, fields) => {
                if (error) {
                    res.status(500)
                    res.send(error.message)
                } else {
                    console.log(data)
                    res.json({
                        status: 200,
                        message: "Log uploaded successfully",
                        affectedRows: data.affectedRows
                    })
                }
            })
        } else {
            res.send('Please fill in all the data!')
        }
    }

    async getLogs(req, res) {
        console.log("Get Logs")
        console.log(req.params.deviceID)
        if (req.params.deviceID != null) {
            let deviceID = req.params.deviceID;
            var sql = `SELECT * FROM log_temp where device_id=${deviceID}`
            mysql.query(sql, (error, data, fields) => {
                if (error) {
                    res.status(500)
                    res.send(error.message)
                } else {
                    console.log(data)
                    res.json({
                        data
                    })
                }
            })
        }
    }
}

const tempController = new MainController()
module.exports = tempController;
The code above was written in Visual Studio. It is odd because getLogs works but logTemp does not. What I intend to do with logTemp is add a new value (the temperature reading) to the MySQL database. The connection to the database works just fine, as does localhost. If you need any more info to help me find a solution, please let me know and I will be more than happy to provide it. Also, I'm sorry for any grammar errors, English is not my first language :)
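Since the error is "Cannot POST /api/logTemp/1/25", Express has no POST handler registered for that path. The routes file isn't shown above, but a likely cause is that routes/route.js registers logTemp with router.get while Postman is sending a POST, or that the path pattern doesn't match. A minimal sketch of a route file that would accept the Postman request; the file layout and controller path are assumptions, since route.js isn't included:

const express = require('express')
const router = express.Router()
// hypothetical path; adjust to wherever MainController actually lives
const tempController = require('../controllers/controller')

// must be registered as POST, otherwise Express answers "Cannot POST ..."
router.post('/api/logTemp/:deviceID/:temperature', tempController.logTemp)
router.get('/api/getLogs/:deviceID', tempController.getLogs)

module.exports = router

If the route is in fact registered with router.get, the alternative is to send the request as GET from Postman.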

Related

AWS: API gateway 502 error randomly with Runtime segmentation faults

I am using AWS and have an API that is called via API Gateway, which calls a Node.js Lambda function.
Very often, but randomly, I get 502 responses; when I immediately retry with the exact same request, I get a normal response. So I decided to search the logs to see if I could find any issues.
The following is what I found for one of the requests:
RequestId: xxxxx Error: Runtime exited with error: signal: segmentation fault Runtime.ExitError
as well as:
xxxx ERROR Uncaught Exception
{
    "errorType": "Error",
    "errorMessage": "Quit inactivity timeout",
    "code": "PROTOCOL_SEQUENCE_TIMEOUT",
    "fatal": true,
    "timeout": 30000,
    "stack": [
        "Error: Quit inactivity timeout",
        "    at Quit.<anonymous> (/opt/nodejs/node_modules/mysql/lib/protocol/Protocol.js:160:17)",
        "    at Quit.emit (node:events:527:28)",
        "    at Quit.emit (node:domain:475:12)",
        "    at Quit._onTimeout (/opt/nodejs/node_modules/mysql/lib/protocol/sequences/Sequence.js:124:8)",
        "    at Timer._onTimeout (/opt/nodejs/node_modules/mysql/lib/protocol/Timer.js:32:23)",
        "    at listOnTimeout (node:internal/timers:559:17)",
        "    at processTimers (node:internal/timers:502:7)"
    ]
}
The following is my reusable SQL connector:
const CustomSecret = require('../secrets/CustomSecret');
const mysql = require("mysql");

module.exports = class MySqlConnect {

    databaseCredObject;

    constructor() {
    }

    async queryDb(sql, args) {
        if (!this.databaseCredObject) {
            await this.fetchSecret();
        }
        let connection = null;
        const connection_settings = {
            host: this.databaseCredObject.host,
            user: this.databaseCredObject.username,
            password: this.databaseCredObject.password,
            database: 'logbook'
        };
        connection = mysql.createConnection(connection_settings);
        return new Promise((resolve, reject) => {
            connection.connect(function (err) {
                if (err) {
                    console.log('error when connecting to db:', err);
                } else {
                    console.log('Connected');
                    connection.query(sql, args, function (err, result) {
                        connection.end();
                        if (err) {
                            return reject(err);
                        }
                        return resolve(result);
                    });
                }
            });
        });
    }

    async fetchSecret() {
        const databaseCredString = await CustomSecret.getSecret('secretname', 'eu-west-2');
        this.databaseCredObject = JSON.parse(databaseCredString);
    }
}
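One detail worth flagging in this connector: when connection.connect fails, the error is only logged and the promise is never settled, so the awaiting Lambda hangs until it times out. Whether or not this is related to the 502s, a rejection on that path makes failures visible. A small sketch of just that handler, everything else unchanged:

connection.connect(function (err) {
    if (err) {
        console.log('error when connecting to db:', err);
        return reject(err); // settle the promise so the caller fails fast instead of hanging
    }
    // ... run the query and resolve as before ...
});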
Finally, this is an example of my Lambda function (shortened version):
const {compress, decompress} = require("compress-json");
const MySqlConnect = require("customPackagePath/MySqlConnect");
const CustomJwt = require("customPackagePath/CustomJwt");
const AWS = require("aws-sdk");
const warmer = require("lambda-warmer");

exports.handler = async (event) => {
    if (await warmer(event)) {
        console.log("Warming");
        return 'warmed';
    }

    let responseCode = 200;
    let response = {};
    response.headers = {
        'Content-Type': 'application/json',
        'Access-Control-Allow-Origin': '*'
    };

    const bodyContent = JSON.parse(event.body);
    const dataType = bodyContent.dataType;

    const webAuth = new CustomJwt();
    const decodedToken = webAuth.decodeToken(event.headers.Authorization);
    const userUUID = decodedToken['uuid'];

    const connection = new MySqlConnect();
    let sql;

    switch (dataType) {
        case 'userPreferences':
            sql = await connection.queryDb('SELECT * FROM user WHERE uuid = ?', [userUUID]);
            break;
    }

    let data = [];
    for (let index in sql) {
        data.push(JSON.parse(JSON.stringify(sql[index])));
    }

    const returnData = {
        data
    };

    let compressed = compress(returnData);
    response.statusCode = responseCode;
    response.body = JSON.stringify(compressed);
    return response;
};
Now, I am new to infrastructure stuff, but it seems to me that once a Lambda function has been called, it's not closing or ending correctly. I am also using lambda-warmer to keep the functions warm, as seen in the Lambda code, and I'm not sure whether that is causing any issues.
I'd appreciate any help with this, as I can't seem to figure it out.
Thanks
After doing more research, I decided to change my Lambda function's handler signature to:
exports.handler = async (event, context, callback) => {
and to return like this:
callback(null, response);
and ever since, the issue seems to have been resolved. I am not entirely sure why, but for now it's looking good :)
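For completeness, the resulting handler shape is roughly this (a sketch; the response object is built exactly as in the original code above):

exports.handler = async (event, context, callback) => {
    // ... warmer check, JWT decode, query, and response building as before ...
    callback(null, response); // return the response via the callback instead of `return response;`
};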

Send file from GCP bucket to 3rd party pdf converter

I am trying to adapt a Qwiklabs tutorial to use pdfCrowd rather than LibreOffice.
The service works by downloading a file from one GCP storage bucket for 'uploads', processing it, uploading the result to another bucket for 'processed' files, and then deleting the original from the 'uploads' bucket.
This is the function that downloads, sends the file off for conversion, uploads, then deletes. This is the code from the Qwiklabs tutorial, and it works great.
app.post('/', async (req, res) => {
    try {
        const file = decodeBase64Json(req.body.message.data);
        await downloadFile(file.bucket, file.name);
        const pdfFileName = await convertFile(file.name);
        await uploadFile(process.env.PDF_BUCKET, pdfFileName);
        await deleteFile(file.bucket, file.name);
    }
    catch (ex) {
        console.log(`Error: ${ex}`);
    }
    res.set('Content-Type', 'text/plain');
    res.send('\n\nOK\n\n');
})
The original convertFile function is:
async function convertFile(fileName) {
    const cmd = 'libreoffice --headless --convert-to pdf --outdir /tmp ' +
        `"/tmp/${fileName}"`;
    console.log(cmd);
    const { stdout, stderr } = await exec(cmd);
    if (stderr) {
        throw stderr;
    }
    console.log(stdout);
    pdfFileName = fileName.replace(/\.\w+$/, '.pdf');
    return pdfFileName;
}
The problem comes when I change my convertFile function: LibreOffice accepts file.name, but pdfCrowd wants the file path.
So I changed the function to use pdfCrowd:
async function convertFile(fileName) {
    // create the API client instance
    const _newPdfPath = `/tmp/${fileName.replace(/\.\w+$/, '.pdf')}`
    const client = new pdfcrowd.HtmlToPdfClient("demo", "ce544b6ea52a5621fb9d55f8b542d14d");
    // run the conversion and write the result to a file
    client.convertFileToFile(`/tmp/${fileName}`, _newPdfPath, function (err, fileName) {
        if (err)
            return console.error("Pdfcrowd Error: " + err);
        console.log("Success: the file was created " + fileName);
    });
    pdfFileName = fileName.replace(/\.\w+$/, '.pdf');
    return pdfFileName;
}
Now the PDF conversion reports success, but only after an error saying there is no file or directory at the 'out' path I passed to convertFileToFile. The file specified by _newPdfPath doesn't exist at that point:
Error: Error: ENOENT: no such file or directory, stat '/tmp/mynew.pdf'
Success: the file was created /tmp/hello (31).pdf
The pdfCrowd call does eventually create a file in the /tmp directory, but is the async flow actually waiting for the file to be created before it is used?
My complete code is:
const {promisify} = require('util');
const {Storage} = require('@google-cloud/storage');
const exec = promisify(require('child_process').exec);
const storage = new Storage();
const express = require('express');
const bodyParser = require('body-parser');
const app = express();
const pdfcrowd = require("pdfcrowd");

app.use(bodyParser.json());

const port = process.env.PORT || 8080;
app.listen(port, () => {
    console.log('Listening on port', port);
});

app.post('/', async (req, res) => {
    try {
        const file = decodeBase64Json(req.body.message.data);
        // console.log("FILE=========", file, req.body.message.data)
        await downloadFile(file.bucket, file.name);
        const pdfFileName = await convertFile(file.name);
        await uploadFile(process.env.PDF_BUCKET, pdfFileName);
        await deleteFile(file.bucket, file.name);
    }
    catch (ex) {
        console.log(`Error: ${ex}`);
    }
    res.set('Content-Type', 'text/plain');
    res.send('\n\nOK\n\n');
})

function decodeBase64Json(data) {
    return JSON.parse(Buffer.from(data, 'base64').toString());
}

async function downloadFile(bucketName, fileName) {
    const options = {destination: `/tmp/${fileName}`};
    await storage.bucket(bucketName).file(fileName).download(options);
}

async function convertFile(fileName) {
    // create the API client instance
    const _newPdfPath = `/tmp/${fileName.replace(/\.\w+$/, '.pdf')}`
    const client = new pdfcrowd.HtmlToPdfClient("demo", "ce544b6ea52a5621fb9d55f8b542d14d");
    // run the conversion and write the result to a file
    client.convertFileToFile(`/tmp/${fileName}`, _newPdfPath, function (err, fileName) {
        if (err)
            return console.error("Pdfcrowd Error: " + err);
        console.log("Success: the file was created " + fileName);
    });
    pdfFileName = fileName.replace(/\.\w+$/, '.pdf');
    return pdfFileName;
}

async function deleteFile(bucketName, fileName) {
    await storage.bucket(bucketName).file(fileName).delete();
}

async function uploadFile(bucketName, fileName) {
    await storage.bucket(bucketName).upload(`/tmp/${fileName}`);
}
The problem is that your convertFile function finishes before the convertFileToFile callback is invoked.
I'd pass success and failure callbacks into convertFile, e.g.:
app.post('/', async (req, res) => {
    try {
        const file = decodeBase64Json(req.body.message.data);
        // console.log("FILE=========", file, req.body.message.data)
        await downloadFile(file.bucket, file.name);
        let on_pdf_done = async function(pdfFileName) {
            await uploadFile(process.env.PDF_BUCKET, pdfFileName);
            await deleteFile(file.bucket, file.name);
            res.set('Content-Type', 'text/plain');
            res.send('\n\nOK\n\n');
        };
        let on_pdf_fail = function() {
            res.set('Content-Type', 'text/plain');
            res.send('\n\nERROR\n\n');
        };
        convertFile(file.name, on_pdf_done, on_pdf_fail);
    }
    catch (ex) {
        console.log(`Error: ${ex}`);
    }
})

function convertFile(fileName, success_callback, fail_callback) {
    // create the API client instance
    const _newPdfPath = `/tmp/${fileName.replace(/\.\w+$/, '.pdf')}`
    const client = new pdfcrowd.HtmlToPdfClient("demo", "ce544b6ea52a5621fb9d55f8b542d14d");
    // run the conversion and write the result to a file
    client.convertFileToFile(`/tmp/${fileName}`, _newPdfPath, function (err, fileName) {
        if (err)
            return fail_callback();
        success_callback(fileName.replace(/\.\w+$/, '.pdf'));
    });
}
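An equivalent option, if you'd rather keep the await-based flow in the POST handler, is to wrap convertFileToFile in a Promise. A sketch under the same assumptions as the code above (demo credentials, files staged in /tmp):

function convertFile(fileName) {
    const newPdfPath = `/tmp/${fileName.replace(/\.\w+$/, '.pdf')}`;
    const client = new pdfcrowd.HtmlToPdfClient("demo", "ce544b6ea52a5621fb9d55f8b542d14d");
    return new Promise((resolve, reject) => {
        client.convertFileToFile(`/tmp/${fileName}`, newPdfPath, function (err, outPath) {
            if (err) return reject(err);
            // resolve with the bare file name, since uploadFile prepends /tmp/ itself
            resolve(fileName.replace(/\.\w+$/, '.pdf'));
        });
    });
}

With this version, `const pdfFileName = await convertFile(file.name);` in the original handler only continues once the PDF actually exists.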

Compare node_modules for each release

I'm looking for a way to compare package changes between releases.
I need a script that, when run, will show the differences (which packages are new, deleted, or updated) between master tagged releases by comparing their node_modules.
The result should be a list of all changed or new packages/modules within the node_modules tree.
I just came back from holiday, so this is how I attempted a solution:
const checker = require('license-checker')
const compareVersions = require('compare-versions')
const rimraf = require('rimraf')
const { WebClient } = require('@slack/web-api')
const { exec } = require('child_process')

// An access token (from your Slack app or custom integration - xoxp, xoxb)
const token =
  'xoxp-3712510934-8544rv58640-699363584817-a66630cfebf2f81e59478c3f8u0e178b'
const channel = 'where_to_post_report'

let prevReleasePackets
let currReleasePackets

// keeps entries whose name does not appear in otherArray at all
function comparerVersion(otherArray) {
  return current =>
    otherArray.filter(other => other.name === current.name).length === 0
}

// keeps entries that have a lower-version counterpart in otherArray, i.e. updated packages
function comparerVersionNo(otherArray) {
  return current =>
    otherArray.filter(
      other =>
        other.name === current.name &&
        compareVersions(other.version, current.version) === -1
    ).length > 0
}

function mapToData(libs) {
  return Object.keys(libs)
    .filter(key => key.indexOf('sm-web') === -1 && key.indexOf('debug') === -1)
    .map(key => {
      const lib = libs[key]
      const name = lib.name
        .replace(/@/g, '_')
        .replace(/\./g, '_')
        .replace(/\//g, '_')
      const version = lib.version.replace(/@/g, '_').replace(/\//g, '_')
      return {
        name,
        version,
      }
    })
}

function groupBy(objectArray, property) {
  return objectArray.reduce((acc, obj) => {
    const key = obj[property]
    if (!acc[key]) {
      acc[key] = []
    }
    acc[key].push(obj.version)
    return acc
  }, {})
}

function getPackets(path) {
  return new Promise((resolve, reject) => {
    checker.init(
      {
        start: path,
        production: true,
        customFormat: {
          name: '',
          version: '',
        },
      },
      (err, packages) => {
        if (err) {
          // Handle error
          console.log(err)
          reject(err)
        } else {
          // The sorted package data
          const packagesReduced = groupBy(mapToData(packages), 'name')
          const higherVersionList = []
          Object.keys(packagesReduced).forEach(key => {
            const versions = packagesReduced[key]
            const descVersions = versions.sort(compareVersions).reverse()
            higherVersionList.push({ name: key, version: descVersions[0] })
          })
          resolve(higherVersionList)
        }
      }
    )
  })
}

function clonePrevious(tag) {
  return new Promise((resolve, reject) => {
    exec(
      `git clone https://bitbucket.path_to_repo.git prevVersion && cd prevVersion && git checkout tags/${tag} && yarn && cd ..`,
      async (error, stdout, stderr) => {
        if (error) {
          console.warn(error)
          reject(error)
        }
        if (stdout) {
          const prevPacks = await getPackets('./prevVersion')
          resolve(prevPacks)
        } else {
          resolve(stderr)
        }
      }
    )
  })
}

async function sendReportToSlack(report) {
  const web = new WebClient(token)
  const res = await web.chat.postMessage({
    channel,
    text: report,
  })
  // `res` contains information about the posted message
  console.log('Report sent: ', res.ts)
}

const readline = require('readline').createInterface({
  input: process.stdin,
  output: process.stdout,
})

console.log('Deleting prevVersion folder')
rimraf('prevVersion', async () => {
  console.log('Done deleting prevVersion folder')
  readline.question(
    `What's the version tag for the previous release?`,
    async tag => {
      console.log(`Start cloning the release ${tag}!`)
      readline.close()
      prevReleasePackets = await clonePrevious(tag)
      currReleasePackets = await getPackets('./')
      const update = currReleasePackets.filter(
        comparerVersionNo(prevReleasePackets)
      )
      const deleted = prevReleasePackets.filter(
        comparerVersion(currReleasePackets)
      )
      const newPacks = currReleasePackets.filter(
        comparerVersion(prevReleasePackets)
      )
      const slackMessage =
        `*Packages changes from ${tag}*:\n\n` +
        `*--Updated--*: ${JSON.stringify(
          update
        )},\n\n *--Deleted--*: ${JSON.stringify(
          deleted
        )} \n\n *--New--*: ${JSON.stringify(newPacks)}`
      await sendReportToSlack(slackMessage)
    }
  )
})
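To make the comparison logic concrete, here is a tiny illustration of the three filters with made-up package lists (all names and versions are hypothetical):

const prev = [
  { name: 'lodash', version: '4.17.20' },
  { name: 'left-pad', version: '1.3.0' },
]
const curr = [
  { name: 'lodash', version: '4.17.21' },
  { name: 'express', version: '4.18.2' },
]

curr.filter(comparerVersion(prev))   // [{ name: 'express', ... }]  -> new packages
prev.filter(comparerVersion(curr))   // [{ name: 'left-pad', ... }] -> deleted packages
curr.filter(comparerVersionNo(prev)) // [{ name: 'lodash', ... }]   -> updated packages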

Session leak on spanner insert from cloud function

I'm trying to insert data into Spanner through a Cloud Function, using a POST request. I think I'm doing everything as described in the documentation, and I just can't understand what causes the following error:
"Error: 1 session leak(s) detected.
at _requests.onIdle.then (/srv/node_modules/#google-cloud/spanner/build/src/session-pool.js:193:25)
at <anonymous>"
And here is my Cloud Function:
const {Spanner} = require('@google-cloud/spanner');

module.exports.http = (req, res) => {
    const projectId = 'project-id';
    const instanceId = 'instance-id';
    const databaseId = 'database-id';

    const spanner = new Spanner({
        projectId: projectId,
    });

    const instance = spanner.instance(instanceId);
    const database = instance.database(databaseId);

    let sqlResponse = "";
    database.runTransaction(async (err, transaction) => {
        if (err) {
            res.status(500).send(JSON.stringify({message: err, requestBody: req.body}));
            return;
        }
        try {
            const data = req.body;
            const [rowCount] = await transaction.runUpdate({
                sql:
                    'INSERT Feedbacks (age, comment, gender, rating) VALUES (@age, @comment, @gender, @rating)',
                params: {
                    age: data.age.toString(),
                    comment: data.comment,
                    gender: data.gender,
                    rating: data.rating.toString(),
                },
            });
            sqlResponse = 'Successfully inserted ' + rowCount + ' record into the Feedbacks table.';
            await transaction.commit();
            res.status(200).send(JSON.stringify({message: sqlResponse, requestBody: req.body}));
        } catch (err) {
            res.status(500).send(JSON.stringify({message: err, requestBody: req.body}));
        } finally {
            database.close();
        }
    });
};
Your code appears to be correct. As noted by @Mayeru in the comments on your question, the first thing to confirm is that you're inserting a new record with a unique value for your table's primary key column.
Another possibility that could be causing the issue is that you are testing the function using the "Testing" UI in the Cloud Console's Cloud Functions > "Function details" section. If so, you may be submitting an empty or malformed request body when you click the "Test the function" button. In the "Triggering event" textarea that appears above the "Test the function" button, make sure you have entered a valid JSON request body that includes the elements and values your INSERT statement expects.
For example, a "Triggering event" JSON request body like the following should work:
{"singerId":"1001","firstName":"Test","lastName":"Singer"}
Using the following "nodeInsert" function that's similar to the code you've shared:
const {Spanner} = require('@google-cloud/spanner');

module.exports.nodeInsert = (req, res) => {
    const projectId = 'my-project';
    const instanceId = 'my-instance';
    const databaseId = 'my-database';

    const spanner = new Spanner({
        projectId: projectId,
    });

    const instance = spanner.instance(instanceId);
    const database = instance.database(databaseId);

    let sqlResponse = "";
    database.runTransaction(async (err, transaction) => {
        if (err) {
            res
                .status(500)
                .send(JSON.stringify({message: err, requestBody: req.body}));
            transaction.end();
            console.error('Transaction terminated.');
            return;
        }
        try {
            const data = req.body;
            const parsedSingerId = parseInt(data.singerId, 10);
            const [rowCount] = await transaction.runUpdate({
                sql:
                    'INSERT Singers (SingerId, FirstName, LastName) VALUES (@singerId, @firstName, @lastName)',
                params: {
                    singerId: parsedSingerId,
                    firstName: data.firstName,
                    lastName: data.lastName,
                },
            });
            sqlResponse = 'Successfully inserted ' + rowCount + ' record into the Singers table.';
            await transaction.commit();
            res
                .status(200)
                .send(JSON.stringify({message: sqlResponse, requestBody: req.body}));
        } catch (err) {
            res
                .status(500)
                .send(JSON.stringify({message: err, requestBody: req.body}));
            transaction.end();
            console.error('Transaction terminated.');
        } finally {
            database.close();
        }
    });
};
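Note the difference from your original function: transaction.end() is called on both error paths. Ending the transaction releases its session back to the pool, which appears to be what the "1 session leak(s) detected" warning points at: sessions that were checked out for a transaction but never released because the transaction errored out before commit() could run.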

Google Cloud functions (with Pubsub) error

We are using GCP with Cloud Functions (with Pub/Sub topic triggers), and though it works fine most of the time, we often see it throw the following authentication error:
Error: Request had invalid authentication credentials. Expected OAuth 2 access token, login cookie or other valid authentication credential. See https://developers.google.com/identity/sign-in/web/devconsole-project.
    at (/user_code/node_modules/@google-cloud/pubsub/node_modules/grpc/src/node/src/client.js:569)
Has anyone seen this, or does anyone know what causes it or how to fix it?
Cloud function code:
require('dotenv').config()
const datastore = require('@google-cloud/datastore')()
const pubsub = require('@google-cloud/pubsub')()
const promiseRetry = require('promise-retry')
const elasticsearch = require('elasticsearch')
const SupplierSearchManager = require('shared-managers').SupplierSearchManager
const BaseManager = require('shared-managers').BaseManager
const Util = require('shared-managers').Util
const StatsManager = require('shared-managers').StatsManager

var unitTestMode = false
var searchManagerMock, pubsubMock

exports.setUnitTestMode = (searchManagerMockP, pubsubMockP) => {
  unitTestMode = true
  searchManagerMock = searchManagerMockP
  pubsubMock = pubsubMockP
}

exports.updateElastic = event => {
  let dataObject
  try {
    dataObject = JSON.parse(Buffer.from(event.data.data, 'base64').toString())
  } catch (err) {
    console.log(err)
    console.log(event.data.data)
    return Promise.reject(err)
  }
  const baseManager = new BaseManager(datastore, new Util('base-manager'), pubsub)
  const statsManager = new StatsManager(baseManager, new Util('stats-manager'))

  let supplierId = dataObject.supplierId
  let retryCount = dataObject.retryCount
  let refresh = dataObject.refresh === true
  if (!retryCount) retryCount = 0

  return new Promise((resolve, reject) => {
    let elasticClient = new elasticsearch.Client({
      host: process.env.ELASTIC_HOST,
      httpAuth: process.env.ELASTIC_AUTH
    })
    const supplierSearchManager = unitTestMode ? searchManagerMock : new SupplierSearchManager(elasticClient, baseManager, statsManager)
    supplierSearchManager.indexSupplier(process.env.ELASTIC_INDEX, supplierId, refresh).then(resolve, err => {
      console.log(err)
      if (retryCount < 2) {
        const topic = unitTestMode ? pubsubMock : pubsub.topic(process.env.PUBSUB_PREFIX + process.env.PUBSUB_TOPIC_ELASTIC)
        promiseRetry({retries: 4, maxTimeout: 8000}, (retry, number) => {
          return topic.publish({ supplierId: supplierId, retryCount: retryCount + 1 }).catch(err => {
            retry(err)
          })
        }).then(resolve, err => {
          console.log(err)
          reject(err)
        })
      } else {
        resolve(true)
      }
    })
  })
}