I tried sending the command from Cloud Functions, and I am getting Error: The service is currently unavailable.
package.json
"dependencies": {
"firebase-admin": "~6.0.0",
"firebase-functions": "^2.0.3",
"googleapis": "34.0.0"
}
const parentName = `projects/${projectId}/locations/${cloudRegion}`;
const registryName = `${parentName}/registries/${reqData.registryId}`;
const binaryData = Buffer.from(JSON.stringify(reqData.message)).toString('base64');
const request = {
name: `${registryName}/devices/${reqData.deviceId}`,
binaryData: binaryData
};
google.auth.getClient().then((authClient) => {
const discoveryUrl =
`${DISCOVERY_API}?version=${API_VERSION}`;
if (authClient.createScopedRequired && authClient.createScopedRequired()) {
// Scopes can be specified either as an array or as a single,
// space-delimited string.
authClient = authClient.createScoped([
'https://www.googleapis.com/auth/cloud-platform'
]);
}
google.options({
auth: authClient
});
google.discoverAPI(discoveryUrl).then((client, err) => {
if (err) {
console.log('Error during API discovery', err);
return undefined;
}
client.projects.locations.registries.devices.sendCommandToDevice(request,
(err, data) => {
if (err) {
console.log('Could not send command:', request);
console.log('Message: ', err);
} else {
console.log('Success :', data.statusText);
}
});
});
});
Logs:
{ Error: The service is currently unavailable.
    at createError (/user_code/node_modules/googleapis/node_modules/axios/lib/core/createError.js:16:15)
    at settle (/user_code/node_modules/googleapis/node_modules/axios/lib/core/settle.js:18:12)
    at Unzip.handleStreamEnd (/user_code/node_modules/googleapis/node_modules/axios/lib/adapters/http.js:201:11)
    at emitNone (events.js:91:20)
    at Unzip.emit (events.js:185:7)
    at endReadableNT (_stream_readable.js:974:12)
    at _combinedTickCallback (internal/process/next_tick.js:80:11)
    at process._tickDomainCallback (internal/process/next_tick.js:128:9)
The problem is that subfolder MUST be specified and MUST NOT be an empty string.
Since I was using this in a Firebase function, I just use the firebase subfolder for any commands being sent that do not have a specific subfolder:
const request = {
name: `${registryName}/devices/${deviceId}`,
binaryData: Buffer.from(JSON.stringify(commandMessage)).toString("base64"),
subfolder: 'firebase'
}
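For anyone wondering where the subfolder ends up: on the device side, Cloud IoT Core delivers the command to the MQTT topic /devices/{deviceId}/commands/{subfolder}. A minimal device-side sketch, assuming client is an mqtt.js client already connected to the Cloud IoT Core bridge and authenticated as deviceId:
// The '#' wildcard catches commands for every subfolder.
client.subscribe(`/devices/${deviceId}/commands/#`, { qos: 0 });
client.on('message', (topic, message) => {
  // For the request above, topic will be /devices/<deviceId>/commands/firebase
  console.log(topic, message.toString());
});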
Here are the function's deps:
"dependencies": {
"firebase-admin": "^6.4.0",
"firebase-functions": "^2.1.0",
"fs-extra": "^7.0.0",
"googleapis": "^36.0.0",
},
This is probably due to:
- a bug in the Node library
- a bug in Google's endpoint
- lack of testing on Google's part
It seems that Google's "IoT" is still very young and needs a lot of work.
I'm not too familiar with Firebase Cloud Functions, but I didn't get the error using the inline editor for Cloud Functions (https://console.cloud.google.com/functions). Can you tell me when you started getting this error (and if you're still encountering it)?
For reference, here is the code that I used (basically what you had, but with more explicit definitions for projectId and cloudRegion).
const {google} = require('googleapis');
const API_VERSION = 'v1';
const DISCOVERY_API = 'https://cloudiot.googleapis.com/$discovery/rest';
exports.sendCommand = (req, res) => {
let reqData = req.body;
const projectId = reqData.projectId || process.env.GCLOUD_PROJECT;
const cloudRegion = reqData.cloudRegion || process.env.GCLOUD_REGION;
const parentName = `projects/${projectId}/locations/${cloudRegion}`;
const registryName = `${parentName}/registries/${reqData.registryId}`;
const binaryData = Buffer.from(JSON.stringify(reqData.message)).toString('base64');
const request = {
name: `${registryName}/devices/${reqData.deviceId}`,
binaryData: binaryData
};
google.auth.getClient().then((authClient) => {
const discoveryUrl =
`${DISCOVERY_API}?version=${API_VERSION}`;
if (authClient.createScopedRequired && authClient.createScopedRequired()) {
// Scopes can be specified either as an array or as a single,
// space-delimited string.
authClient = authClient.createScoped([
'https://www.googleapis.com/auth/cloud-platform'
]);
}
google.options({
auth: authClient
});
google.discoverAPI(discoveryUrl).then((client, err) => {
if (err) {
console.log('Error during API discovery', err);
return undefined;
}
client.projects.locations.registries.devices.sendCommandToDevice(request,
(err, data) => {
if (err) {
console.log('Could not send command:', request);
console.log('Message: ', err);
} else {
console.log('Success :', data.statusText);
}
});
});
});
res.status(200).send(reqData.message);
};
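One caveat with the code above: res.status(200).send(reqData.message) runs as soon as the handler returns, before the promise chain has finished, so the function instance can be frozen before the command is actually sent. A sketch of a variant that responds only after sendCommandToDevice completes (same request, reqData, and constants as above; the scope check is omitted for brevity):
return google.auth.getClient()
  .then((authClient) => {
    google.options({ auth: authClient });
    return google.discoverAPI(`${DISCOVERY_API}?version=${API_VERSION}`);
  })
  .then((client) => new Promise((resolve, reject) => {
    client.projects.locations.registries.devices.sendCommandToDevice(request,
      (err, data) => (err ? reject(err) : resolve(data)));
  }))
  .then(() => res.status(200).send(reqData.message))
  .catch((err) => res.status(500).send(err.message));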
OK, so I am having an authentication issue with passport/JWT when trying to grab the header after it got set at login. JWT should return an ID, and I am trying to grab that ID and use it to update a user profile with a collection image upload. Here is where it gets weird. I get this error in the console:
you are not valid
node:internal/errors:464
ErrorCaptureStackTrace(err);
^
Error [ERR_HTTP_HEADERS_SENT]: Cannot set headers after they are sent to the client
at new NodeError (node:internal/errors:371:5)
at ServerResponse.setHeader (node:_http_outgoing:576:11)
at ServerResponse.header (C:\Users\tquig\OneDrive\Documents\GitHub\Team2\backend\node_modules\express\lib\response.js:776:10)
at ServerResponse.send (C:\Users\tquig\OneDrive\Documents\GitHub\Team2\backend\node_modules\express\lib\response.js:170:12)
at ServerResponse.json (C:\Users\tquig\OneDrive\Documents\GitHub\Team2\backend\node_modules\express\lib\response.js:267:15)
at C:\Users\tquig\OneDrive\Documents\GitHub\Team2\backend\routes\users.js:239:29
at processTicksAndRejections (node:internal/process/task_queues:96:5) {
code: 'ERR_HTTP_HEADERS_SENT'
}
Now Insomnia gives me back this error:
"AwsError: MissingRequiredParameter: Missing required key 'Key' in params"
I am not sure if AWS just hates the JWT token, or if my JWT ID isn't actually being returned properly and I am not doing it right in general.
Here is the AWS middleware:
const S3 = require('aws-sdk/clients/s3')
const fs = require('fs')
const bucketName = process.env.bucketName
const region = process.env.bucketRegion
const accessKeyId = process.env.AWSAccessKeyId
const secretAccessKey = process.env.AWSSecretKey
const s3 = new S3({
region,
accessKeyId,
secretAccessKey
})
function uploadFile(file){
const fileStream = fs.createReadStream(file.path)
const uploadParams = {
Bucket: bucketName,
Body: fileStream,
key: file.filename
}
return s3.upload(uploadParams).promise()
}
exports.uploadFile = uploadFile
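Worth noting while reading this: the v2 SDK's S3 parameter names are case-sensitive, and uploadParams above uses a lowercase key, which is exactly what MissingRequiredParameter: Missing required key 'Key' in params complains about. The params the SDK expects look like this (the commented-out download code below has the same lowercase key/bucket issue):
const uploadParams = {
  Bucket: bucketName,
  Body: fileStream,
  Key: file.filename // capital K
};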
//downloads a file from s3
//not tested yet
/*
function getFileStream(fileKey){
const downloadParams = {
key: fileKey
bucket: bucketName
}
return s3.getObject(downloadParams).createReadStream()
}
exports.getFileStream = getFileStream
*/
Here is the authentication code:
require('dotenv').config();
const jwt = require('jsonwebtoken');
const mongoose = require('mongoose');
// middleware functionality to check logged in user
module.exports = async (req, res, next) => {
try{
const token = req.cookies.jwt;
if(!token) return res.status(401).json({errorMessage: "Unauthorized"});
const verified = jwt.verify(token, process.env.secretKey);
req.user = verified.id;
next();
} catch (err){
console.error(err);
res.status(401).json({errorMessage: "Unauthorized"});
}
}
and here is the route:
const storage = multer.diskStorage({
destination: function (req, file, cb) {
cb(null, "uploads");
},
filename: function (req, file, cb) {
cb(
null,
file.fieldname + "-" + Date.now() + path.extname(file.originalname)
);
},
});
const upload = multer({
storage: storage,
fileFilter: (req, file, cb) => {
if (file.mimetype == "image/png" || file.mimetype == "image/jpg" || file.mimetype == "image/jpeg") {
cb(null, true);
} else {
cb(null, false);
return cb(new Error('Only .png, .jpg and .jpeg format allowed!'));
}
} });
router.post("/collections", requireLogin, upload.single("myImage"), async (req, res) => {
const obj = {
img: {
data: req.file.filename,
contentType: req.file.contentType
}
}
const newCollection = new collections({
imgName: req.file.filename,
image: obj.img
});
const findBool = user.findById(req.body.id)
.then(() => res.json('found user'))
.catch(err => res.status(400).json('UserIdError: ' + err));
//need another check before uploading to aws to prevent unauthorized uploads
if (findBool){
//upload to aws
await uploadFile(req.file).then(() => res.json('AWS upload Complete'))
.catch(err => res.status(400).json('AwsError: ' + err))
//mongodb upload
try {
await user.findbyId(req.body.id).insertOne(newCollection)
} catch (error) {
res.status(400).json('updateError: ' + error)
}
//delete file from local storage
unlinkFile(file.path)
}
});
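For what it's worth, two things in this route line up with the errors above: findBool is a Promise (so it is always truthy), and its .then(() => res.json('found user')) sends a response before the later res.json calls, which matches ERR_HTTP_HEADERS_SENT; also, the middleware stores the verified id on req.user while the route reads req.body.id. A minimal sketch of the route body under those assumptions, responding exactly once:
router.post("/collections", requireLogin, upload.single("myImage"), async (req, res) => {
  try {
    // req.user was set by the requireLogin middleware above
    const foundUser = await user.findById(req.user);
    if (!foundUser) return res.status(404).json('UserIdError: user not found');
    // upload to aws
    await uploadFile(req.file);
    // ...the mongodb update with newCollection would go here...
    // delete file from local storage (unlinkFile assumed to be a promisified fs.unlink)
    await unlinkFile(req.file.path);
    return res.json('AWS upload Complete');
  } catch (err) {
    return res.status(400).json('AwsError: ' + err);
  }
});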
I'm trying to migrate an Express endpoint from v2 to v3 of the aws-sdk for JavaScript. The endpoint is a file downloader for AWS S3.
In version 2, I passed the result of GetObject back to the browser in a readable stream. In version 3 that same technique fails with the error:
TypeError: data.Body.createReadStream is not a function
How do I work with the data returned from the new GetObjectCommand? Is it a blob? I'm struggling to find anything useful in the v3 SDK docs.
Here are the two versions of the endpoint:
import AWS from 'aws-sdk'
import dotenv from 'dotenv'
import { GetObjectCommand, S3Client } from '@aws-sdk/client-s3'
dotenv.config()
// VERSION 3 DOWNLOADER - FAILS
const getFileFromS3v3 = async (req, res) => {
const client = new S3Client({ region: 'us-west-2' })
const params = {
Bucket: process.env.AWS_BUCKET,
Key: 'Tired.pdf',
}
const command = new GetObjectCommand(params)
try {
const data = await client.send(command)
console.log(data)
data.Body.createReadStream().pipe(res)
} catch (error) {
console.log(error)
}
}
// VERSION 2 DOWNLOADER - WORKS
const getFileFromS3 = async (req, res) => {
const filename = req.query.filename
var s3 = new AWS.S3()
var s3Params = {
Bucket: process.env.AWS_BUCKET,
Key: 'Tired.pdf',
}
// if the file header exists, stream the file to the response
s3.headObject(s3Params, (err) => {
if (err && err.code === 'NotFound') {
console.log('File not found: ' + filename)
} else {
s3.getObject(s3Params).createReadStream().pipe(res)
}
})
}
export { getFileFromS3, getFileFromS3v3 }
This version 3 code works. Thanks to a major assist, the trick was to pipe data.Body and not use any of the fileStream methods.
import { GetObjectCommand, S3Client } from '@aws-sdk/client-s3'
import dotenv from 'dotenv'
dotenv.config()
const getFileFromS3 = async (req, res) => {
const key = req.query.filename
const client = new S3Client({ region: process.env.AWS_REGION })
const params = {
Bucket: process.env.AWS_BUCKET,
Key: key,
}
const command = new GetObjectCommand(params)
try {
const data = await client.send(command)
data.Body.pipe(res)
} catch (error) {
console.log(error)
}
}
export { getFileFromS3 }
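In the Node.js v3 SDK, data.Body is a Readable stream, which is why piping it works. If you ever need the object in memory instead of streaming it to the response, a small helper like this does the job (a sketch; newer Node versions also ship equivalent stream helpers):
// Collect an S3 Body stream into a single Buffer.
const streamToBuffer = (stream) =>
  new Promise((resolve, reject) => {
    const chunks = [];
    stream.on('data', (chunk) => chunks.push(chunk));
    stream.on('error', reject);
    stream.on('end', () => resolve(Buffer.concat(chunks)));
  });
// usage: const buf = await streamToBuffer(data.Body);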
When called from this frontend function, the code above returns the file from S3 to the browser.
import axios from 'axios'
// fileDownload is assumed to come from the js-file-download package
import fileDownload from 'js-file-download'
const downloadFile = async (filename) => {
const options = {
url: `/api/documents/?filename=${filename}`,
method: 'get',
responseType: 'blob',
}
try {
const res = await axios(options)
fileDownload(res.data, filename)
} catch (error) {
console.log(error)
}
}
I have a Lambda which looks like so:
module.exports.handler = (event, context, callback) => {
AWS.config.setPromisesDependency(null);
const uploadPromise = s3.upload(params).promise();
uploadPromise.then((data) => {
const response = {
...
};
return response;
}).catch((error) => {
console.log(error);
});
};
Calling it from Postman results in a server error. The CloudWatch logs have no further info.
Doing:
s3.upload(params, (error, data) => {
if (error) {
console.error("error occurred storing to s3: ", error);
return;
}
const response = {
...
};
return response;
});
does not result in a server error.
I am trying to follow the information from AWS that can be found here:
https://aws.amazon.com/blogs/developer/support-for-promises-in-the-sdk/
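For what it's worth, the likely problem in the promise version is that the handler never invokes callback: the response object is returned from inside .then, which goes nowhere, so API Gateway never receives a response. A minimal sketch that keeps the promise style but still reports back (s3 and params as in the snippet above):
module.exports.handler = (event, context, callback) => {
  s3.upload(params).promise()
    .then((data) => {
      // hand API Gateway a well-formed proxy response
      callback(null, { statusCode: 200, body: JSON.stringify(data) });
    })
    .catch((error) => {
      console.log(error);
      callback(error);
    });
};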
Postman is able to upload to Lambda by doing the following with async/await and try/catch:
const AWS = require("aws-sdk");
const util = require("util");
exports.handler = async function(event, context) {
const s3 = new AWS.S3();
const encodedImage = util.inspect(event.body);
const decodedImage = Buffer.from(encodedImage, "base64");
const filePath = "test.png";
const params = {
Body: decodedImage,
Bucket: "my bucket",
Key: filePath,
ACL: "public-read",
ContentType: "image/png"
};
try {
const result = await s3.upload(params).promise();
const response = {
statusCode: 200,
headers: {
my_header: "my_value"
},
body: JSON.stringify(result),
isBase64Encoded: false
};
return response;
} catch (error) {
console.log(error)
// return a well-formed proxy response on failure too, otherwise
// API Gateway reports a malformed Lambda proxy response
return {
statusCode: 500,
body: JSON.stringify({ message: error.message }),
isBase64Encoded: false
}
}
};
This code works just fine locally using Node.js: images download from S3 and write to file.
However, in Lambda (using Node.js 8.10) I'm getting "Internal Server Error" when testing the function, with this in the logs:
"Execution failed due to configuration error: Malformed Lambda proxy response"
I am using the Lambda proxy response in the callback, but clearly some AWS SDK error with S3 is not getting caught.
I do have a role with S3 full access set up that the Lambda uses.
What am I missing in my first Lambda function? I've followed the docs and tutorials correctly, and it is not working.
const async = require('async')
const aws = require('aws-sdk')
const fs = require('fs')
const exec = require('child_process').exec
const bucket = 'mybucket'
const s3Src = 'bucket_prefix'
const s3Dst = 'new_prefix'
const local = `${__dirname}/local/`
aws.config.region = 'us-west-2'
const s3 = new aws.S3()
exports.handler = async (event, context, callback) => {
const outputImage = 'hello_world.png'
const rack = JSON.parse(event.body)
const images = my.images
async.waterfall([
function download(next) {
let downloaded = 0
let errors = false
let errorMessages = []
for (let i = 0; i < images.length; i++) {
let key = `${s3Src}/${images[i].prefix}/${images[i].image}`,
localImage = `${local}${images[i].image}`
getBucketObject(bucket, key, localImage).then(() => {
++downloaded
if (downloaded === images.length) { // js is non blocking, need to check if all images have been downloaded. If so, then go to next function
if (errors) {
next(errorMessages.join(' '))
} else {
next(null)
}
}
}).catch(error => {
errorMessages.push(`${error} - ${localImage}`)
++downloaded
errors = true
})
}
function getBucketObject(bucket, key, dest) {
return new Promise((resolve, reject) => {
let ws = fs.createWriteStream(dest)
ws.once('error', (err) => {
return reject(err)
})
ws.once('finish', () => {
return resolve(dest)
})
let s3Stream = s3.getObject({
Bucket: bucket,
Key: key
}).createReadStream()
s3Stream.pause() // Under load this will prevent first few bytes from being lost
s3Stream.on('error', (err) => {
return reject(err)
})
s3Stream.pipe(ws)
s3Stream.resume()
})
}
}
], err => {
if (err) {
let response = {
"statusCode": 400,
"headers": {
"my_header": "my_value"
},
"body": JSON.stringify(err),
"isBase64Encoded": false
}
callback(null, response)
} else {
let response = {
"statusCode": 200,
"headers": {
"my_header": "my_value"
},
"body": JSON.stringify(`<img src="${local}${outputImage}" />`),
"isBase64Encoded": false
}
callback(null, response)
}
}
)
}
The response should always be sent to the callback function. Your code sends a response only on error. That's why the Lambda executor thinks your code failed.
BTW, shouldn't your functions in async.waterfall be separated with a comma, as two tasks?
Locally I've been running Node.js 10.10, while Lambda is currently at 8.10; I'm sure that is a big part of it. In the end I had to remove the async keyword and move the getBucketObject function out of the waterfall. Another issue was that the downloaded images needed to go into the "/tmp" directory. Once I made those adjustments it started working.
const aws = require('aws-sdk')
const async = require('async')
const fs = require('fs')
const bucket = 'mybucket'
const s3Src = 'mys3src'
const local = '/tmp/'
aws.config.region = 'us-west-2'
const s3 = new aws.S3()
exports.handler = (event, context, callback) => {
const outputImage = 'hello_world.png'
async.waterfall([
function download(next) {
let downloaded = 0,
errorMessages = []
for (let i = 0; i < event['images'].length; i++) {
let key = `${s3Src}/${event['images'][i]['prefix']}/${event['images'][i]['image']}`,
localImage = `${local}${event['images'][i]['image']}`
getBucketObject(bucket, key, localImage).then(() => {
downloaded++
if (downloaded === event['images'].length) {
if (errorMessages.length > 0) {
next(errorMessages.join(' '))
} else {
console.log('All downloaded')
next(null)
}
}
}).catch(error => {
downloaded++
errorMessages.push(`${error} - ${localImage}`)
if (downloaded === event['images'].length) {
next(errorMessages.join(' '))
}
})
}
}
], err => {
if (err) {
console.error(err)
callback(null, {
"statusCode": 400,
"body": JSON.stringify(err),
"isBase64Encoded": false
})
} else {
console.log('event image created!')
callback(null, {
"statusCode": 200,
"body": JSON.stringify(`<img src="${local}${outputImage}" />`),
"isBase64Encoded": false
})
}
}
)
}
function getBucketObject(bucket, key, dest) {
return new Promise((resolve, reject) => {
let ws = fs.createWriteStream(dest)
ws.once('error', (err) => {
return reject(err)
})
ws.once('finish', () => {
return resolve(dest)
})
let s3Stream = s3.getObject({
Bucket: bucket,
Key: key
}).createReadStream()
s3Stream.pause() // Under load this will prevent first few bytes from being lost
s3Stream.on('error', (err) => {
return reject(err)
})
s3Stream.pipe(ws)
s3Stream.resume()
})
}
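For reference, a sample test event matching what this handler expects (field names inferred from the code above; each entry maps to s3://mybucket/mys3src/<prefix>/<image>):
const sampleEvent = {
  images: [
    { prefix: 'some_prefix', image: 'hello_world.png' }
  ]
};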
So, I'm using AWS = Npm.require('aws-sdk'); from Amazon.
Apparently promises are implemented: http://docs.aws.amazon.com/AWSJavaScriptSDK/latest/AWS/Request.html#promise-property
See my functions (Server):
s3 = new AWS.S3({ apiVersion: "2006-03-01" });
bucketExists = (bucket) => {
check(bucket, String);
const params = {
Bucket: bucket
};
return s3.putObject({Bucket: 'bucket', Key: 'key'}).promise();
};
Meteor.methods({
addPhoto(userId, photo) {
check(userId, String);
check(photo, Object);
bucketExists(userId)
.then(
function(value) {
console.log("Contents: " + value);
})
.catch(function(reason) {
console.error("Error or timeout", reason);
});
}
});
Exception:
Exception while invoking method 'addPhoto' TypeError: Object [object Object] has no method 'promise'.
WTF
So, I tried without calling promise(), like this:
bucketExists = (bucket) => {
check(bucket, String);
const params = {
Bucket: bucket
};
return new Promise(
(resolve, reject) => {
s3.waitFor('bucketExists', params, (err, res) => {
if (err) reject(err);
else resolve(res);
});
});
};
But nothing appears. I'm just starting with ES6, so please help.
Thanks
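A hedged note for anyone hitting the same thing: request.promise() was only added to the v2 SDK around aws-sdk 2.3.0 (the promise support announced on the AWS blog), so a Meteor project pinning an older aws-sdk would throw exactly this "has no method 'promise'" error. A quick runtime check:
const AWS = Npm.require('aws-sdk');
// If this prints something below 2.3.0, request.promise() does not exist yet;
// upgrade the package (and optionally wire in a promise implementation):
console.log(AWS.VERSION);
// AWS.config.setPromisesDependency(Promise);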