I am trying to disable users from my Node.js backend using the AWS SDK v3. Everything works normally except for the disable/enable user commands.
I have tried everything I know; here is my code snippet:
const aws_creds = {
accessKeyId: process.env.ACCESS_KEY_ID,
secretAccessKey: process.env.SECRET_ACCESS_KEY,
};
const cognitoConfig = {
region: process.env.REGION,
credentials: aws_creds,
};
const cognito_v3 = new CognitoIdentityProviderClient(cognitoConfig);
Then, in my route, I am running this try/catch block:
try {
if (status === "enable") {
const enableUserCommand = new AdminEnableUserCommand(userDetails);
const enableUserResults = await cognito_v3.send(enableUserCommand);
return res.status(200).json(enableUserResults);
}
const disableUserCommand = new AdminDisableUserCommand(userDetails);
const disableUserResults = await cognito_v3.send(disableUserCommand);
return res.status(200).json(disableUserResults);
} catch (err) {
console.log(err);
return res.status(400).json(err);
}
However, logging err prints the following:
TypeError: Cannot read property 'byteLength' of undefined
at Object.fromArrayBuffer (C:\Users\SER-01\Documents\ctr\simsim\simsim-backend\lambdas\simsim-auth\node_modules\@aws-sdk\util-buffer-from\dist\cjs\index.js:6:60)
at castSourceData (C:\Users\SER-01\Documents\ctr\simsim\simsim-backend\lambdas\simsim-auth\node_modules\@aws-sdk\hash-node\dist\cjs\index.js:29:31)
at Hash.update (C:\Users\SER-01\Documents\ctr\simsim\simsim-backend\lambdas\simsim-auth\node_modules\@aws-sdk\hash-node\dist\cjs\index.js:12:26)
at hmac (C:\Users\SER-01\Documents\ctr\simsim\simsim-backend\lambdas\simsim-auth\node_modules\@aws-sdk\signature-v4\dist\cjs\credentialDerivation.js:60:10)
at Object.getSigningKey (C:\Users\SER-01\Documents\ctr\simsim\simsim-backend\lambdas\simsim-auth\node_modules\@aws-sdk\signature-v4\dist\cjs\credentialDerivation.js:32:29)
at SignatureV4.getSigningKey (C:\Users\SER-01\Documents\ctr\simsim\simsim-backend\lambdas\simsim-auth\node_modules\@aws-sdk\signature-v4\dist\cjs\SignatureV4.js:139:39)
at SignatureV4.signRequest (C:\Users\SER-01\Documents\ctr\simsim\simsim-backend\lambdas\simsim-auth\node_modules\@aws-sdk\signature-v4\dist\cjs\SignatureV4.js:98:73)
at async C:\Users\SER-01\Documents\ctr\simsim\simsim-backend\lambdas\simsim-auth\node_modules\@aws-sdk\middleware-signing\dist\cjs\middleware.js:14:22
at async StandardRetryStrategy.retry (C:\Users\SER-01\Documents\ctr\simsim\simsim-backend\lambdas\simsim-auth\node_modules\@aws-sdk\middleware-retry\dist\cjs\defaultStrategy.js:56:46)
at async C:\Users\SER-01\Documents\ctr\simsim\simsim-backend\lambdas\simsim-auth\node_modules\@aws-sdk\middleware-logger\dist\cjs\loggerMiddleware.js:6:22
at async C:\Users\SER-01\Documents\ctr\simsim\simsim-backend\lambdas\simsim-auth\src\routes\auth.js:232:33 {
'$metadata': { attempts: 1, totalRetryDelay: 0 }
}
Any idea why?
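For what it's worth, the failing frame is the SigV4 signer hashing the secret key, which usually means one of the credential strings is undefined at runtime. A minimal guard, assuming the values come from a .env file that may not have been loaded:

// Hypothetical fail-fast check: if SECRET_ACCESS_KEY is unset, the SigV4
// signer ends up hashing undefined, which raises this exact byteLength TypeError.
for (const [key, value] of Object.entries(aws_creds)) {
  if (typeof value !== "string" || value.length === 0) {
    throw new Error(`AWS credential "${key}" is missing or empty`);
  }
}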
I followed these steps while trying to run an Android app test on AWS Device Farm via a Node.js AWS Lambda:
Created a project
Created an upload
Uploaded the APK to the signed URL
Once the upload was done, I created a device pool using the following params:
var createDevicePoolParams = {
name: "DAP_Device_Pool",
description: "DAP_Android_Devices",
projectArn: projectARN,
rules: [{
attribute: "PLATFORM",
operator: "EQUALS",
value: "\"ANDROID\""
}]
};
Then I called scheduleRun with the following params:
var scheduleRunParams = {
appArn: uploadARN,
name: "tarunRun",
devicePoolArn: devicePoolARN,
projectArn: projectARN,
test: {
type: "BUILTIN_FUZZ",
}
};
But I am getting an error about missing or unprocessed resources.
I am not able to understand what I am missing. My understanding is that if I am using the built-in fuzz test type, I don't need to upload any custom test cases.
Can somebody please help point out which step is missing?
After your uploads have been processed by Device Farm, call aws devicefarm schedule-run.
[update]
I put this code in an AWS Lambda function and it worked there as well. Here is a gist of it:
https://gist.github.com/jamesknowsbest/3ea0e385988b0098e5f9d38bf5a932b6
Here is the code I just authored; it seems to work with the built-in fuzz/explorer tests:
// assume we already executed `npm install aws-sdk`
var AWS = require('aws-sdk');
// assumes `npm install request`
const request = require("request");
// fs ships with Node.js, so no install is needed
const fs = require('fs');
// https://stackoverflow.com/a/41641607/8016330
const sleep = (waitTimeInMs) => new Promise(resolve => setTimeout(resolve, waitTimeInMs));
// Device Farm is only available in the us-west-2 region
var devicefarm = new AWS.DeviceFarm({ region: 'us-west-2' });
(async function() {
let project_params = {
name: "test of fuzz tests"
};
let PROJECT_ARN = await devicefarm.createProject(project_params).promise().then(
function(data){
return data.project.arn;
},
function (error) {
console.error("Error creating project", "Error: ", error);
}
);
console.log("Project created ", "Project arn: ", PROJECT_ARN);
// create the upload and upload files to the project
let params = {
name: "app-debug.apk",
type: "ANDROID_APP",
projectArn: PROJECT_ARN
};
let UPLOAD = await devicefarm.createUpload(params).promise().then(
function(data){
return data.upload;
},
function(error){
console.error("Creating upload failed with error: ", error);
}
);
let UPLOAD_ARN = UPLOAD.arn;
let UPLOAD_URL = UPLOAD.url;
console.log("upload created with arn: ", UPLOAD_ARN);
console.log("uploading file...");
let options = {
method: 'PUT',
url: UPLOAD_URL,
headers: {},
body: fs.readFileSync("/path/to/your/apk/file")
};
// wait for upload to finish
await new Promise(function(resolve,reject){
request(options, function (error, response, body) {
if (error) {
console.error("uploading file failed with error: ", error);
reject(error);
}
resolve(body);
});
});
//get the status of the upload and make sure it finished processing before scheduling
let STATUS = await getStatus(UPLOAD_ARN);
console.log("upload status is: ", STATUS);
while(STATUS !== "SUCCEEDED"){
await sleep(5000);
STATUS = await getStatus(UPLOAD_ARN);
console.log("upload status is: ", STATUS);
}
//create device pool
let device_pool_params = {
projectArn: PROJECT_ARN,
name: "Google Pixel 2",
rules: [{"attribute": "ARN","operator":"IN","value":"[\"arn:aws:devicefarm:us-west-2::device:5F20BBED05F74D6288D51236B0FB9895\"]"}]
}
let DEVICE_POOL_ARN = await devicefarm.createDevicePool(device_pool_params).promise().then(
function(data){
return data.devicePool.arn;
},function(error){
console.error("device pool failed to create with error: ",error);
}
);
console.log("Device pool created successfully with arn: ", DEVICE_POOL_ARN);
//schedule the run
let schedule_run_params = {
name: "MyRun",
devicePoolArn: DEVICE_POOL_ARN, // You can get the Amazon Resource Name (ARN) of the device pool by using the list-pools CLI command.
projectArn: PROJECT_ARN, // You can get the Amazon Resource Name (ARN) of the project by using the list-projects CLI command.
test: {
type: "BUILTIN_FUZZ"
},
appArn: UPLOAD_ARN
};
let schedule_run_result = await devicefarm.scheduleRun(schedule_run_params).promise().then(
function(data){
return data.run;
},function(error){
console.error("Schedule run command failed with error: ", error);
}
);
console.log("run finished successfully with result: ", schedule_run_result);
})();
async function getStatus(UPLOAD_ARN){
return await devicefarm.getUpload({arn: UPLOAD_ARN}).promise().then(
function(data){
return data.upload.status;
},function(error){
console.error("getting upload failed with error: ", error);
}
);
}
Output is:
Project created Project arn: arn:aws:devicefarm:us-west-2:111122223333:project:b9233b49-967e-4b09-a51a-b5c4101340a1
upload created with arn: arn:aws:devicefarm:us-west-2:111122223333:upload:b9233b49-967e-4b09-a51a-b5c4101340a1/48ffd115-f7d7-4df5-ae96-4a44911bff65
uploading file...
upload status is: INITIALIZED
upload status is: SUCCEEDED
Device pool created successfully with arn: arn:aws:devicefarm:us-west-2:111122223333:devicepool:b9233b49-967e-4b09-a51a-b5c4101340a1/c0ce1bbc-7b40-4a0f-a419-ab024a6b1000
run finished successfully with result: { arn:
'arn:aws:devicefarm:us-west-2:111122223333:run:b9233b49-967e-4b09-a51a-b5c4101340a1/39369894-3829-4e14-81c9-bdfa02c7e032',
name: 'MyRun',
type: 'BUILTIN_FUZZ',
platform: 'ANDROID_APP',
created: 2019-06-06T23:51:13.529Z,
status: 'SCHEDULING',
result: 'PENDING',
started: 2019-06-06T23:51:13.529Z,
counters:
{ total: 0,
passed: 0,
failed: 0,
warned: 0,
errored: 0,
stopped: 0,
skipped: 0 },
totalJobs: 1,
completedJobs: 0,
billingMethod: 'METERED',
seed: 982045377,
appUpload:
'arn:aws:devicefarm:us-west-2:111122223333:upload:b9233b49-967e-4b09-a51a-b5c4101340a1/48ffd115-f7d7-4df5-ae96-4a44911bff65',
eventCount: 6000,
jobTimeoutMinutes: 150,
devicePoolArn:
'arn:aws:devicefarm:us-west-2:111122223333:devicepool:b9233b49-967e-4b09-a51a-b5c4101340a1/c0ce1bbc-7b40-4a0f-a419-ab024a6b1000',
radios: { wifi: true, bluetooth: false, nfc: true, gps: true } }
HTH
-James
I am trying to create an HTTP API in a Cloud Function that eventually publishes a message to PubSub. I understand there is a PubSub REST API, but it forces me to set up authentication on the client side, which I would like to skip and move to the server side.
The code below is deployed as a Google Cloud Function with this command: gcloud functions deploy helloGET --runtime nodejs8 --trigger-http
But when tested in a browser, it errors out with Error: could not handle the request
Any suggestion is appreciated, thanks!
"use strict";
// [START functions_pubsub_setup]
const { PubSub } = require("#google-cloud/pubsub");
// Instantiates a client
const pubsub = new PubSub();
// [END functions_pubsub_setup]
const Buffer = require("safe-buffer").Buffer;
exports.helloGET = (req, res) => {
const topic = pubsub.topic("projects/myproject/topics/openit");
const message = {
data: {
message: "req.body.message"
}
};
// Publishes a message
res.send(
topic
.publish(message)
.then(() => res.status(200).send("Message published."))
.catch(err => {
err = `Catch block ... ${err}`;
console.error(err);
res.status(500).send(err);
return Promise.reject(err);
})
);
};
The code below works, but it takes around 30 seconds or more for the subscriber to receive the event, which is far too slow for my use case :S
"use strict";
const { PubSub } = require("#google-cloud/pubsub");
const pubsub = new PubSub();
const Buffer = require("safe-buffer").Buffer;
exports.helloGET = async (req, res) => {
var toPublish = `hello ${Date.now()}`;
publishMessage("_REPLACE_WITH_TOPIC_NAME_", toPublish);
res.send(`Published ${toPublish}`);
};
async function publishMessage(topicName, data) {
console.log("=> publishMessage, data = ", data);
const dataBuffer = Buffer.from(data);
const topic = pubsub.topic(topicName);
const publisher = topic.publisher();
publisher.publish(dataBuffer, { a: "XYZ" }, function() {
console.log("Published eventually ...");
});
}
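For comparison, a minimal sketch that awaits the publish before responding. This assumes the delay comes from work left running after res.send() (Cloud Functions does not guarantee background work keeps running once the response is sent) and a @google-cloud/pubsub version where topic.publish() returns a promise; the topic name is a placeholder:

"use strict";

const { PubSub } = require("@google-cloud/pubsub");
const pubsub = new PubSub();

exports.helloGET = async (req, res) => {
  const data = Buffer.from(`hello ${Date.now()}`);
  try {
    // Await the publish so the message is handed to Pub/Sub
    // before the HTTP response is sent.
    const messageId = await pubsub
      .topic("_REPLACE_WITH_TOPIC_NAME_")
      .publish(data);
    res.status(200).send(`Published message ${messageId}`);
  } catch (err) {
    console.error(err);
    res.status(500).send(`Publish failed: ${err}`);
  }
};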
I want to get project-wide metadata, set in Compute Engine, from within my GCP Cloud Function. Is this possible?
Here is my try:
metadata.js:
const request = require('request-promise');
async function getMetaData(attr) {
const url = `http://metadata.google.internal/computeMetadata/v1/project/attributes/${attr}`;
const options = {
headers: {
'Metadata-Flavor': 'Google'
}
};
return request(url, options)
.then(response => {
console.info(`Retrieve meta data successfully. meta data: ${response.body}`);
return response.body;
})
.catch(err => {
console.error('Retrieve meta data failed.', err);
});
}
async function retrieveMetaData() {
return {
IT_EBOOKS_API: await getMetaData('IT_EBOOKS_API')
};
}
module.exports = { getMetaData, retrieveMetaData };
cloud function index.js:
const { retrieveMetaData } = require('./metadata');
async function retrieveComputeMetadata(req, res) {
const envVars = await retrieveMetaData();
console.log('envVars: ', envVars);
res.status(200).json(envVars);
}
exports.retrieveComputeMetadata = retrieveComputeMetadata;
When I test the cloud function, the logs show me an error:
Retrieve meta data failed. { StatusCodeError: 404 - "404 page not found\n" at new StatusCodeError (/srv/node_modules/request-promise-core/lib/errors.js:32:15) at ....
It seems the url is not found.
The API you're looking to hit ('..v1/project/attributes/') isn't available. As Cloud Functions run on GAE standard, there are details here around which endpoints are available.
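For instance, the project-id path is one of the metadata endpoints that does resolve from inside a function. A minimal sketch, reusing request-promise from the question (note that request-promise resolves with the response body itself, not a response object):

const request = require('request-promise');

// Hypothetical example: project/project-id is reachable from Cloud Functions,
// unlike project/attributes/*.
async function getProjectId() {
  return request({
    url: 'http://metadata.google.internal/computeMetadata/v1/project/project-id',
    headers: { 'Metadata-Flavor': 'Google' },
  }); // resolves with the project ID string
}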
I want to trigger a Cloud Function whenever a new file is uploaded to a Cloud Storage bucket. This function should call a Dataproc job, written in PySpark, that reads the file and loads it into BigQuery.
I want to know how to call a Dataproc job from a Cloud Function. Please suggest.
I was able to create a simple Cloud Function that triggers a Dataproc job on a GCS create-file event. In this example, the file in GCS contains a Pig query to execute, but you can follow the Dataproc API documentation to create a PySpark version.
index.js:
exports.submitJob = (event, callback) => {
const google = require('googleapis');
const projectId = 'my-project'
const clusterName = 'my-cluster'
const file = event.data;
if (file.name) {
google.auth.getApplicationDefault(function (err, authClient, projectId) {
if (err) {
throw err;
}
const queryFileUri = "gs://" + file.bucket + "/" + file.name
console.log("Using queryFileUri: ", queryFileUri);
if (authClient.createScopedRequired && authClient.createScopedRequired()) {
authClient = authClient.createScoped([
'https://www.googleapis.com/auth/cloud-platform',
'https://www.googleapis.com/auth/userinfo.email'
]);
}
const dataproc = google.dataproc({ version: 'v1beta2', auth: authClient });
dataproc.projects.regions.jobs.submit({
projectId: projectId,
region: "global",
resource: {
"job": {
"placement": {"clusterName": clusterName},
"pigJob": {
"queryFileUri": queryFileUri,
}
}
}
}, function(err, response) {
if (err) {
console.error("Error submitting job: ", err);
callback(err);
return;
}
console.log("Dataproc response: ", response);
callback();
});
});
} else {
// No file name in the event; nothing to submit, so finish cleanly
// instead of throwing and invoking the callback prematurely.
console.log("Skipped processing file!");
callback();
}
};
Make sure to set "Function to execute" to submitJob.
package.json:
{
"name": "sample-cloud-storage",
"version": "0.0.1",
"dependencies":{ "googleapis": "^21.3.0" }
}
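For the PySpark version the question asks about, the job resource passed to dataproc.projects.regions.jobs.submit would swap pigJob for pysparkJob. A sketch based on the Dataproc API reference; the cluster name and GCS script path are placeholders:

// Hypothetical PySpark equivalent of the pigJob resource in index.js above.
const pySparkJobResource = {
  "job": {
    "placement": { "clusterName": "my-cluster" },
    "pysparkJob": {
      "mainPythonFileUri": "gs://my-bucket/load_to_bigquery.py"
    }
  }
};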
The following blogpost gave me many ideas on how to get started:
https://cloud.google.com/blog/big-data/2016/04/scheduling-dataflow-pipelines-using-app-engine-cron-service-or-cloud-functions