Could not convert parameter `tx` between node and runtime: Error decoding field CheckMortality.0

I tried to use the polkadot-js library to run a simple app for transferring tokens between Dusty accounts on Plasm. The source code and package.json for running the snippet can be found in this git repo.
Node version: v12.8.2
Yarn version: 1.22.5
const { ApiPromise, WsProvider } = require('@polkadot/api')
const { Keyring } = require('@polkadot/keyring')
const plasmDefinitions = require('@plasm/types/interfaces/definitions');
const jsonDefinitions = require('./types.js')
const fs = require('fs')
const BN = require('bn.js')
startChain()

async function startChain() {
  console.log("trying connection to ", process.env.RPC_URL)
  const targetAddress = process.env.TARGET_ADDRESS
  const provider = new WsProvider(process.env.RPC_URL)
  const types = Object.values(plasmDefinitions).reduce(
    (res, { types }) => ({ ...res, ...types }),
    {},
  );
  const api = new ApiPromise({
    provider,
    types
  });
  api.on('connected', () => console.log(`Connected to ${process.env.RPC_URL}`));
  api.on('disconnected', () => console.log(`Disconnected from ${process.env.RPC_URL}`));
  await api.isReady;
  const [chain, nodeName, nodeVersion] = await Promise.all([
    api.rpc.system.chain(),
    api.rpc.system.name(),
    api.rpc.system.version()
  ])
  console.log(`You are connected to chain ${chain} using ${nodeName} v${nodeVersion} - ${process.env.RPC_URL}`)
  const keyring = new Keyring({ type: 'sr25519' })
  const fromPair = keyring.addFromUri(process.env.PLASM_MNEMONIC)
  const fromAmountUnits = new BN('1000000000000000')
  const transfer = api.tx.balances.transfer(targetAddress, fromAmountUnits)
  // send value
  //const nonce = await api.query.system.accountNonce(process.env.FROM_ADDRESS)
  const nonce = await api.rpc.system.accountNextIndex(process.env.FROM_ADDRESS)
  console.log("got nonce", nonce)
  const txHash = await transfer.signAndSend(fromPair, {nonce})
}
Stack trace:
trying connection to wss://rpc.dusty.plasmnet.io/
Connected to wss://rpc.dusty.plasmnet.io/
You are connected to chain Dusty using Plasm Node v1.6.1-cf15b11-x86_64-linux-gnu - wss://rpc.dusty.plasmnet.io/
got nonce Type { negative: 0, words: [ 0 ], length: 1, red: null, registry: TypeRegistry {} }
2021-02-05 21:00:27 RPC-CORE: submitExtrinsic(extrinsic: Extrinsic): Hash:: 1002: Verification Error: Execution: Could not convert parameter `tx` between node and runtime: Error decoding field CheckMortality.0: RuntimeApi, Execution: Could not convert parameter `tx` between node and runtime: Error decoding field CheckMortality.0
(node:13572) UnhandledPromiseRejectionWarning: Error: 1002: Verification Error: Execution: Could not convert parameter `tx` between node and runtime: Error decoding field CheckMortality.0: RuntimeApi, Execution: Could not convert parameter `tx` between node and runtime: Error decoding field CheckMortality.0
    at RpcCoder._checkError (<mydir>\examples\plasm-simple-transfer\node_modules\@polkadot\api\node_modules\@polkadot\rpc-provider\coder\index.js:84:13)
    at RpcCoder.decodeResponse (<mydir>\examples\plasm-simple-transfer\node_modules\@polkadot\api\node_modules\@polkadot\rpc-provider\coder\index.js:47:10)
    at WsProvider.value (<mydir>\examples\plasm-simple-transfer\node_modules\@polkadot\api\node_modules\@polkadot\rpc-provider\ws\index.js:214:90)
    at W3CWebSocket.value [as onmessage] (<mydir>\examples\plasm-simple-transfer\node_modules\@polkadot\api\node_modules\@polkadot\rpc-provider\ws\index.js:194:153)
    at W3CWebSocket._dispatchEvent [as dispatchEvent] (<mydir>\examples\plasm-simple-transfer\node_modules\yaeti\lib\EventTarget.js:107:17)
    at W3CWebSocket.onMessage (<mydir>\examples\plasm-simple-transfer\node_modules\websocket\lib\W3CWebSocket.js:234:14)
    at WebSocketConnection.<anonymous> (<mydir>\examples\plasm-simple-transfer\node_modules\websocket\lib\W3CWebSocket.js:205:19)
    at WebSocketConnection.emit (events.js:315:20)
    at WebSocketConnection.processFrame (<mydir>\examples\plasm-simple-transfer\node_modules\websocket\lib\WebSocketConnection.js:554:26)
    at <mydir>\examples\plasm-simple-transfer\node_modules\websocket\lib\WebSocketConnection.js:323:40
(node:13572) UnhandledPromiseRejectionWarning: Unhandled promise rejection. This error originated either by throwing inside of an async function without a catch block, or by rejecting a promise which was not handled with .catch(). To terminate the node process on unhandled promise rejection, use the CLI flag `--unhandled-rejections=strict` (see https://nodejs.org/api/cli.html#cli_unhandled_rejections_mode). (rejection id: 1)
(node:13572) [DEP0018] DeprecationWarning: Unhandled promise rejections are deprecated. In the future, promise rejections that are not handled will terminate the Node.js process with a non-zero exit code.
Additionally, when I try to do a clean run (delete node_modules and reinstall), I get the same message but with the field CheckMortality replaced by CheckNonce. The fields returned by the plasm-types library can be found in this file in JSON format.
I tried to figure out which types are wrong by going through the node template repositories, but I couldn't work it out. How can this be fixed?

I had to make a small change to the type definitions to circumvent the problem. In other words, replace:
const types = Object.values(plasmDefinitions).reduce(
  (res, { types }) => ({ ...res, ...types }),
  {},
);
const api = new ApiPromise({
  provider,
  types
});
with:
const types = Object.values(plasmDefinitions).reduce(
  (res, { types }) => ({ ...res, ...types }),
  {},
);
types["Address"] = "IndicesLookupSource";
types["LookupSource"] = "IndicesLookupSource";
const api = await ApiPromise.create({
  provider: provider,
  types: types
});
More information about this issue can be found here and here
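Independently of the type overrides, the UnhandledPromiseRejectionWarning in the stack trace comes from the rejected signAndSend promise; a minimal sketch of catching it (same API calls as in the question, variable names assumed from there):

try {
  const txHash = await transfer.signAndSend(fromPair, { nonce })
  console.log('submitted with hash', txHash.toHex())
} catch (err) {
  // a node-side verification error now surfaces here instead of
  // crashing the process with an unhandled rejection
  console.error('failed to submit extrinsic:', err.message)
}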

Related

Pass data from cloud task to a firebase cloud function - currently getting an error

My question is this: how do I call a Firebase Cloud Function from a Cloud Task and pass a payload through?
I tried following the tutorial here. The only difference is that I'm using Cloud Functions for Firebase instead of regular Cloud Functions.
Here is my cloud function.
const functions = require("firebase-functions");

exports.myFunction = functions.https.onRequest((req, res) => {
  console.log(req.query);
  res.send('success');
});
When I query the URL in the browser with the parameter ?myparams=data, I can log 'data', so I know the cloud function is basically working.
But when I try to call it from my queue (below) I get:
SyntaxError: Unexpected token o in JSON at position 1
    at JSON.parse (<anonymous>)
My guess is that req is undefined.
I've been looking at this SO question and I am wondering if it has something to do with needing to use bodyParser for onRequest functions.
HTTP Event Cloud Function: request body value is undefined
I'm also seeing that some people have CORS issues with their cloud functions, which seems like it might be related.
Here is the task queue code that should be sending the payload.
const seconds = 5;
const project = 'xxxxx-xxxxxxx';
const queue = 'xxxxx';
const location = 'us-west2';
const url = 'https://us-central1-xxxxx-xxxxx.cloudfunctions.net/writeDB';
const payload = 'My data';
const parent = client.queuePath(project, location, queue);

const task = {
  httpRequest: {
    httpMethod: "POST",
    url: url,
    body: Buffer.from(JSON.stringify(payload)).toString("base64"),
    headers: {
      "Content-Type": "application/json"
    },
    oidcToken: {
      serviceAccountEmail
    }
  }
};

task.scheduleTime = {
  seconds: seconds + Date.now() / 1000,
};

const request = {parent: parent, task: task};
await client.createTask(request)
  .then(response => {
    const task = response[0].name;
    console.log(`Created task ${task}`);
    return {'Response': String(response)}
  })
  .catch(err => {
    console.error(`Error in createTask: ${err.message || err}`);
    next()
  });
It calls the function, but for some reason it results in the error and the payload isn't logged.
Can anyone help?
As always, I'm happy to clarify the question if anything is unclear. Thanks!
I was able to replicate your error and managed to fix it by changing the Content-Type header from "application/json" to "text/plain". I also removed the JSON.stringify() call in the body value, because your payload variable is a string. Below is my modified version of your code:
const {CloudTasksClient} = require('@google-cloud/tasks');

// Instantiates a client.
const client = new CloudTasksClient();

const seconds = 5;
const serviceAccountEmail = "xxxx-xxxxx-xxxxxx@appspot.gserviceaccount.com";
const project = 'xxxx-xxxxxx';
const queue = "xx-xxxxx";
const location = 'us-central1';
const url = "https://us-central1-xxxxx-xxxxx.cloudfunctions.net/myFunction";
const payload = 'My Data';
const parent = client.queuePath(project, location, queue);

async function quickstart() {
  const task = {
    httpRequest: {
      httpMethod: "POST",
      url: url,
      body: Buffer.from(payload).toString("base64"), // previous code: body: Buffer.from(JSON.stringify(payload)).toString("base64"),
      headers: {
        "Content-Type": "text/plain"
      },
      oidcToken: {
        serviceAccountEmail
      }
    }
  };
  task.scheduleTime = {
    seconds: seconds + Date.now() / 1000,
  };
  const request = {parent: parent, task: task};
  await client.createTask(request)
    .then(response => {
      const task = response[0].name;
      console.log(`Created task ${task}`);
      return {'Response': String(response)}
    })
    .catch(err => {
      console.error(`Error in createTask: ${err.message || err}`);
      next()
    });
}

quickstart();
In the Cloud Function, I changed req.query to req.body to get the result from Cloud Tasks:
const functions = require("firebase-functions");

exports.myFunction = functions.https.onRequest((req, res) => {
  console.log(req.body);
  console.log('success')
  res.send('success');
});
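If you prefer to keep "application/json", an alternative (a sketch, not part of the original fix) is to send an actual JSON object; Cloud Functions then parses the body for you, so req.body arrives as an object:

// Hypothetical alternative: an object payload, with JSON.stringify kept in place.
const payload = { message: 'My data' };

const task = {
  httpRequest: {
    httpMethod: "POST",
    url: url,
    // stringify is appropriate here because payload is an object, not a bare string
    body: Buffer.from(JSON.stringify(payload)).toString("base64"),
    headers: {
      "Content-Type": "application/json"
    },
    oidcToken: {
      serviceAccountEmail
    }
  }
};

// In the function, req.body.message would then log 'My data'.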

NOT_FOUND(5): Instance Unavailable. HTTP status code 404

I got this error when the task was being processed.
This is my Node.js code:
// Imports assumed from the rest of the project:
// const {CloudTasksClient, protos} = require('@google-cloud/tasks');
// const client = new CloudTasksClient();

async function quickstart(message : any) {
  // TODO(developer): Uncomment these lines and replace with your values.
  const project = "";  // project id
  const queue = "";    // queue name
  const location = ""; // region
  const payload = JSON.stringify({
    id: message.id,
    data: message.data,
    attributes: message.attributes,
  });
  const inSeconds = 180;

  // Construct the fully qualified queue name.
  const parent = client.queuePath(project, location, queue);

  const task = {
    appEngineHttpRequest: {
      headers: {"Content-type": "application/json"},
      httpMethod: protos.google.cloud.tasks.v2.HttpMethod.POST,
      relativeUri: "/api/download",
      body: "",
    },
    scheduleTime: {},
  };

  if (payload) {
    task.appEngineHttpRequest.body = Buffer.from(payload).toString("base64");
  }
  if (inSeconds) {
    task.scheduleTime = {
      seconds: inSeconds + Date.now() / 1000,
    };
  }

  const request = {
    parent: parent,
    task: task,
  };

  console.log("Sending task:");
  console.log(task);

  // Send create task request.
  const [response] = await client.createTask(request);
  console.log(`Created task ${response.name}`);
  console.log("Created task");
  return true;
}
The task is created without issue. However, it didn't trigger my cloud function, and I got a 404 or an unhandled exception in my cloud logs. I have no idea what's going wrong.
I also tested with the gcloud CLI without the issue; the gcloud CLI was able to trigger my cloud function at the provided URL.
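One possible cause, offered as an assumption rather than a confirmed fix: appEngineHttpRequest with a relativeUri only routes to an App Engine service in the same project, so if the handler is an HTTP Cloud Function reached by URL (as the gcloud test suggests), the task would need an httpRequest instead. A minimal sketch, with hypothetical placeholder URL and service account:

// Sketch: target the function directly by URL with an OIDC token.
const task = {
  httpRequest: {
    headers: {"Content-type": "application/json"},
    httpMethod: protos.google.cloud.tasks.v2.HttpMethod.POST,
    url: "https://<region>-<project>.cloudfunctions.net/myFunction",
    body: Buffer.from(payload).toString("base64"),
    oidcToken: {
      serviceAccountEmail: "<project>@appspot.gserviceaccount.com",
    },
  },
};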

query on dynamodb through lambda using pk and sk returns undefined

I have a Lambda function that makes a call to a DynamoDB table using a PK and SK. For some reason I am getting the return data as undefined. Upon checking the CloudWatch logs I see the error messages below. What could be the mistake I am making here?
2021-10-01T00:15:03.104Z cdd7201f-0c95-4283-9257-c07324998896 INFO BOL Data: undefined
2021-10-01T00:15:03.124Z cdd7201f-0c95-4283-9257-c07324998896 INFO BOL Error: TypeError: Cannot read property 'PK' of undefined
    at getUserById (/var/task/getUserById.js:16:27)
    at processTicksAndRejections (internal/process/task_queues.js:95:5)
Here is the lambda code the error is referring to -
const AWS = require('aws-sdk');
const docClient = new AWS.DynamoDB.DocumentClient();

async function getUserById(userId) {
  console.log('USERID:', userId);
  const params = {
    TableName: "Bol",
    KeyConditionExpression: 'PK = :hashKey and SK = :sortKey',
    ExpressionAttributeValues: {
      ':hashKey': userId,
      ':sortKey': 'USER'
    }
  };
  try {
    const { Item } = await docClient.query(params).promise();
    console.log('BOL Data:', Item);
    return { id: Item.PK, name: Item.Data.displayName };
  } catch(err) {
    console.log("BOL Error: ", err);
  }
}

module.exports = getUserById;
Below is the data I am supposed to receive on lambda -
It's the way you are initializing the Item.
You can try the approach below to get the objects:
try {
  const Item = await docClient.query(params).promise();
  Item.Items.forEach(function(item) {
    let buffer = item.Data + " -- " + item.PK;
    console.log("buffer ", buffer)
  });
} catch(err) {
  console.log("BOL Error: ", err);
}
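If the PK/SK pair is expected to match exactly one record, a minimal sketch of returning the shape the original code intended (assuming the matched item carries the PK and Data attributes, as in the question):

// query resolves to { Items: [...] }; the { Item } shape belongs to get.
const result = await docClient.query(params).promise();
if (result.Items && result.Items.length > 0) {
  const item = result.Items[0];
  return { id: item.PK, name: item.Data.displayName };
}
return null; // nothing matched the PK/SK pair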
There are multiple ways to get the desired response; refer to the links below:
Making Requests with the Document Client
Formatting DynamoDB data to normal JSON in AWS Lambda

Cloud Document AI Can't Process Documents From Storage

I have the following code:
const client = new DocumentProcessorServiceClient();

const inputConfig = {
  mimeType: 'application/pdf',
  gcsSource: {
    uri: gcsSourceUri,
  },
};
const outputConfig = {
  gcsDestination: {
    uri: gcsDestinationUri,
  },
};
const name = `projects/myproject/locations/us/processors/myprocessor`;
const request = {
  name,
  inputConfigs: [inputConfig],
  outputConfig: outputConfig
};

async function getText() {
  const [operation] = await client.batchProcessDocuments(request);
}

getText();
Which produces the following error:
(node:72144) UnhandledPromiseRejectionWarning: Error: Failed to process all the documents
    at Operation._unpackResponse (E:\development\audiobook\node_modules\google-gax\build\src\longRunningCalls\longrunning.js:136:31)
    at E:\development\audiobook\node_modules\google-gax\build\src\longRunningCalls\longrunning.js:122:18
(node:72144) UnhandledPromiseRejectionWarning: Unhandled promise rejection. This error originated either by throwing inside of an async function without a catch block, or by rejecting a promise which was not handled with .catch(). To terminate the node process on unhandled promise rejection, use the CLI flag `--unhandled-rejections=strict` (see https://nodejs.org/api/cli.html#cli_unhandled_rejections_mode). (rejection id: 1)
(node:72144) [DEP0018] DeprecationWarning: Unhandled promise rejections are deprecated. In the future, promise rejections that are not handled will terminate the Node.js process with a non-zero exit code.
What might be the problem here? The Cloud Storage URI is working and the file is in the correct format.
In my case the problem was the format of outputConfig.
Checking your code, I found that you are defining the outputConfig this way:
const outputConfig = {
  gcsDestination: {
    uri: gcsDestinationUri,
  },
};
while according to the documentation you should change it to:
const outputConfig = {
  gcsDestination: {
    gcsDestinationUri: 'gs://bucket-name/folder/',
  },
};
https://stackoverflow.com/a/65826292/6216983 is correct for v1beta3
For the GA version v1, you can find the updated request structure in the documentation:
https://cloud.google.com/document-ai/docs/send-request#documentai_process_document-nodejs
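For illustration, a sketch of the v1 request shape (the bucket paths and processor name are placeholders; v1 replaces inputConfigs/outputConfig with inputDocuments/documentOutputConfig):

// Sketch of a v1 BatchProcessRequest; gs:// paths are hypothetical.
const request = {
  name: 'projects/myproject/locations/us/processors/myprocessor',
  inputDocuments: {
    gcsDocuments: {
      documents: [
        {
          gcsUri: 'gs://bucket-name/folder/file.pdf',
          mimeType: 'application/pdf',
        },
      ],
    },
  },
  documentOutputConfig: {
    gcsOutputConfig: {
      gcsUri: 'gs://bucket-name/output-folder/',
    },
  },
};

const [operation] = await client.batchProcessDocuments(request);
await operation.promise(); // wait for the long-running batch job to finish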

Connect AWS mobile backend to DynamoDB

I am trying to use the AWS mobile backend (using a Lambda function) to insert into DynamoDB (also configured via the mobile backend), but with no success so far.
The relevant code:
'use strict';
console.log("Loading function");

const AWS = require('aws-sdk');
const docClient = new AWS.DynamoDB.DocumentClient({region: process.env.MOBILE_HUB_PROJECT_REGION});

exports.handler = function(event, context, callback) {
  var responseCode = 200;
  var requestBody, pathParams, queryStringParams, headerParams, stage,
      stageVariables, cognitoIdentityId, httpMethod, sourceIp, userAgent,
      requestId, resourcePath;
  console.log("request: " + JSON.stringify(event));

  // Request Body
  requestBody = event.body;
  if (requestBody !== undefined && requestBody !== null) {
    // Set 'test-status' field in the request to test sending a specific response status code (e.g., 503)
    responseCode = JSON.parse(requestBody)['test-status'];
  }

  // Path Parameters
  pathParams = event.path;

  // Query String Parameters
  queryStringParams = event.queryStringParameters;

  // Header Parameters
  headerParams = event.headers;

  if (event.requestContext !== null && event.requestContext !== undefined) {
    var requestContext = event.requestContext;

    // API Gateway Stage
    stage = requestContext.stage;

    // Unique Request ID
    requestId = requestContext.requestId;

    // Resource Path
    resourcePath = requestContext.resourcePath;

    var identity = requestContext.identity;

    // Amazon Cognito User Identity
    cognitoIdentityId = identity.cognitoIdentityId;

    // Source IP
    sourceIp = identity.sourceIp;

    // User-Agent
    userAgent = identity.userAgent;
  }

  // API Gateway Stage Variables
  stageVariables = event.stageVariables;

  // HTTP Method (e.g., POST, GET, HEAD)
  httpMethod = event.httpMethod;

  // TODO: Put your application logic here...
  let params = {
    Item: {
      "prop1": 0,
      "prop2": "text"
    },
    TableName: "testTable"
  };
  docClient.put(params, function(data, err){
    if (err)
      responseCode = 500;
    else {
      responseCode = 200;
      context.succeed(data);
    }
  });

  // For demonstration purposes, we'll just echo these values back to the client
  var responseBody = {
    requestBody: requestBody,
    pathParams: pathParams,
    queryStringParams: queryStringParams,
    headerParams: headerParams,
    stage: stage,
    stageVariables: stageVariables,
    cognitoIdentityId: cognitoIdentityId,
    httpMethod: httpMethod,
    sourceIp: sourceIp,
    userAgent: userAgent,
    requestId: requestId,
    resourcePath: resourcePath
  };
  var response = {
    statusCode: responseCode,
    headers: {
      "x-custom-header": "custom header value"
    },
    body: JSON.stringify(responseBody)
  };
  console.log("response: " + JSON.stringify(response))
  context.succeed(response);
};
This doesn't put the item into the table for some reason.
I granted the necessary permissions via the roles section; is there anything I am missing?
** responseCode is only for testing purposes.
Edit:
I tried the approach from "AWS node.js lambda request dynamodb but no response (no err, no return data)" and it doesn't work either.
Edit 2:
Added the full handler code. (It is the default generated code when creating your first AWS Lambda.)
I have refactored some bits of your code to make it much simpler and to use async/await (make sure to select Node 8.10 as the runtime for your function) instead of callbacks. I also got rid of the context and callback parameters, as they were used in older versions of NodeJS. Once you're on Node 8+, async/await should be the default option.
Also, it is possible to chain a .promise() onto docClient.put, so you can easily await it, making your code way simpler. I have left only the DynamoDB part (which is what is relevant to your question):
'use strict';
console.log("Loading function");

const AWS = require('aws-sdk');
const docClient = new AWS.DynamoDB.DocumentClient({region: process.env.MOBILE_HUB_PROJECT_REGION});

exports.handler = async (event) => {
  let params = {
    Item: {
      "prop0": 1,
      "prop2": "text"
    },
    TableName: "testTable"
  };
  try {
    await docClient.put(params).promise();
  } catch (e) {
    console.log(e)
    return {
      message: e.message
    }
  }
  return { message: 'Data inserted successfully' };
};
Things to keep in mind if it still does not work:
Make sure your Lambda function has the right permissions to insert items into DynamoDB (AmazonDynamoDBFullAccess will do it).
You ALWAYS have to provide the partition key when inserting items into DynamoDB. In your example, the JSON only has two properties: prop1 and prop2. If neither of them is the partition key, your code will certainly fail (see the sketch after this list).
Make sure your table also exists.
If your code fails, just check the CloudWatch logs, as any exception is now captured and printed out on the console.
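A quick way to check which attribute is the partition key, as a sketch (describeTable lives on the low-level AWS.DynamoDB client, not on DocumentClient):

// Inspect the table's key schema to see which attribute(s)
// must be present in every Item you put.
const dynamodb = new AWS.DynamoDB({region: process.env.MOBILE_HUB_PROJECT_REGION});
const { Table } = await dynamodb.describeTable({ TableName: "testTable" }).promise();
console.log(Table.KeySchema);
// e.g. [ { AttributeName: 'prop1', KeyType: 'HASH' } ] would mean prop1 is
// the partition key and must appear in params.Item on every put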
The reason why no data is written to the table is that the call to DynamoDB's put is asynchronous and returns by invoking your callback. But during that time, the rest of the code continues to execute, and your function eventually finishes before the call to DynamoDB has had a chance to complete.
You can use the async / await keywords to make your code synchronous:
async function writeToDynamoDB(params) {
  return new Promise((resolve, reject) => {
    // note: the AWS SDK callback signature is (err, data), not (data, err)
    docClient.put(params, function(err, data) {
      if (err)
        reject(err);
      else
        resolve(data);
    });
  });
}

// inside an async function:
let params = ...
var data = await writeToDynamoDB(params)
You can find sample code I wrote (in Typescript) at https://github.com/sebsto/maxi80-alexa/blob/master/lambda/src/DDBController.ts