Unable to read values from contract deployed in Hyperledger blockchain

I have set up the Hyperledger blockchain locally and run the Hyperledger service within a Docker container. I am able to bring up the node successfully, and I can deploy and write to the blockchain using a sample contract.
However, I cannot read the data back from the blockchain. Below is the error message it throws. Can anyone please guide me on what's wrong here?
[ibc-js] Deploy Chaincode - Complete
{"query":{},"invoke":{},"details":{"deployed_name":"c123c14a65a511ee79e2a41b23726f473478d002064c01c3ce035cffa1229af083d73f1db220fc2f267b9ae31d66ce2e10113548e7abdf8812986ac3c5770a9c","func":{"invoke":["init","write"],"query":["read"]},"git_url":"https://github.com/IBM-Blockchain/learn-chaincode/finished","options":{"quiet":true,"timeout":60000,"tls":false},"peers":[{"name":"vp0-vp0...:49155","api_host":"127.0.0.1","api_port":49155,"id":"vp0","tls":false}],"timestamp":1470146338831,"users":[],"unzip_dir":"learn-chaincode-master/finished","version":"github.com/hyperledger/fabric/core/chaincode/shim","zip_url":"https://github.com/IBM-Blockchain/learn-chaincode/archive/master.zip"}}
sdk has deployed code and waited
[ibc-js] write - success: { jsonrpc: '2.0',
result:
{ status: 'OK',
message: '8b340e92-f96f-41f6-9b15-6ccb23304360' },
id: 1470146405598 }
write response: { jsonrpc: '2.0',
result:
{ status: 'OK',
message: '8b340e92-f96f-41f6-9b15-6ccb23304360' },
id: 1470146405598 }
[ibc-js] read - success: { jsonrpc: '2.0',
error:
{ code: -32003,
message: 'Query failure',
data: 'Error when querying chaincode: Error:Failed to launch chaincode spec(Could not get deployment transaction for c123c14a65a511ee79e2xxxxxxxxxxxxxxxxe7abdf8812986ac3c5770a9c - LedgerError - ResourceNotFound: ledger: resource not found)' },
id: 1470146405668 }
read response: null { name: 'query() resp error',
code: 400,
details:
{ code: -32003,
message: 'Query failure',
data: 'Error when querying chaincode: Error:Failed to launch chaincode spec(Could not get deployment transaction for c123c14a65a511ee79e2xxxxxxxxxxxxxxxxe7abdf8812986ac3c5770a9c - LedgerError - ResourceNotFound: ledger: resource not found)' } }
I have used IBM Blockchain JS (ibc-js) to interact with the Go contract.
Below is the Node.js code:
// Step 1 ==================================
var Ibc1 = require('ibm-blockchain-js');
var ibc = new Ibc1(/*logger*/); // you can pass a logger such as winston here - optional
var chaincode = {};

// ==================================
// configure ibc-js sdk
// ==================================
var options = {
  network: {
    peers: [{
      "api_host": "127.0.0.1",
      "api_port": 49155,
      //"api_port_tls": 49157,
      "id": "vp4"
    }],
    users: null,
    options: { quiet: true, tls: false, maxRetry: 1 }
  },
  chaincode: {
    zip_url: 'https://github.com/IBM-Blockchain/learn-chaincode/archive/master.zip',
    unzip_dir: 'learn-chaincode-master/finished',
    git_url: 'https://github.com/IBM-Blockchain/learn-chaincode/finished'
  }
};

// Step 2 ==================================
ibc.load(options, cb_ready);

// Step 3 ==================================
function cb_ready(err, cc) { // response has chaincode functions
  chaincode = cc;
  console.log(JSON.stringify(cc));
  chaincode.deploy('init', ['Hi hyperledger'], null, cb_deployed);

  // Step 5 ==================================
  function cb_deployed() {
    console.log(JSON.stringify(chaincode));
    console.log('sdk has deployed code and waited');
    console.log('******** Writing to chaincode Now **********');
    chaincode.invoke.write(["mykey", "Hi Ledger Systems"], function (err, data) {
      console.log('write response:', data);
      readData();
    });
  }

  function readData() {
    console.log('\n\n**** Waiting 7 seconds before reading **** \n\n');
    setTimeout(function () {
      console.log('\n\n**** Start reading **** \n\n');
      chaincode.invoke.read(["mykey"], function (err, data) {
        console.log('read response:', data);
      });
    }, 7000);
  }
}

Related

When I update to webpack 5, there is an error: configuration has an unknown property 'before'

When I update webpack from 4 to 5, this error appears.
I have a webpackDevServer.js file which contains the 'before' option the error message refers to:
// webpackDevServer.js
module.exports = function(proxy, allowedHost) {
  return {
    before(app, server) {
      if (fs.existsSync(paths.proxySetup)) {
        // This registers user provided middleware for proxy reasons
        require(paths.proxySetup)(app);
      }
      // This lets us fetch source contents from webpack for the error overlay
      app.use(evalSourceMapMiddleware(server));
      // This lets us open files from the runtime error overlay.
      app.use(errorOverlayMiddleware());
      // This service worker file is effectively a 'no-op' that will reset any
      // previous service worker registered for the same host:port combination.
      // We do this in development to avoid hitting the production cache if
      // it used the same host and port.
      // https://github.com/facebook/create-react-app/issues/2272#issuecomment-302832432
      app.use(noopServiceWorkerMiddleware());
    },
  };
};
I use the above file in a start.js file; to run the project I type node scripts/start.js.
// start.js
...
const createDevServerConfig = require('../config/webpackDevServer.config');
...
const serverConfig = createDevServerConfig(
  proxyConfig,
  urls.lanUrlForConfig
);
const devServer = new WebpackDevServer(compiler, serverConfig);
Then it throws an error:
configuration has an unknown property 'before'. These properties are valid:
object { bonjour?, client?, compress?, dev?, firewall?, headers?, historyApiFallback?, host?, hot?, http2?, https?, injectClient?, injectHot?, liveReload?, onAfterSetupMiddleware?, onBeforeSetupMiddleware?, onListening?, open?, openPage?, overlay?, port?, proxy?, public?, setupExitSignals?, static?, stdin?, transportMode?, useLocalIp? }
Here is my package.json:
"webpack": "^5.20.2",
"webpack-dev-server": "^4.0.0-beta.0",
"webpack-manifest-plugin": "2.0.4",
"workbox-webpack-plugin": "^6.1.0"
You have to change before to onBeforeSetupMiddleware. Here is the migration guide from v3 to v4: https://github.com/webpack/webpack-dev-server/blob/master/migration-v4.md
In case the migration guide changes, the relevant details are attached below.
v3:
module.exports = {
  devServer: {
    after: function (app, server, compiler) {
      app.get("/some/path", function (req, res) {
        res.json({ custom: "response" });
      });
    },
  },
};
v4:
module.exports = {
  devServer: {
    onAfterSetupMiddleware: function (devServer) {
      devServer.app.get("/some/path", function (req, res) {
        res.json({ custom: "response" });
      });
    },
  },
};
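Applied to the webpackDevServer.js from the question, the hook would look roughly like this. This is only a sketch: in webpack-dev-server 4 the hook receives the devServer instance instead of (app, server), and whether the CRA helpers (evalSourceMapMiddleware, noopServiceWorkerMiddleware) accept that instance or need extra arguments depends on your react-dev-utils version.
// webpackDevServer.js, adapted for webpack-dev-server v4 (sketch)
module.exports = function(proxy, allowedHost) {
  return {
    onBeforeSetupMiddleware(devServer) {
      const app = devServer.app;
      if (fs.existsSync(paths.proxySetup)) {
        // This registers user provided middleware for proxy reasons
        require(paths.proxySetup)(app);
      }
      // Error overlay source fetching; passing devServer here is an assumption
      app.use(evalSourceMapMiddleware(devServer));
      app.use(errorOverlayMiddleware());
      // No-op service worker, same as before
      app.use(noopServiceWorkerMiddleware());
    },
  };
};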
Ah, my mistake: when I searched for the keyword (e.g. onBeforeSetupMiddleware), I found the webpack-dev-server GitHub releases page, which describes the changes in the new version 4.0.0 beta. https://github.com/webpack/webpack-dev-server/releases

Why is transaction marked as invalid?

I am trying to create a transaction by sending 1 ether from one account to another. Currently, I'm running a local, fully synced Parity node. It's running on the Volta test network of EWF (Energy Web Foundation). It is actually possible to connect to that node with MetaMask and send some ether, but whenever I try the same with web3.js running in a Node.js app, the Parity node gives the following warning/output:
2019-10-25 00:56:50 jsonrpc-eventloop-0 TRACE own_tx Importing transaction: PendingTransaction { transaction: SignedTransaction { transaction: UnverifiedTransaction { unsigned: Transaction { nonce: 1, gas_price: 60000000000, gas: 21000, action: Call(0x2fa24fee2643d577d2859e90bc6d9df0e952034c), value: 1000000000000000000, data: [] }, v: 37, r: 44380982720866416681786190614497685349697533052419104293476368619223783280954, s: 3058706309566473993642661190954381582008760336148306221085742371338506386965, hash: 0x31b4f889f5f10e08b9f10c87f953c9dfded5d0ed1983815c3b1b837700f43702 }, sender: 0x0b9b5323069e9f9fb438e89196b121f3d40fd56e, public: Some(0xa3fc6a484716b844f18cef0039444a3188a295811f133324288cb963f3e5a21dd6ee19c91e42fa745b45a3cf876ff04e0fd1d060ccfe1dab9b1d608dda4c3733) }, condition: None }
2019-10-25 00:56:50 jsonrpc-eventloop-0 DEBUG own_tx Imported to the pool (hash 0x31b4f889f5f10e08b9f10c87f953c9dfded5d0ed1983815c3b1b837700f43702)
2019-10-25 00:56:50 jsonrpc-eventloop-0 WARN own_tx Transaction marked invalid (hash 0x31b4f889f5f10e08b9f10c87f953c9dfded5d0ed1983815c3b1b837700f43702)
When I check the balance of the accounts, nothing has happened. I've tried increasing gasPrice, adding a 'from' key/value pair to txObject, etc. I have also started the Parity node with --no-persistent-txqueue so that it doesn't cache too many transactions, as suggested here. But that didn't change anything either; I still get the same error and the transaction doesn't go through. What is causing this problem and how can I solve it?
web3.eth.getTransactionCount(from, (err, txCount) => {
  const txObject = {
    nonce: web3.utils.toHex(txCount),
    from: from,
    to: to,
    value: web3.utils.toHex(web3.utils.toWei(val, 'ether')),
    gas: web3.utils.toHex(21000),
    gasPrice: web3.utils.toHex(web3.utils.toWei('60', 'gwei'))
  };
  // Sign the transaction
  const tx = new Tx(txObject);
  tx.sign(pk);
  const serializedTx = tx.serialize();
  const raw = '0x' + serializedTx.toString('hex');
  // Broadcast the transaction to the network
  web3.eth.sendSignedTransaction(raw, (err, txHash) => {
    if (txHash === undefined) {
      res.render('sendTransaction', {
        txSuccess: 0,
        blockHash: 'Hash Undefined'
      });
      res.end();
    } else {
      res.render('sendTransaction', {
        txSuccess: 1,
        blockHash: txHash,
      });
      res.end();
    }
  });
});
Any suggestion is welcome,
Thanks!
The code looks fine so it must be something small, you're very close!
What does the error object contain in the web3.eth.sendSignedTransaction callback?
Check your nonce to make sure it has not been used yet. https://volta-explorer.energyweb.org/address/0x0b9b5323069e9f9fb438e89196b121f3d40fd56e/transactions shows a few transactions from the sender address, with "4" being the next value for the nonce.
Try generating the raw transaction via JavaScript and broadcasting it manually via the command line.
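For the first two suggestions, a minimal sketch of what that inspection might look like (this is not part of the original answer; the 'pending' block parameter and the extra logging are assumptions layered on top of the question's code):
// Log the pending nonce the node expects for this sender
web3.eth.getTransactionCount(from, 'pending', (err, txCount) => {
  console.log('next nonce (pending):', txCount);
});

// Log the full error object instead of only checking txHash
web3.eth.sendSignedTransaction(raw, (err, txHash) => {
  if (err) {
    // The message here usually explains why the node rejected the transaction
    console.error('sendSignedTransaction error:', err);
    return;
  }
  console.log('tx hash:', txHash);
});

// Print the raw transaction so it can also be broadcast manually
// via the node's eth_sendRawTransaction JSON-RPC method
console.log('raw tx:', raw);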

AWS Device Farm - Missing or unprocessed resources

I followed the following steps while trying to run an Android app test via AWS Lambda (Node.js):
Created a project
Created an upload
Uploaded the APK to the signed URL
Once the upload was done, I created a device pool using the following params:
var createDevicePoolParams = {
  name: "DAP_Device_Pool",
  description: "DAP_Android_Devices",
  projectArn: projectARN,
  rules: [{
    attribute: "PLATFORM",
    operator: "EQUALS",
    value: "\"ANDROID\""
  }]
};
Then I called scheduleRun with the following params:
var scheduleRunParams = {
  appArn: uploadARN,
  name: "tarunRun",
  devicePoolArn: devicePoolARN,
  projectArn: projectARN,
  test: {
    type: "BUILTIN_FUZZ",
  }
};
But I am getting an error about missing or unprocessed resources.
I am not able to understand what I am missing. My understanding is that if I am using the built-in fuzz test type, then I don't need to upload any custom test cases.
Can somebody please help point out which step is missing?
Then, after your uploads have been processed by Device Farm, call aws devicefarm schedule-run.
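In other words, poll the upload until it reports SUCCEEDED before calling scheduleRun. A minimal sketch of that wait loop (the full example below does the same thing; the 5-second interval is an arbitrary choice):
// Poll the upload until Device Farm has finished processing it
async function waitForUpload(devicefarm, uploadArn) {
  while (true) {
    const { upload } = await devicefarm.getUpload({ arn: uploadArn }).promise();
    if (upload.status === 'SUCCEEDED') return upload;
    if (upload.status === 'FAILED') throw new Error('upload processing failed');
    await new Promise(resolve => setTimeout(resolve, 5000)); // wait 5s and retry
  }
}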
[update]
I put this code in an AWS Lambda function and it worked there as well. Here is a gist of it:
https://gist.github.com/jamesknowsbest/3ea0e385988b0098e5f9d38bf5a932b6
Here is the code I just authored, and it seems to work with the built-in Fuzz/Explorer tests:
// assume we already executed `npm install aws-sdk`
var AWS = require('aws-sdk');
// assumes `npm install request`
const request = require("request");
// fs is a Node.js built-in module
const fs = require('fs');
// https://stackoverflow.com/a/41641607/8016330
const sleep = (waitTimeInMs) => new Promise(resolve => setTimeout(resolve, waitTimeInMs));

// Device Farm is only available in the us-west-2 region
var devicefarm = new AWS.DeviceFarm({ region: 'us-west-2' });

(async function() {
  let project_params = {
    name: "test of fuzz tests"
  };
  let PROJECT_ARN = await devicefarm.createProject(project_params).promise().then(
    function(data){
      return data.project.arn;
    },
    function (error) {
      console.error("Error creating project", "Error: ", error);
    }
  );
  console.log("Project created ", "Project arn: ", PROJECT_ARN);
  // create the upload and upload files to the project
  let params = {
    name: "app-debug.apk",
    type: "ANDROID_APP",
    projectArn: PROJECT_ARN
  };
  let UPLOAD = await devicefarm.createUpload(params).promise().then(
    function(data){
      return data.upload;
    },
    function(error){
      console.error("Creating upload failed with error: ", error);
    }
  );
  let UPLOAD_ARN = UPLOAD.arn;
  let UPLOAD_URL = UPLOAD.url;
  console.log("upload created with arn: ", UPLOAD_ARN);
  console.log("uploading file...");
  let options = {
    method: 'PUT',
    url: UPLOAD_URL,
    headers: {},
    body: fs.readFileSync("/path/to/your/apk/file")
  };
  // wait for upload to finish
  await new Promise(function(resolve, reject){
    request(options, function (error, response, body) {
      if (error) {
        console.error("uploading file failed with error: ", error);
        reject(error);
      }
      resolve(body);
    });
  });
  // get the status of the upload and make sure it has finished processing before scheduling
  let STATUS = await getStatus(UPLOAD_ARN);
  console.log("upload status is: ", STATUS);
  while (STATUS !== "SUCCEEDED"){
    await sleep(5000);
    STATUS = await getStatus(UPLOAD_ARN);
    console.log("upload status is: ", STATUS);
  }
  // create device pool
  let device_pool_params = {
    projectArn: PROJECT_ARN,
    name: "Google Pixel 2",
    rules: [{"attribute": "ARN","operator":"IN","value":"[\"arn:aws:devicefarm:us-west-2::device:5F20BBED05F74D6288D51236B0FB9895\"]"}]
  };
  let DEVICE_POOL_ARN = await devicefarm.createDevicePool(device_pool_params).promise().then(
    function(data){
      return data.devicePool.arn;
    },
    function(error){
      console.error("device pool failed to create with error: ", error);
    }
  );
  console.log("Device pool created successfully with arn: ", DEVICE_POOL_ARN);
  // schedule the run
  let schedule_run_params = {
    name: "MyRun",
    devicePoolArn: DEVICE_POOL_ARN, // You can get the Amazon Resource Name (ARN) of the device pool by using the list-pools CLI command.
    projectArn: PROJECT_ARN, // You can get the Amazon Resource Name (ARN) of the project by using the list-projects CLI command.
    test: {
      type: "BUILTIN_FUZZ"
    },
    appArn: UPLOAD_ARN
  };
  let schedule_run_result = await devicefarm.scheduleRun(schedule_run_params).promise().then(
    function(data){
      return data.run;
    },
    function(error){
      console.error("Schedule run command failed with error: ", error);
    }
  );
  console.log("run finished successfully with result: ", schedule_run_result);
})();

async function getStatus(UPLOAD_ARN){
  return await devicefarm.getUpload({ arn: UPLOAD_ARN }).promise().then(
    function(data){
      return data.upload.status;
    },
    function(error){
      console.error("getting upload failed with error: ", error);
    }
  );
}
Output is:
Project created Project arn: arn:aws:devicefarm:us-west-2:111122223333:project:b9233b49-967e-4b09-a51a-b5c4101340a1
upload created with arn: arn:aws:devicefarm:us-west-2:111122223333:upload:b9233b49-967e-4b09-a51a-b5c4101340a1/48ffd115-f7d7-4df5-ae96-4a44911bff65
uploading file...
upload status is: INITIALIZED
upload status is: SUCCEEDED
Device pool created successfully with arn: arn:aws:devicefarm:us-west-2:111122223333:devicepool:b9233b49-967e-4b09-a51a-b5c4101340a1/c0ce1bbc-7b40-4a0f-a419-ab024a6b1000
run finished successfully with result: { arn:
'arn:aws:devicefarm:us-west-2:111122223333:run:b9233b49-967e-4b09-a51a-b5c4101340a1/39369894-3829-4e14-81c9-bdfa02c7e032',
name: 'MyRun',
type: 'BUILTIN_FUZZ',
platform: 'ANDROID_APP',
created: 2019-06-06T23:51:13.529Z,
status: 'SCHEDULING',
result: 'PENDING',
started: 2019-06-06T23:51:13.529Z,
counters:
{ total: 0,
passed: 0,
failed: 0,
warned: 0,
errored: 0,
stopped: 0,
skipped: 0 },
totalJobs: 1,
completedJobs: 0,
billingMethod: 'METERED',
seed: 982045377,
appUpload:
'arn:aws:devicefarm:us-west-2:111122223333:upload:b9233b49-967e-4b09-a51a-b5c4101340a1/48ffd115-f7d7-4df5-ae96-4a44911bff65',
eventCount: 6000,
jobTimeoutMinutes: 150,
devicePoolArn:
'arn:aws:devicefarm:us-west-2:111122223333:devicepool:b9233b49-967e-4b09-a51a-b5c4101340a1/c0ce1bbc-7b40-4a0f-a419-ab024a6b1000',
radios: { wifi: true, bluetooth: false, nfc: true, gps: true } }
HTH
-James

Limit the size of a DynamoDB primary key using TypeScript

I am building an application with Ionic 3 and AWS Lambda, where I use DynamoDB for my database. The primary key generated as the "id" (in my case "groupId") of the table is too long: afc9c380-db7d-11e7-a2c5-3bce7c435d3e. I would like to know if there is a way to limit it to 6 characters. Thank you.
My TypeScript code ("groupId" is auto-generated):
addGroup(form) {
  this.submitted = true;
  if (form && this.formData.name) {
    let group = {
      name: this.formData.name,
      description: this.formData.description,
      profileImageURI: this.profileImageURI,
      userId: this.globals.getUserId(),
      imageUrl: this.formData.imageUrl
    };
    this.globals.displayLoader("Adding...");
    this.client.getClient().groupsCreateByUserId(this.globals.getUserId(), group).subscribe(
      (data) => {
        this.globals.dismissLoader();
        this.globals.displayToast(`Group successfully added.`);
        this.navCtrl.pop();
      },
      (err) => {
        this.globals.dismissLoader();
        this.globals.displayAlert('Error encountered',
          `An error occurred when trying to add the group. Please check the console logs for more information.`);
        console.error(err);
      }
    );
  }
  this.globals.dismissLoader();
}
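No answer is attached to this question here, but if the goal is just a shorter, human-readable identifier, one option (purely a sketch, not from the original post, and assuming the backend can accept a caller-supplied groupId) is to generate a short random id on the client and accept the collision risk that comes with only 6 characters:
// Hypothetical helper: generate a 6-character alphanumeric id.
// With only ~2 billion combinations, collisions are possible, so the
// backend would need to retry or reject duplicates.
function shortId(length = 6) {
  const alphabet = 'abcdefghijklmnopqrstuvwxyz0123456789';
  let id = '';
  for (let i = 0; i < length; i++) {
    id += alphabet[Math.floor(Math.random() * alphabet.length)];
  }
  return id;
}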

AWS JavaScript SDK request.js send request function execution time gradually increases

I am using aws-sdk to push data to a Kinesis stream.
I am using PutRecord to achieve realtime data push.
I am observing the same delay with PutRecords as well, in the case of batch writes.
I have tried this out with 4 records, where I am not crossing any shard limit.
Below is my Node.js HTTP agent configuration. The default maxSockets value is set to Infinity.
Agent {
domain: null,
_events: { free: [Function] },
_eventsCount: 1,
_maxListeners: undefined,
defaultPort: 80,
protocol: 'http:',
options: { path: null },
requests: {},
sockets: {},
freeSockets: {},
keepAliveMsecs: 1000,
keepAlive: false,
maxSockets: Infinity,
maxFreeSockets: 256 }
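Not from the original post, but for reference: with the v2 aws-sdk, the HTTP agent used by the Kinesis client can be set explicitly, for example to enable keep-alive so each PutRecord does not pay the connection setup cost again (the agent options and region below are assumptions, not the poster's configuration):
const AWS = require('aws-sdk');
const https = require('https');

// Reuse TCP/TLS connections across requests instead of opening a new one each time
const agent = new https.Agent({ keepAlive: true, maxSockets: 50 });

const kinesis = new AWS.Kinesis({
  region: 'us-east-1',            // assumed region
  httpOptions: { agent: agent }
});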
Below is my code.
I am using the following code to trigger the putRecord call:
event.Records.forEach(function(record) {
  var payload = new Buffer(record.kinesis.data, 'base64').toString('ascii');
  // put record request
  evt = transformEvent(payload);
  promises.push(writeRecordToKinesis(kinesis, streamName, evt));
});
The event structure is:
evt = {
  Data: new Buffer(JSON.stringify(payload)),
  PartitionKey: payload.PartitionKey,
  StreamName: streamName,
  SequenceNumberForOrdering: dateInMillis.toString()
};
This event is used in the put request.
function writeRecordToKinesis(kinesis, streamName, evt) {
  console.time('WRITE_TO_KINESIS_EXECUTION_TIME');
  var deferred = Q.defer();
  try {
    kinesis.putRecord(evt, function(err, data) {
      if (err) {
        console.warn('Kinesis putRecord %j', err);
        deferred.reject(err);
      } else {
        console.log(data);
        deferred.resolve(data);
      }
      console.timeEnd('WRITE_TO_KINESIS_EXECUTION_TIME');
    });
  } catch (e) {
    console.error('Error occurred while writing data to Kinesis' + e);
    deferred.reject(e);
  }
  return deferred.promise;
}
Below is the output for 3 messages.
WRITE_TO_KINESIS_EXECUTION_TIME: 2026ms
WRITE_TO_KINESIS_EXECUTION_TIME: 2971ms
WRITE_TO_KINESIS_EXECUTION_TIME: 3458ms
Here we can see a gradual increase in response time and function execution time.
I have added counters in the aws-sdk request.js class, and I can see the same pattern there as well.
Below is the code snippet from the aws-sdk request.js class which executes the put request.
send: function send(callback) {
  console.time('SEND_REQUEST_TO_KINESIS_EXECUTION_TIME');
  if (callback) {
    this.on('complete', function (resp) {
      console.timeEnd('SEND_REQUEST_TO_KINESIS_EXECUTION_TIME');
      callback.call(resp, resp.error, resp.data);
    });
  }
  this.runTo();
  return this.response;
},
Output for send request:
SEND_REQUEST_TO_KINESIS_EXECUTION_TIME: 1751ms
SEND_REQUEST_TO_KINESIS_EXECUTION_TIME: 1816ms
SEND_REQUEST_TO_KINESIS_EXECUTION_TIME: 2761ms
SEND_REQUEST_TO_KINESIS_EXECUTION_TIME: 3248ms
Here you can see it increasing gradually.
Can anyone please suggest how I can reduce this delay?
3 seconds to push a single record to Kinesis is not acceptable at all.