Attribute-based access control with Hyperledger Fabric - blockchain

I have pulled the fabric repository from the master branch. Using the Node SDK,
I am registering a user and enrolling him with an hf.Registrar.Roles attribute, requesting the certificate with the same attribute.
Then I am invoking a transaction and trying to decode his attribute in chaincode.
I am also trying to decode his attribute at the Node layer.
But I am getting the attribute value as null and the attribute status as false.
var attr_req = [];
var attr1_req = {name: "hf.Registrar.Roles", required: true};
attr_req.push(attr1_req);
var reg_attr = [];
var attr_reg = {name: "hf.Registrar.Roles", value: "this is roles"};
reg_attr.push(attr_reg);
return hfc.newDefaultKeyValueStore({
    path: getKeyStoreForOrg(getOrgName(userOrg))
}).then((store) => {
    client.setStateStore(store);
    // clearing the user context before switching
    client._userContext = null;
    return client.getUserContext(username, true).then((user) => {
        logger.debug("User :" + user);
        if (user && user.isEnrolled()) {
            logger.info('Successfully loaded member from persistence');
            return user;
        } else {
            let caClient = caClients[userOrg];
            logger.debug("Ca client: " + caClient);
            return getAdminUser(userOrg).then(function(adminUserObj) {
                member = adminUserObj;
                return caClient.register({
                    enrollmentID: username,
                    affiliation: userOrg + '.department1',
                    attrs: reg_attr
                }, member);
            }).then((secret) => {
                enrollmentSecret = secret;
                logger.debug(username + ' registered successfully');
                return caClient.enroll({
                    enrollmentID: username,
                    enrollmentSecret: secret,
                    attr_reqs: attr_req
                });
            }, (err) => {
                logger.debug(username + ' failed to register');
                return '' + err;
                //return 'Failed to register '+username+'. Error: ' + err.stack ? err.stack : err;
            }).then((message) => {
                if (message && typeof message === 'string' && message.includes('Error:')) {
                    logger.error(username + ' enrollment failed');
                    return message;
                }
                logger.debug(username + ' enrolled successfully');
                logger.debug("message :" + message.certificate);
                let cert = X509.parseCert(message.certificate);
                logger.debug("parsed cert: " + cert);
                logger.debug("cert extensions ");
                logger.debug(cert.extensions);
                logger.debug("cert extensions[......] " + cert.extensions['1.2.3.4.5.6.7.8.1']);
                if (cert && cert.extensions && cert.extensions['1.2.3.4.5.6.7.8.1']) {
                    logger.debug("reached line 324 member");
                    let attr_string = cert.extensions['1.2.3.4.5.6.7.8.1'];
                    let attr_object = JSON.parse(attr_string);
                    let attrs = attr_object.attrs;
                    logger.debug("attributes: " + attrs);
                }
From the chaincode part:
This is the code in the chaincode:
attrvalue, status, _ := cid.GetAttributeValue(stub, "hf.Registrar.Roles")
fmt.Printf("attr Value: %s status : %t\n", attrvalue, status)
fmt.Println("attr Value:", attrvalue, "status:", status)
From the Node part:
cert.extensions is:
{ keyUsage: 'Digital Signature',
basicConstraints: 'CA:FALSE',
subjectKeyIdentifier: 'F3:66:26:E7:0D:1A:15:E9:F0:40:6F:FF:17:A8:5C:D3:CE:B8:4C:50',
authorityKeyIdentifier: 'keyid:8D:0F:3C:42:48:8D:31:FE:72:06:99:4D:CE:1D:25:4E:A1:8C:DA:47:85:24:73:51:91:D4:CF:93:D6:7D:48:B2' }
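Note that the extensions listed above contain no attribute extension at all, which matches the null value and false status seen in chaincode: the CA did not embed the requested attributes in the enrollment certificate. When ABAC works, the certificate carries an additional extension under OID 1.2.3.4.5.6.7.8.1 whose value is a JSON document with an attrs map. A minimal sketch of what the Node-side check above should then yield (the exact JSON layout is my assumption, based on the attrs field parsed in the code above):
// Hypothetical decoded content of the attribute extension once the CA embeds it.
// cert comes from X509.parseCert(message.certificate) as in the code above.
let attr_string = cert.extensions['1.2.3.4.5.6.7.8.1'];
// attr_string is expected to look like: {"attrs":{"hf.Registrar.Roles":"this is roles"}}
let attrs = JSON.parse(attr_string).attrs;
console.log(attrs['hf.Registrar.Roles']); // "this is roles" when the attribute is present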

The issue is resolved.
Steps:
1. Copy https://github.com/hyperledger/fabric-sdk-node/tree/release/fabric-ca-client/lib/FabricCAClientImpl.js into node_modules/fabric-ca-client/lib.
2. Clone fabric-ca from the master branch and run make clean and make docker.
3. Clone fabric from the master branch.

Related

HTTP Cloud Function returns 500 but there are no errors in the logs

I'm seeing strange behaviour. I have a few HTTP functions in Firebase Cloud Functions. They work perfectly, but on some days they start returning status 500 for a while, then go back to working normally for a few minutes, and then start returning status 500 again; this behaviour lasts for the entire day.
The strangest part is that I don't get any error messages in Stackdriver; in fact, there are no log entries for these calls at all. It is as if the calls never reach Google's services, or are rejected without any record of it.
I'll post the implementation of one of the most used functions in my application:
import * as functions from 'firebase-functions';
import * as admin from 'firebase-admin';
admin.initializeApp()
exports.changeOrderStatus_1 = functions.https.onRequest((request, response) =>
{
//Check Headers
const clientID = request.get('ClientID');
if(clientID === null || clientID === undefined || clientID === "")
{
console.error(new Error('clientID not provided.'));
return response.status(500).send('clientID not provided.');
}
const unitID = request.get('UnitID');
if(unitID === null || unitID === undefined || unitID === "")
{
console.error(new Error('unitID not provided.'));
return response.status(500).send('unitID not provided.');
}
//Check body
const orderID = request.body.OrderID;
if(orderID === null || orderID === undefined || orderID === "")
{
console.error(new Error('orderID not provided.'));
return response.status(500).send('orderID not provided.');
}
const orderStatus = request.body.OrderStatus;
if(orderStatus === null || orderStatus === undefined || orderStatus === "")
{
console.error(new Error('orderStatus not provided.'));
return response.status(500).send('orderStatus not provided.');
}
const orderStatusInt = Number.parseInt(String(orderStatus));
const notificationTokenString = String(request.body.NotificationToken);
const customerID = request.body.CustomerID;
const promises: any[] = [];
const p1 = admin.database().ref('Clients/' + clientID + '/UnitData/'+ unitID +'/FreshData/Orders/' + orderID + '/Status').set(orderStatusInt);
promises.push(p1);
if(notificationTokenString !== null && notificationTokenString.length !== 0 && notificationTokenString !== 'undefined' && !(customerID === null || customerID === undefined || customerID === ""))
{
const p2 = admin.database().ref('Customers/' + customerID + '/OrderHistory/' + orderID + '/Status').set(orderStatusInt);
promises.push(p2);
if(orderStatusInt > 0 && orderStatusInt < 4)
{
const p3 = admin.database().ref('Customers/' + customerID + '/ActiveOrders/' + orderID).set(orderStatusInt);
promises.push(p3);
}
else
{
const p4 = admin.database().ref('Customers/' + customerID + '/ActiveOrders/' + orderID).set(null);
promises.push(p4);
}
let title = String(request.body.NotificationTitle);
let message = String(request.body.NotificationMessage);
if(title === null || title.length === 0)
title = "?????";
if(message === null || message.length === 0)
message = "?????";
const payload =
{
notification:
{
title: title,
body: message,
icon: 'notification_icon',
sound : 'default'
}
};
const p5 = admin.messaging().sendToDevice(notificationTokenString, payload);
promises.push(p5);
}
return Promise.all(promises).then(r => { return response.status(200).send('success') })
.catch(error =>
{
console.error(new Error(error));
return response.status(500).send(error)
});
})
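Since the failing calls leave nothing in Stackdriver, one thing worth trying is to log unconditionally at the very top of the handler, before any validation; if those entries never show up for the failing requests, the calls are being rejected before your code runs. This is only a debugging sketch added around the existing handler, not a fix:
exports.changeOrderStatus_1 = functions.https.onRequest((request, response) =>
{
    // Log every invocation first, so any call that reaches the function leaves a trace.
    console.log('changeOrderStatus_1 invoked', {
        contentType: request.get('content-type'),
        clientID: request.get('ClientID'),
        unitID: request.get('UnitID'),
        body: request.body
    });
    // ... existing validation and database logic from above ...
})
Note also that the handler answers missing headers or body fields with status 500; returning a 4xx there would make genuine server-side failures easier to tell apart on the client.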
And this is how I invoke it; the client application is a Xamarin Forms app using C#:
static HttpClient Client;
public static void Initialize()
{
Client = new HttpClient();
Client.BaseAddress = new Uri("My cloud functions address");
Client.DefaultRequestHeaders.Add("UnitID", UnitService.GetUnitID());
Client.DefaultRequestHeaders.Add("ClientID", AuthenticationService.GetFirebaseAuth().User.LocalId);
}
public static async Task<bool> CallChangeOrderStatus(OrderHolder holder, int status)
{
Debug.WriteLine("CallChangeOrderStatus: " + status);
try
{
var content = new Dictionary<string, string>();
content.Add("OrderID", holder.Order.ID);
content.Add("OrderStatus", status.ToString());
if (!string.IsNullOrEmpty(holder.Order.NotificationToken) && NotificationService.ShouldSend(status))
{
content.Add("CustomerID", holder.Order.SenderID);
content.Add("NotificationToken", holder.Order.NotificationToken);
content.Add("NotificationTitle", NotificationService.GetTitle(status));
content.Add("NotificationMessage", NotificationService.GetMessage(status));
}
var result = await Client.PostAsync("changeOrderStatus_1", new FormUrlEncodedContent(content));
return result.IsSuccessStatusCode;
}
catch (HttpRequestException exc)
{
#if DEBUG
ErrorHandlerService.ShowErrorMessage(exc);
#endif
Crashes.TrackError(exc);
return false;
}
}
These functions are sometimes called several times a minute, but can also go for up to an hour without being called.
I have sent the requests over mobile connections, wifi connections and wired connections, and from various internet providers, and the problem still happens.
Am I doing something wrong? Am I missing something? Or is it instability on Google's servers?

Error with AWS Lambda salt and hash

I've been using source code from AWS Lambda in Action (Poccia) to create users in a User Pool and an Identity Pool. I keep getting the error:
Response:
{
"errorMessage": "RequestId: f6511085-f22c-11e7-be27-534dfc5d6456 Process exited before completing request"
}
Request ID:
"f6511085-f22c-11e7-be27-534dfc5d6456"
Function Logs:
START RequestId: f6511085-f22c-11e7-be27-534dfc5d6456 Version: $LATEST
2018-01-05T15:27:38.890Z f6511085-f22c-11e7-be27-534dfc5d6456 TypeError: Pass phrase must be a buffer
at TypeError (native)
at pbkdf2 (crypto.js:576:20)
at Object.exports.pbkdf2 (crypto.js:558:10)
at computeHash (/var/task/lib/cryptoUtils.js:10:10)
at InternalFieldObject.ondone (/var/task/lib/cryptoUtils.js:19:4)
END RequestId: f6511085-f22c-11e7-be27-534dfc5d6456
REPORT RequestId: f6511085-f22c-11e7-be27-534dfc5d6456 Duration: 113.62 ms Billed Duration: 200 ms Memory Size: 128 MB Max Memory Used: 33 MB
RequestId: f6511085-f22c-11e7-be27-534dfc5d6456 Process exited before completing request
I'm new to AWS services and am not sure why this error is occurring. Below is the Lambda function I'm attempting to use, followed by the cryptoUtils.js script it references.
console.log('Loading function');
//Loading standard module, such as crypto and the AWS SDK
var AWS = require('aws-sdk');
var crypto = require('crypto');
var cryptoUtils = require('./lib/cryptoUtils.js'); //Loading the cryptoUtils.js module shared code, included in the uploaded ZIP archive
var config = require('./config.json'); //Loading the configuration in the config.json file, included in the uploaded ZIP archive
var dynamodb = new AWS.DynamoDB({
accessKeyId: 'usingKEYfromIAM',
secretAccessKey: 'usingKEYfromIAM',
}); //Getting the Amazon DynamoDB service object
var ses = new AWS.SES(); //Getting Amazon SES service object
function storeUser(email, password, salt, fn) { //The storeUser() function stores the new user in the DynamoDB table.
var len = 128;
crypto.randomBytes(len, function(err, token) { //A random token sent in the validation email and used to validate a user
if (err) return fn(err);
token = token.toString('hex');
dynamodb.putItem({ //Putting an item in the DynamoDB table
TableName: config.DDB_TABLE, //The table name is taken from the config.json configuration file.
//Most of the data is string ("S"), but the verified attribute is Boolean ("BOOL"),
//new users aren't verified (false), and the randomly generated token is stored in the "verifyToken" attribute
Item: {
email: {
S: email
},
passwordHash: {
S: password
},
passwordSalt: {
S: salt
},
verified: {
BOOL: false
},
verifyToken: {
S: token
}
},
ConditionExpression: 'attribute_not_exists (email)' //This condition avoids overwriting existing users (with the same email).
}, function(err, data) {
if (err) return fn(err);
else fn(null, token); //The storeUser() function returns the randomly generated token.
});
});
}
function sendVerificationEmail(email, token, fn) { //The sendVerificationEmail() function sends the verification email to the new user.
var subject = 'Verification Email for ' + config.EXTERNAL_NAME;
//The verification link, to the verify.html page, passes the randomly generated token as a query parameter.
var verificationLink = config.VERIFICATION_PAGE + '?email=' + encodeURIComponent(email) + '&verify=' + token;
ses.sendEmail({ //Sending the email in HTML format
Source: config.EMAIL_SOURCE,
Destination: {
ToAddresses: [
email
]
},
Message: {
Subject: {
Data: subject
},
Body: {
Html: {
Data: '<html><head>' + '<meta http-equiv="Content-Type" content="text/html; charset=UTF-8" />' +
'<title>' + subject + '</title>' + '</head><body>' + 'Please <a href="' + verificationLink +
click here to verify">
'">click here to verify your email address</a> or copy & paste the following link in a browser:' +
'<br><br>' + '' + verificationLink + '' + '</body></html>'
}
}
}
}, fn);
}
exports.handler = (event, context, callback) => { //The function that's exported and can be invoked using AWS Lambda as createUser
//Getting the input parameters (email, password) from the event
var email = event.email;
var clearPassword = event.password;
//Using computeHash() from cryptoUtils.js to salt and hash the password.
cryptoUtils.computeHash(clearPassword, function(err, salt, hash) {
if (err) {
callback('Error in hash: ' + err);
} else {
storeUser(email, hash, salt, function(err, token) { //Storing the user via the storeUser()function
if (err) {
if (err.code == 'ConditionalCheckFailedException') { //Checking whether the database error is due to the email already being present in the database
//userID already found
callback(null, {
created: false
});
} else {
callback('Error in storeUser: ' + err);
}
} else {
sendVerificationEmail(email, token, function(err, data) { //Sending the verification email
if (err) {
callback('Error in sendVerificationEmail: ' + err);
} else {
callback(null, {
created: true
});
}
});
}
});
}
});
};
And here is the cryptoUtils.js script:
var crypto = require('crypto');
function computeHash(password, salt, fn) {
var len = 512;
var iterations = 4096;
var digest = 'sha512';
if (3 == arguments.length) {
crypto.pbkdf2(password, salt, iterations, len, digest, function(err, derivedKey) {
if (err) return fn(err);
else fn(null, salt, derivedKey.toString('base64'));
});
} else {
fn = salt;
crypto.randomBytes(len, function(err, salt) {
if (err) return fn(err);
salt = salt.toString('base64');
computeHash(password, salt, fn);
});
}
}
module.exports.computeHash = computeHash;
If anybody has any suggestions or needs more information to help me determine why the error is occurring I would greatly appreciate it. Thank you.
Is the password you're passing a number?
If so, convert it to a String.
If you don't want to do that, you can pass a Buffer object using the class method Buffer.from(string[, encoding]):
https://nodejs.org/api/buffer.html#buffer_class_method_buffer_from_string_encoding
crypto.pbkdf2(Buffer.from(password, 'utf8'), salt, iterations, len, digest, function(err, derivedKey) {
if (err) return fn(err);
else fn(null, salt, derivedKey.toString('base64'));
});
Hope it helps!
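On the caller's side that suggestion would look roughly like this (a sketch; the String() conversion is the proposed change, the rest mirrors the handler above):
//In the Lambda handler: coerce the incoming password to a string before hashing,
//so crypto.pbkdf2 never receives a number.
var clearPassword = String(event.password);
cryptoUtils.computeHash(clearPassword, function(err, salt, hash) {
    //... same storeUser/sendVerificationEmail flow as above ...
});
For reference, here is the full computeHash with the Buffer.from conversion applied: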
var crypto = require('crypto');
function computeHash(password, salt, fn) {
// Bytesize. The larger the numbers, the better the security, but the longer it will take to complete
var len = 512;
var iterations = 4096;
var digest = 'sha512';
if (3 == arguments.length) {
crypto.pbkdf2(Buffer.from(password, 'utf8'), salt, iterations, len, digest, function(err, derivedKey) {
if (err) return fn(err);
else fn(null, salt, derivedKey.toString('base64'));
});
} else {
fn = salt;
crypto.randomBytes(len, function(err, salt) {
if (err) return fn(err);
salt = salt.toString('base64');
computeHash(password, salt, fn);
});
}
}
module.exports.computeHash = computeHash;
Error "TypeError: Pass phrase must be a buffer"
does suggest trying a Buffer conversion of input String
Before your test of Buffer.from(password, 'utf8')
did you verify input value is encoding 'utf8',
and not some other encoding such as base64 or latin1 ?
List of encodings that Node.js supports
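To illustrate why the encoding matters (a made-up example, not code from the question): the same text interpreted with different encodings produces different raw bytes, and therefore a different derived key.
var crypto = require('crypto');
// The same textual password read with different encodings gives different buffers.
var asUtf8 = Buffer.from('Passw0rd', 'utf8');     // the literal characters
var asBase64 = Buffer.from('Passw0rd', 'base64'); // the same text decoded as base64
console.log(asUtf8.equals(asBase64)); // false: different bytes, so different hashes
crypto.pbkdf2(asUtf8, 'salt', 4096, 64, 'sha512', function(err, derivedKey) {
    if (err) throw err;
    console.log(derivedKey.toString('base64'));
});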

Ionic2 sqlitePlugin is not defined

How can I avoid this error: VM18193:27 Unable to open database ReferenceError: sqlitePlugin is not defined(…)
setTimeout(function() {
    let db = new SQLite();
    db.openDatabase({
        name: "data.db",
        location: "default"
    }).then(() => {
        db.executeSql("CREATE TABLE IF NOT EXISTS people (id INTEGER PRIMARY KEY AUTOINCREMENT, firstname TEXT, lastname TEXT)", {}).then((data) => {
            console.log("TABLE CREATED: ", data);
        }, (error) => {
            console.error("Unable to execute sql", error);
        });
    }, (error) => {
        console.error("Unable to open database", error);
    });
}, 2000);
How can I execute a query?
if(SqlSettingsService.openDb){
this.db = SqlSettingsService.getDB();
this.db.executeSql("CREATE TABLE IF NOT EXISTS people (id INTEGER PRIMARY KEY AUTOINCREMENT, firstname TEXT, lastname TEXT", {}).then
Instead I get an error.
Console output:
SqlSettingsService() starts
VM21750:27 Unhandled Promise rejection: Cannot read property 'executeSql' of null ; Zone: <root> ; Task: Promise.then ; Value: TypeError: Cannot read property 'executeSql' of null(…) TypeError: Cannot read property 'executeSql' of null
The SQLite plugin does not work in a browser; you can use WebSQL for the browser instead (with compatible browsers, including Chrome and Opera as far as I know).
Transactions written for WebSQL are compatible with transactions written for SQLite.
Here is a service I made to manage the connection to the DB and make it work regardless of whether the program runs in a browser or on a real device:
import { Injectable } from '@angular/core';
import { SQLite } from 'ionic-native';
import { Platform } from 'ionic-angular';
import { Storage } from '@ionic/storage';
@Injectable()
export class SqlSettingsService {
private db: any = null;
private isOpened: boolean = false;
constructor() {
console.log('SqlSettingsService() starts');
}
public getDB(){
return this.db;
}
public openDb = (platform:Platform,winSer:any):Promise<any> => {
console.log('SqlSettingsService() opend DB starts');
let p:Promise<any>;
if(!this.isOpened){
this.isOpened = true;
if(platform.is('core')){
this.db = winSer.window.openDatabase("ionic2BrowserDev","1.0","",5*1024*1024);
p = new Promise(function(resolve,reject){resolve('websql success')});
} else {
this.db = new SQLite();
p = this.db.openDatabase({
name: 'data.db',
location: 'default' // the location field is required
}).then(
()=>{console.log("SqlSettingsService open db successful")},
(err)=>{console.error(err)}
);
}
} else {
p = new Promise(function(resolve,reject){
resolve('db already opened');
});
}
return p;
}
public closeDb = () => {
this.isOpened = false;
return this.db.close();
}
}
winSer is another service used to access the window object; I use it in my app.component.ts when I call openDb() on SqlSettingsService. It's just this:
import { Injectable } from '@angular/core';
@Injectable()
export class WindowService {
public window = window;
}
To execute a query:
[SqlSettingsService-instance].openDb(platform, winSer);
[SqlSettingsService-instance].getDB().transaction(
    function(tx) {
        tx.executeSql([your sql], [bracket values you want to pass], success, error);
        function success(tx, rs) {
            console.log("success exec sql: ");
            console.info(rs);
        }
        function error(tx, error) {
            console.log('execSqlCustom error ' + error.message + " for tx " + tx);
        }
    });
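For completeness, here is a sketch of how the service could be wired up in app.component.ts (the import paths, component setup and platform.ready() timing are my assumptions, not part of the original answer):
import { Component } from '@angular/core';
import { Platform } from 'ionic-angular';
import { SqlSettingsService } from './sql-settings.service'; // hypothetical path
import { WindowService } from './window.service';            // hypothetical path

@Component({ templateUrl: 'app.html' })
export class MyApp {
  constructor(platform: Platform, sqlSettings: SqlSettingsService, winSer: WindowService) {
    // Wait until Cordova (or the browser platform) is ready, then let the service
    // pick WebSQL in the browser or the SQLite plugin on a device.
    platform.ready().then(() => {
      sqlSettings.openDb(platform, winSer).then(() => console.log('DB opened'));
    });
  }
}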

IE9: store.find is failing

I can't seem to fetch new data in Internet Explorer 9. For the purpose of an example I test the store this way:
App.__container__.lookup('store:main').find('style')
The only error I receive is the following:
SCRIPT5022: Error: Assertion Failed: [object Object]
Does Ember Data work out of the box (without polyfills, ...) in Internet Explorer 9?
versions:
Ember: 1.9.1
Ember-data: 1.0.0-beta.12
Problem solved. When doing an AJAX request with jQuery, the request normally goes through the XMLHttpRequest object.
On IE8-9, cross-domain requests can't use that object; they go through XDomainRequest instead. The simplest fix for this is adding: https://github.com/MoonScript/jQuery-ajaxTransport-XDomainRequest.
ember-data works out of the box with IE8+. According to this issue:
We've been supporting IE8 with our platform (built on Ember) for a while now. Things I know:
- the shim/sham is not needed; it's polyfilled by Ember and Ember Data.
- you will need it if you want additional things like .bind() on a function; then you must prepend it to the vendor file (using the Brocfile, a sketch follows below), and we only include the shim for that purpose, not the sham.
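For example, prepending the es5-shim to the vendor file in an ember-cli Brocfile.js might look like this (a sketch; the bower_components path is an assumption about the project layout):
// Brocfile.js (ember-cli)
var EmberApp = require('ember-cli/lib/broccoli/ember-app');
var app = new EmberApp();

// Prepend es5-shim so things like Function.prototype.bind exist on IE8/9
// before any vendor or app code runs.
app.import('bower_components/es5-shim/es5-shim.js', { prepend: true });

module.exports = app.toTree();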
Solution Synthesis
Reason:
jQuery AJAX requests normally go through the XMLHttpRequest object, but on IE8-9 cross-domain requests have to use XDomainRequest instead.
Solution:
A simple fix is to use the open-source jQuery-ajaxTransport-XDomainRequest plugin.
Code: add jQuery-ajaxTransport-XDomainRequest.js:
/*!
* jQuery-ajaxTransport-XDomainRequest - v1.0.4 - 2015-03-05
* https://github.com/MoonScript/jQuery-ajaxTransport-XDomainRequest
* Copyright (c) 2015 Jason Moon (@JSONMOON)
* Licensed MIT (/blob/master/LICENSE.txt)
*/
(function(factory) {
if (typeof define === 'function' && define.amd) {
// AMD. Register as anonymous module.
define(['jquery'], factory);
} else if (typeof exports === 'object') {
// CommonJS
module.exports = factory(require('jquery'));
} else {
// Browser globals.
factory(jQuery);
}
}(function($) {
// Only continue if we're on IE8/IE9 with jQuery 1.5+ (contains the ajaxTransport function)
if ($.support.cors || !$.ajaxTransport || !window.XDomainRequest) {
return $;
}
var httpRegEx = /^(https?:)?\/\//i;
var getOrPostRegEx = /^get|post$/i;
var sameSchemeRegEx = new RegExp('^(\/\/|' + location.protocol + ')', 'i');
// ajaxTransport exists in jQuery 1.5+
$.ajaxTransport('* text html xml json', function(options, userOptions, jqXHR) {
// Only continue if the request is: asynchronous, uses GET or POST method, has HTTP or HTTPS protocol, and has the same scheme as the calling page
if (!options.crossDomain || !options.async || !getOrPostRegEx.test(options.type) || !httpRegEx.test(options.url) || !sameSchemeRegEx.test(options.url)) {
return;
}
var xdr = null;
return {
send: function(headers, complete) {
var postData = '';
var userType = (userOptions.dataType || '').toLowerCase();
xdr = new XDomainRequest();
if (/^\d+$/.test(userOptions.timeout)) {
xdr.timeout = userOptions.timeout;
}
xdr.ontimeout = function() {
complete(500, 'timeout');
};
xdr.onload = function() {
var allResponseHeaders = 'Content-Length: ' + xdr.responseText.length + '\r\nContent-Type: ' + xdr.contentType;
var status = {
code: 200,
message: 'success'
};
var responses = {
text: xdr.responseText
};
try {
if (userType === 'html' || /text\/html/i.test(xdr.contentType)) {
responses.html = xdr.responseText;
} else if (userType === 'json' || (userType !== 'text' && /\/json/i.test(xdr.contentType))) {
try {
responses.json = $.parseJSON(xdr.responseText);
} catch(e) {
status.code = 500;
status.message = 'parseerror';
//throw 'Invalid JSON: ' + xdr.responseText;
}
} else if (userType === 'xml' || (userType !== 'text' && /\/xml/i.test(xdr.contentType))) {
var doc = new ActiveXObject('Microsoft.XMLDOM');
doc.async = false;
try {
doc.loadXML(xdr.responseText);
} catch(e) {
doc = undefined;
}
if (!doc || !doc.documentElement || doc.getElementsByTagName('parsererror').length) {
status.code = 500;
status.message = 'parseerror';
throw 'Invalid XML: ' + xdr.responseText;
}
responses.xml = doc;
}
} catch(parseMessage) {
throw parseMessage;
} finally {
complete(status.code, status.message, responses, allResponseHeaders);
}
};
// set an empty handler for 'onprogress' so requests don't get aborted
xdr.onprogress = function(){};
xdr.onerror = function() {
complete(500, 'error', {
text: xdr.responseText
});
};
if (userOptions.data) {
postData = ($.type(userOptions.data) === 'string') ? userOptions.data : $.param(userOptions.data);
}
xdr.open(options.type, options.url);
xdr.send(postData);
},
abort: function() {
if (xdr) {
xdr.abort();
}
}
};
});
return $;
}));
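If the app is built with ember-cli, the transport file can be pulled into the vendor bundle the same way (a sketch; the vendor/ path and file name are assumptions about where you saved the plugin):
// Brocfile.js — load the XDomainRequest transport after jQuery and before
// the app makes any AJAX calls; 'app' is the EmberApp instance as above.
app.import('vendor/jquery.xdomainrequest.js');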

EXTJS grid store load - adding parameters?

I'm in the process of converting an ASP repeater into an ExtJS grid. Above the repeater are a dropdown and a radio-button list. The dropdown selects which client's data the repeater shows, and the radio-button list selects the query type (default, resource, or role). Currently, when the dropdown or radio button is changed, the page posts back with the new data.
I'm not sure how to pass the values of these two controls to my static web service on the backend via the ExtJS store API GET call.
The ExtJS store code...
store: Ext.create('Ext.data.Store', {
autoLoad: true,
autoSync: false,
model: 'Assembly',
proxy: {
type: 'ajax',
headers: { "Content-Type": 'application/json' },
api: {
read: '/Admin/BillRateData.aspx/Get'
},
reader: {
type: 'json',
root: function (o) {
if (o.d) {
return o.d;
} else {
return o.children;
}
}
},
writer: {
type: 'json',
root: 'jsonData',
encode: false,
allowSingle: false
},
listeners: {
exception: function (proxy, response, operation) {
Ext.MessageBox.show({
title: "Workflow Groups Error",
msg: operation.action + ' Operation Failed: ' + operation.getError().statusText,
icon: Ext.MessageBox.ERROR,
buttons: Ext.Msg.OK
});
}
}
}
And the web service... (with some pseudocode)
[WebMethod]
[ScriptMethod(ResponseFormat = ResponseFormat.Json, UseHttpGet = true)]
public static List<BillRate> Get()
{
using (TimEntities db = new TimEntities())
{
int tableId = Int32.Parse(ddlTable.SelectedValue);
var defaultQry = from t1 in db.BillCostTableDatas
where t1.TableId == tableId
&& t1.ResourceId == 0 && t1.RoleId == 0
orderby t1.Rate
select new
{
id = t1.Id,
resource = "",
role = "",
rate = t1.Rate,
TierName = ""
};
var resourceQry = from t1 in db.BillCostTableDatas
join t2 in db.Machines on t1.ResourceId equals t2.Machine_ID
join t3 in db.TOMIS_USER on t2.Machine_User_ID equals t3.User_ID
join t4 in db.PricingTierNames on t1.PricingTierID equals t4.TierID
where t1.TableId == tableId
&& t1.ResourceId != 0
&& t1.RoleId == 0
orderby t3.LName, t3.FName, t1.Rate, t4.TierName
select new
{
id = t1.Id,
resource = t3.LName + ", " + t3.FName,
role = "",
rate = t1.Rate,
TierName = t4.TierName
};
var roleQry = from t1 in db.BillCostTableDatas
join t2 in db.TaskRoles on t1.RoleId equals t2.Id
where t1.TableId == tableId
&& t1.ResourceId == 2 && t1.RoleId != 0
orderby t2.Name, t1.Rate
select new
{
id = t1.Id,
resource = "",
role = t2.Name,
rate = t1.Rate,
TierName = ""
};
if (this.rblOptions.SelectedValue == "resource")
{
var results = from Res in resourceQry.ToList()
select new BillRate
{
};
return results.ToList();
}
else if (this.rblOptions.SelectedValue == "role")
{
var results = from Res in roleQry.ToList()
select new BillRate
{
};
return results.ToList();
}
else
{
var results = from Res in defaultQry.ToList()
select new BillRate
{
};
return results.ToList();
}
return null;
}
}
If you trigger your store loading manually, you can pass a params option to the load method.
Example:
var store = Ext.create('Ext.data.Store', {
// prevent the store from loading before we told it to do so
autoLoad: false
...
});
store.load({
params: {clientId: 123, queryType: 'default'}
...
});
If you want the params to be sent for multiple subsequent queries, you can write them in the extraParams property of the proxy.
Example:
var store = Ext.create('Ext.data.Store', { ... });
Ext.apply(store.getProxy().extraParams, {
clientId: 321
,queryType: 'role'
});
// the store will still need a refresh
store.reload();
The way these params are passed to the server will depend on the type of request. For GET ones, they will be appended as query params; for POST they will be embedded in the request body.
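Putting the two together for this page (a sketch under assumptions: clientCombo, queryTypeGroup and grid are references to the existing dropdown, radio group and grid, the radios share the name queryType, and the clientId/queryType parameter names simply mirror the examples above):
// Refresh the grid whenever the client dropdown or the query-type radio group changes.
function reloadBillRates(store, clientId, queryType) {
    Ext.apply(store.getProxy().extraParams, {
        clientId: clientId,   // which client's data to load
        queryType: queryType  // 'default', 'resource' or 'role'
    });
    store.load();             // re-issues the GET with the extra params appended to the URL
}

clientCombo.on('select', function (combo) {
    reloadBillRates(grid.getStore(), combo.getValue(), queryTypeGroup.getValue().queryType);
});

queryTypeGroup.on('change', function (group, newValue) {
    reloadBillRates(grid.getStore(), clientCombo.getValue(), newValue.queryType);
});
On the server side, the static page method would then need matching parameters (for example Get(int clientId, string queryType)) rather than reading ddlTable and rblOptions, since those server controls aren't reachable from a static WebMethod.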