I want to retrieve data from a model provider, but all I am getting in my controller is 'undefined'.
Here is the code:
Controller:
pdmAtWeb.controller('SearchCtrl', function($scope, ItemModel){
$scope.updateTableFromSearch = function(){
$scope.myData = ItemModel.findAllItems();
console.log($scope.myData);
};});
Provider:
pdmAtWeb.provider('ItemModel', function () {
this.defaultEndpoint = '/item';
this.defaultServiceUrl = 'http://localhost:8080/webservice';
this.setDefaultEndpoint = function (newEndpoint) {
this.defaultEndpoint = newEndpoint;
};
this.setDefaultServiceUrl = function (newServiceUrl) {
this.defaultServiceUrl = newServiceUrl;
}
this.$get = function ($http) {
var endpoint = this.defaultEndpoint;
var serviceUrl = this.defaultServiceUrl;
var refreshConnection = function () {
// reconnect
}
return{
findAllItems: function () {
$http({method: 'GET', url: serviceUrl + endpoint}).
success(function (data, status, headers, config) {
console.log(data);
return data;
}).
error(function (data, status, headers, config) {
});
}
}
}});
The provider "ItemModel" receives the correct data from the web service. Perhaps this is an async problem, but I'm not sure.
UPDATE
After adding a deferred/promise implementation it works as expected. Here is the final code:
Controller:
pdmAtWeb.controller('SearchCtrl', function($scope, ItemModel){
$scope.updateTableFromSearch = function(){
ItemModel.findAllItems().then(function(data){
console.log(data);
$scope.myData = data;
});
};
});
Provider:
pdmAtWeb.provider('ItemModel', function () {
this.defaultEndpoint = '/item';
this.defaultServiceUrl = 'http://localhost:8080/webservice';
this.setDefaultEndpoint = function (newEndpoint) {
this.defaultEndpoint = newEndpoint;
};
this.setDefaultServiceUrl = function (newServiceUrl) {
this.defaultServiceUrl = newServiceUrl;
}
this.$get = function ($http, $q) {
var endpoint = this.defaultEndpoint;
var serviceUrl = this.defaultServiceUrl;
var refreshConnection = function () {
// reconnect
}
return{
findAllItems: function () {
var deferred = $q.defer();
$http({method: 'GET', url: serviceUrl + endpoint}).
success(function (data, status, headers, config) {
deferred.resolve(data);
}).
error(function (data, status, headers, config) {
deferred.reject();
});
return deferred.promise;
}
}
}
});
You don't need a deferred to accomplish this. $http already returns a promise. In your first example, the reason you are getting undefined is that findAllItems does not return anything.
If you do return $http.get(.....), everything should work without using a deferred explicitly.
Here is the corrected version:
pdmAtWeb.provider('ItemModel', function () {
this.defaultEndpoint = '/item';
this.defaultServiceUrl = 'http://localhost:8080/webservice';
this.setDefaultEndpoint = function (newEndpoint) {
this.defaultEndpoint = newEndpoint;
};
this.setDefaultServiceUrl = function (newServiceUrl) {
this.defaultServiceUrl = newServiceUrl;
}
this.$get = function ($http) {
var endpoint = this.defaultEndpoint;
var serviceUrl = this.defaultServiceUrl;
var refreshConnection = function () {
// reconnect
}
return{
findAllItems: function () {
//NOTE addition of return below.
return $http({method: 'GET', url: serviceUrl + endpoint}).
success(function (data, status, headers, config) {
console.log(data);
//NOTE: YOU SHOULD ALSO return data here for it to work.
return data;
}).
error(function (data, status, headers, config) {
});
}
}
}});
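For completeness, here is a minimal sketch (not part of the original answer) of a controller consuming that returned promise. With .then() the payload sits on response.data, whereas the legacy .success() callback receives the data directly:

pdmAtWeb.controller('SearchCtrl', function ($scope, ItemModel) {
    $scope.updateTableFromSearch = function () {
        // findAllItems now returns the $http promise directly
        ItemModel.findAllItems().then(function (response) {
            $scope.myData = response.data; // unwrap the HTTP response
        });
    };
});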
I am trying to develop a dApp using Truffle and Ganache. deployed() is returning undefined, so I cannot use it to create an instance of the contract.
Here is the code:
App = {
web3Provider: null,
contracts: {},
init: async function() {
$('#category').append('<option value="Clothing">Clothing</option>');
return await App.initWeb3();
},
initWeb3: async function() {
if (window.ethereum) {
App.web3Provider = window.ethereum;
try {
await window.ethereum.request({ method: "eth_requestAccounts" });
} catch (error) {
console.error("User denied account access");
}
}
else if (window.web3) {
App.web3Provider = window.web3.currentProvider;
}
else {
App.web3Provider = new Web3.providers.HttpProvider('http://localhost:8545');
}
web3 = new Web3(App.web3Provider);
return await App.initContract();
},
initContract: function() {
$.getJSON('Marketplace.json', function(data) {
// Get the necessary contract artifact file and instantiate it with #truffle/contract
var MarketplaceArtifact = data;
App.contracts.Marketplace = TruffleContract(MarketplaceArtifact);
// Set the provider for our contract
App.contracts.Marketplace.setProvider(App.web3Provider);
});
return App.initUI();
},
initUI: function() {
var marketplaceInstance;
App.contracts.Marketplace.deployed().then(function(instance) {
marketplaceInstance = instance;
return marketplaceInstance.getCategories.call();
}).then(function(categories) {
alert(categories[1]);
for(i = 0; i < categories.length; i++) {
$('#category').append('<option value="' + categories[i] + '">' + categories[i] + '</option>');
}
}).catch(function(err) {
console.log(err.message);
});
}
};
$(function() {
$(window).load(function() {
App.init();
});
});
I am getting an exception as follows in this line:
App.contracts.Marketplace.deployed().then(function(instance)
The exception is:
Uncaught (in promise) TypeError: Cannot read properties of undefined (reading 'deployed')
at Object.initUI (register-nonprofit.js:49:31)
The deployed() function should return the contract instance, but it does not. Please help.
When I run my JavaScript tests with Jest and the AWS mock via npm, they fail.
I believe this is because I use a singleton class.
The difference is between these two lines:
module.exports = Users; or module.exports = new Users();
I guess the AWS mock doesn't work with a singleton class.
In that case, what should I do to solve this problem?
'use strict';
var aws = require('aws-sdk')
aws.config.update({region:'ap-northeast-1'})
class Users {
constructor() {
this.table = 'Users'
this.dynamodb = new aws.DynamoDB()
}
getData(email) {
let params = {
TableName: this.table,
Key : { 'email': {'S':email} }
}
return this.dynamodb.getItem(params).promise()
}
}
// module.exports = Users // ← this works.
module.exports = new Users(); // ← this fails.
'use strict';
var aws = require('aws-sdk-mock'),
users = require('./user'),
chai = require('chai'),
path = require('path'),
should = chai.should(),
input = 'test#gmail.com',
usersObj;
aws.setSDK(path.resolve('node_modules/aws-sdk'));
describe('All Tests', function () {
// this.timeout(0);
beforeEach(function () {
aws.mock('DynamoDB', 'getItem', function (params, callback) {
callback(null, {Item: {email: params.Key.email.S}});
});
// usersObj = new users(); ← this works.
usersObj = users; // ← this fails.
});
it('getData', function (done) {
usersObj.getData(input).then(function (res) {
console.log(res);
res.Item.email.should.equal(input);
done();
});
});
});
This line:
module.exports = new Users();
...means that a Users object will get created as soon as the code runs...and it runs as soon as user.js is required.
This line:
users = require('./user')
...is at the top of your test file and this line:
aws.mock('DynamoDB', 'getItem', function (params, callback) {
callback(null, {Item: {email: params.Key.email.S}});
});
...is in a beforeEach...
...which means that user.js is required and runs before the mock has been created...which causes the test to fail.
If you are going to export an instance of Users then you just need to make sure you don't require the user.js file in your test until after you have set up your mock:
var aws = require('aws-sdk-mock'),
chai = require('chai'),
path = require('path'),
input = 'test#gmail.com',
usersObj;
chai.should()
aws.setSDK(path.resolve('node_modules/aws-sdk'));
describe('All Tests', function () {
beforeEach(function () {
aws.mock('DynamoDB', 'getItem', function (params, callback) {
callback(null, { Item: { email: params.Key.email.S } });
}); // <= set up the mock first...
usersObj = require('./user'); // <= ...then require user.js
});
it('getData', function (done) {
usersObj.getData(input).then(function (res) {
res.Item.email.should.equal(input); // Success!
done();
});
});
});
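If you would rather keep the require at the top of the test file, one variation (a sketch, not from the original answer) is to drop user.js from Node's module cache in beforeEach, so the singleton is rebuilt after the mock is registered:

beforeEach(function () {
    aws.mock('DynamoDB', 'getItem', function (params, callback) {
        callback(null, { Item: { email: params.Key.email.S } });
    });
    // Remove the cached module so the next require re-runs user.js
    // and the singleton picks up the mocked DynamoDB client.
    delete require.cache[require.resolve('./user')];
    usersObj = require('./user');
});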
I could resolve it this way too:
'use strict';
var aws = require('aws-sdk-mock'),
users = require('./user'),
chai = require('chai'),
path = require('path'),
should = chai.should(),
input = 'test#gmail.com',
usersObj;
const awsObject = require('aws-sdk');
aws.setSDK(path.resolve('node_modules/aws-sdk'));
describe('All Tests', function () {
// this.timeout(0);
beforeEach(function () {
aws.mock('DynamoDB', 'getItem', function (params, callback) {
callback(null, {Item: {email: params.Key.email.S}});
});
// Creating a new AWS DynamoDB instance here resolves the problem.
users.dynamodb = new awsObject.DynamoDB();
});
it('getData', function (done) {
users.getData(input).then(function (res) {
console.log(res);
res.Item.email.should.equal(input);
done();
});
});
});
You must instantiate the AWS client inside the class constructor:
class MyClass {
constructor(){
this.dynamodb = new DynamoDB.DocumentClient({ region: "us-west-2" });
}
...
In the test file you must create a new instance of your class just after calling AWSMock. Example:
it('Should save on dinamoDB with param atributes void()', async () => {
AWSMock.mock('DynamoDB.DocumentClient', 'update', function (params, callback){
callback(null, { Attributes: { currentValue: 1 } } );
});
AWSMock.mock('DynamoDB.DocumentClient', 'put', function (params, callback){
callback(null, true);
});
const myClass = new MyClass();
...
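A hedged sketch of how such a test might continue after that point (the increment method and its arguments are hypothetical, not from the original answer):

// Hypothetical continuation: increment() is assumed to wrap
// DocumentClient.update(); the names are illustrative only.
const result = await myClass.increment('my-table', 'some-id');
console.log(result.Attributes.currentValue);  // 1, supplied by the update mock above
AWSMock.restore('DynamoDB.DocumentClient');   // clean up the mock afterwards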
Why is User.Identity.IsAuthenticated == false when called via CORS, but true when called via same domain?
I have a working ASP.NET Core 2 cookie-auth app that is CORS-enabled.
When I call:
api/Identity/establish-session
an AUTHCOOKIE gets set in both CORS and local AJAX calls.
Conversely, when I call:
api/Identity/sign-out
The AUTHCOOKIE gets removed. All good so far.
After a successful establish-session, when I call the following:
api/Identity/check-authentication
User.Identity.IsAuthenticated == false when called via CORS, but User.Identity.IsAuthenticated == true when called from the same domain.
I don't know if this is because of how I call it in javascript or if I have something configured wrong on the asp.net app. I thought I just had to have credentials: 'include' set in my fetch call?
[Produces("application/json")]
[Route("api/Identity")]
public class IdentityController : Controller
{
[HttpPost]
[AllowAnonymous]
[Route("establish-session")]
public async Task EstablishAuthenticatedSession(string username, string password)
{
var properties = new AuthenticationProperties
{
IsPersistent = true,
ExpiresUtc = DateTime.UtcNow.AddHours(1)
};
var claims = new[] {new Claim("name", username), new Claim(ClaimTypes.Role, "User")};
var identity = new ClaimsIdentity(claims, CookieAuthenticationDefaults.AuthenticationScheme);
await
HttpContext.SignInAsync(CookieAuthenticationDefaults.AuthenticationScheme,
new ClaimsPrincipal(identity),
properties);
}
[HttpGet]
[AllowAnonymous]
[Route("sign-out")]
public async Task Logout()
{
await HttpContext.SignOutAsync(CookieAuthenticationDefaults.AuthenticationScheme);
}
[HttpGet]
[AllowAnonymous]
[Route("check-authentication")]
public async Task<bool> CheckAuthentication()
{
return User.Identity.IsAuthenticated;
}
}
Here are my JavaScript snippets:
establishAuthenticatedSession(){
let self = this;
var model = this.get();
console.log(model);
var url = "https://localhost:44310/api/Identity/establish-session?username=herb&password=1234";
fetch(url,
{
credentials: 'include',
headers: { 'Content-Type': 'text/plain' },
method: 'POST'
})
.then(function (res) {
console.log(res);
self.set({ establishSession:{ message:"Success" }});
}).catch(function(error) {
self.set({ establishSession:{ message:error.message }});
console.log('There has been a problem with your fetch operation: ' + error.message);
});
},
signOut(){
let self = this;
var model = this.get();
console.log(model);
var url = "https://localhost:44310/api/Identity/sign-out";
fetch(url,
{
credentials: 'include',
headers: { 'Content-Type': 'text/plain' },
method: 'GET'
})
.then(function (res) {
console.log(res);
self.set({ signoutResult:{ message:"Success" }});
}).catch(function(error) {
self.set({ signoutResult:{ message:error.message }});
console.log('There has been a problem with your fetch operation: ' + error.message);
});
},
checkAuthenticatedSession(){
let self = this;
var model = this.get();
console.log(model);
var url = "https://localhost:44310/api/Identity/check-authentication";
fetch(url,
{
credentials: 'include',
method: 'GET',
headers: { 'Content-Type': 'text/plain' }
})
.then(res => res.text())
.then(function (res) {
console.log(res);
self.set({ checkAuthenticatedSession:{ message:res }});
})
.catch(function(error) {
self.set({ checkAuthenticatedSession:{ message:error.message }});
console.log('There has been a problem with your fetch operation: ' + error.message);
});
}
This is my CORS setup:
services.AddCors(options =>
{
options.AddPolicy("CorsPolicy",
builder => builder
.AllowAnyOrigin()
.AllowAnyMethod()
.AllowAnyHeader()
.AllowCredentials());
});
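For context, registering the policy is only half of it; it also has to be applied in the pipeline. The question does not show that part, so the following is an assumed sketch for ASP.NET Core 2 (UseCors must run before the authentication and MVC middleware):

// Assumed counterpart in Configure(); not shown in the original snippets.
public void Configure(IApplicationBuilder app, IHostingEnvironment env)
{
    app.UseCors("CorsPolicy");   // apply the named policy to every request
    app.UseAuthentication();
    app.UseMvc();
}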
It turns out that the cookie needs to be set with SameSiteMode.None. The hint was that the ARRAffinity cookie from Azure was set to SameSite=None and was being sent, while mine was not.
In my app I had to set it as follows:
public class Startup
{
...
// This method gets called by the runtime. Use this method to add services to the container.
public void ConfigureServices(IServiceCollection services)
{
...
services.AddAuthentication(sharedOptions =>
{
sharedOptions.DefaultAuthenticateScheme = CookieAuthenticationDefaults.AuthenticationScheme;
sharedOptions.DefaultSignInScheme = CookieAuthenticationDefaults.AuthenticationScheme;
// sharedOptions.DefaultChallengeScheme = OpenIdConnectDefaults.AuthenticationScheme;
})
.AddCookie(
CookieAuthenticationDefaults.AuthenticationScheme,
options =>
{
options.LoginPath = "/Account/LogIn";
options.AccessDeniedPath = new PathString("/account/login");
options.Cookie.Name = "AUTHCOOKIE";
options.ExpireTimeSpan = new TimeSpan(365, 0, 0, 0);
options.Cookie.SecurePolicy = CookieSecurePolicy.SameAsRequest;
options.Cookie.SameSite = SameSiteMode.None;
}
);
...
}
// This method gets called by the runtime. Use this method to configure the HTTP request pipeline.
public void Configure(IApplicationBuilder app, IHostingEnvironment env)
{
...
var cookiePolicyOptions = new CookiePolicyOptions
{
Secure = CookieSecurePolicy.SameAsRequest,
MinimumSameSitePolicy = SameSiteMode.None
};
app.UseCookiePolicy(cookiePolicyOptions);
...
}
}
I want to use a custom API to evaluate data posted by applications, but remote methods are not accepted in middleware in LoopBack.
module.exports = function () {
const http = require('https');
var request = require('request');
var { Lib } = require('Lib');
var lib = new Lib;
verification.checkID = function (ID, cb) {
cb(null, 'ID is :' + ID);
}
verification.remoteMethod('greet', {
accepts: {
arg: 'ID',
type: 'string'
},
returns: {
arg: 'OK',
type: 'string'
}
});
module.exports = function () {
const http = require('https');
var request = require('request');
var { Lib } = require('Lib');
var lib = new Lib;
verification.checkID = function (ID, cb) {
cb(null, 'ID is :' + ID);
}
verification.remoteMethod('greet', {
'http': { // add the verb here
'path': '/greet',
'verb': 'post'
},
accepts: {
arg: 'ID',
type: 'string'
},
returns: {
arg: 'OK',
type: 'string'
}
});
Update
module.exports = function(server) {
// Install a `/` route that returns server status
var router = server.loopback.Router();
router.get('/', server.loopback.status());
router.get('/ping', function(req, res) { // your middleware function; you would call next() here to continue the chain
res.send('pong');
});
server.use(router);
};
The evaluation part is something I am still not getting; please also check this link: Intercepting error handling with loopback.
Regarding the following question: How to make a simple API for the POST method?
I found my solution in the following way:
module.exports = function(server) {
const https = require('https');
var request = require('request');
return function verification(req, res, next) {
res.setHeader('Access-Control-Allow-Origin', '*');
res.setHeader('Access-Control-Allow-Methods', 'GET, POST, PUT, DELETE');
res.setHeader('Access-Control-Allow-Headers', 'Content-Type');
res.setHeader('Access-Control-Allow-Credentials', true);
var request;
var response;
var body = '';
// When a chunk of data arrives.
req.on('data', function (chunk) {
// Append it.
body += chunk;
});
// When finished with data.
req.on('end', function () {
// Show what just arrived if POST.
if (req.method === 'POST') {
console.log(body);
}
// Which method?
switch (req.method) {
case 'GET':
// Verify URL and respond with appropriate data.
handleGet(req, res);
// Response has already been sent.
response = '';
break;
case 'POST':
// Verify JSON request and respond with stringified JSON response.
response = handlePost(body);
break;
default:
response = JSON.stringify({ 'error': 'Not A POST' });
break;
}
// Send the response if not empty.
if (response.length !== 0) {
res.write(response);
res.end();
}
// Paranoid clear of the 'body'. Seems to work without
// this, but I don't trust it...
body = '';
});
// If error.
req.on('error', function (err) {
res.write(JSON.stringify({ 'error': err.message }));
res.end();
});
//
};
function handlePost(body) {
var response = '';
var obj = JSON.parse(body);
// Error if no 'fcn' property.
if (typeof obj['fcn'] === 'undefined') {
return JSON.stringify({ 'error': 'Request method missing' });
}
// Which function.
switch (obj['fcn']) {
// verification() requires 3 arguments.
case 'verification':
// Error if no arguments.
if ((typeof obj['arg'] === 'undefined') || (obj['arg'].length !== 3)) {
response = JSON.stringify({ 'error': 'Arguments missing' });
break;
}
// Return with response from method.
response = verification(obj['arg']);
break;
default:
response = JSON.stringify({ 'error': 'Unknown function' });
break;
}
return response;
};
function verification(arg) {
var n1 = Number(arg[0]);
var n2 = Number(arg[1]);
var n3 = Number(arg[2]);
var result;
// Addem up.
result = n1 + n2 + n3;
// Return with JSON string.
return JSON.stringify({ 'result': result });
};
};
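For reference, a client call against this middleware might look like the sketch below. The /verify mount path is hypothetical; it depends on where the middleware is registered (for example in middleware.json) and is not part of the original code:

// Hypothetical client call; the /verify path is assumed, not from the original.
fetch('http://localhost:3000/verify', {
    method: 'POST',
    headers: { 'Content-Type': 'application/json' },
    body: JSON.stringify({ fcn: 'verification', arg: ['1', '2', '3'] })
})
    .then(function (res) { return res.json(); })
    .then(function (json) { console.log(json.result); }); // logs 6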
I am working in an AWS Lambda function. I am successfully making an API call to the NASA APOD API and getting back the values. I want to take the url for the image, download that image, and then upload it into S3. I am getting an error when I try to access the "test.jpg" image: "Error: EACCES: permission denied, open 'test.jpg'". If I move the s3Bucket.putObject outside the http.request, data is null. I know I am missing something simple. Thoughts?
function GetAPOD(intent, session, callback) {
var nasa_api_key = 'demo-key'
, nasa_api_path = '/planetary/apod?api_key=' + nasa_api_key;
var options = {
host: 'api.nasa.gov',
port: 443,
path: nasa_api_path,
method: 'GET'
};
var req = https.request(options, function (res) {
res.setEncoding('utf-8');
var responseString = '';
res.on('data', function (data) {
responseString += data;
});
res.on('end', function () {
console.log('API Response: ' + responseString);
var responseObject = JSON.parse(responseString)
, image_date = responseObject['date']
, image_title = responseObject['title']
, image_url = responseObject['url']
, image_hdurl = responseObject['hdurl']
, image_desc = responseObject['explanation'];
var s3Bucket = new AWS.S3( { params: {Bucket: 'nasa-apod'} } );
var fs = require('fs');
var file = fs.createWriteStream("test.jpg");
var request = http.get(image_url, function(response) {
response.pipe(file);
var data = {Key: "test.jpg", Body: file};
s3Bucket.putObject(data, function(err, data) {
if (err) {
console.log('Error uploading data: ', data);
}
else {
console.log('successfully uploaded the image!');
}
});
});
});
});
req.on('error', function (e) {
console.error('HTTP error: ' + e.message);
});
//req.write();
req.end();
}
You need to be writing the file to /tmp. That's the only directory in the Lambda environment that you will have write access to.
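A minimal sketch of that change (and of waiting for the download to finish before uploading, which the original snippet also skips); the bucket and key names are the ones from the question:

// Sketch: write to /tmp (the only writable path in Lambda) and upload
// only after the file stream has finished.
var file = fs.createWriteStream('/tmp/test.jpg');
http.get(image_url, function (response) {
    response.pipe(file);
    file.on('finish', function () {
        s3Bucket.putObject(
            { Key: 'test.jpg', Body: fs.readFileSync('/tmp/test.jpg') },
            function (err) {
                if (err) console.log('Error uploading data: ', err);
                else console.log('successfully uploaded the image!');
            }
        );
    });
});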
I got it!! Thank you Mark B for the help. I was able to get the data from the stream and write it to the bucket without saving it locally. I did have to change my IAM role to allow s3:PutObject.
function GetAPOD(intent, session, callback) {
var nasa_api_key = 'demo-key'
, nasa_api_path = '/planetary/apod?api_key=' + nasa_api_key;
var options = {
host: 'api.nasa.gov',
port: 443,
path: nasa_api_path,
method: 'GET'
};
var req = https.request(options, function (res) {
res.setEncoding('utf-8');
var responseString = '';
res.on('data', function (data) {
responseString += data;
});
res.on('end', function () {
// console.log('API Response: ' + responseString);
var responseObject = JSON.parse(responseString)
, image_date = responseObject['date']
, image_title = responseObject['title']
, image_url = responseObject['url']
, image_hdurl = responseObject['hdurl']
, image_desc = responseObject['explanation'];
var image_name = image_date + '.jpg';
var s3 = new AWS.S3();
var s3Bucket = new AWS.S3( { params: {Bucket: 'nasa-apod'} } );
var request = http.get(image_url, function(response) {
var image_stream = null;
response.on('data', function (data) {
image_stream = data;
});
response.on('end', function () {
var param_data = {Key: image_name, Body: image_stream, ContentType: "image/jpeg", ContentLength: response.headers['content-length']};
s3Bucket.putObject(param_data, function(err, output_data) {
if (err) {
console.log('Error uploading data to S3: ' + err);
}
});
});
});
request.end();
});
});
req.on('error', function (e) {
console.error('HTTP error: ' + e.message);
});
req.end();
}
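One caveat with the snippet above (an observation, not part of the original answer): the 'data' handler overwrites image_stream on every chunk, so only the last chunk would be uploaded for images that arrive in multiple chunks. A safer sketch accumulates the chunks and concatenates them:

// Sketch: collect all chunks and upload the concatenated buffer.
var chunks = [];
response.on('data', function (data) {
    chunks.push(data);
});
response.on('end', function () {
    var image_buffer = Buffer.concat(chunks);
    var param_data = {
        Key: image_name,
        Body: image_buffer,
        ContentType: 'image/jpeg',
        ContentLength: image_buffer.length
    };
    s3Bucket.putObject(param_data, function (err, output_data) {
        if (err) {
            console.log('Error uploading data to S3: ' + err);
        }
    });
});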