I have this code:
var graphicDataUrl = 'graphic-data.json';
var webDataUrl = 'web-data.json';
var templateHtml = 'templating.html';
var viewG = $('#view-graphic');
var viewW = $('#view-web');

$.getJSON(dataUrls, function(data) {
    $.get(templateHtml, function(template) {
        template = Handlebars.compile(template);
        var example = template({ works: data });
        viewG.html(example);
        viewW.html(example);
    });
});
What is the best way to call both the webDataUrl and graphicDataUrl JSON files and use their data to display them in two different divs (#view-graphic and #view-web)?
The best way is to do each one individually, and to handle error conditions:
$.getJSON(graphicDataUrl)
    .then(function(data) {
        // ...worked, put it in #view-graphic
    })
    .fail(function() {
        // ...didn't work, handle it
    });

$.getJSON(webDataUrl)
    .then(function(data) {
        // ...worked, put it in #view-web
    })
    .fail(function() {
        // ...didn't work, handle it
    });
That allows the requests to happen in parallel, and updates the page as soon as possible when each request completes.
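For example (just a sketch, reusing the template file and view elements from the question, so it assumes jQuery and Handlebars are already loaded), the placeholder handlers could be filled in like this:
// Sketch: fetch the template once, then render each data set into its own view
// as soon as it arrives. templateHtml, viewG and viewW come from the question.
$.get(templateHtml, function(templateSource) {
    var template = Handlebars.compile(templateSource);

    $.getJSON(graphicDataUrl)
        .then(function(data) {
            viewG.html(template({ works: data }));
        })
        .fail(function() {
            viewG.html('Could not load graphic data.');
        });

    $.getJSON(webDataUrl)
        .then(function(data) {
            viewW.html(template({ works: data }));
        })
        .fail(function() {
            viewW.html('Could not load web data.');
        });
});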
If you want to run the requests in parallel but wait to update the page until they both complete, you can do that with $.when:
var graphicData, webData;

$.when(
    $.getJSON(graphicDataUrl, function(data) {
        graphicData = data;
    }),
    $.getJSON(webDataUrl, function(data) {
        webData = data;
    })
).then(function() {
    if (graphicData) {
        // Worked, put graphicData in #view-graphic
    }
    else {
        // Request for graphic data didn't work, handle it
    }

    if (webData) {
        // Worked, put webData in #view-web
    }
    else {
        // Request for web data didn't work, handle it
    }
});
...but the page may seem less responsive since you're not updating when the first request comes back, but only when both do.
Just in case it is useful to anyone else who may come across this — and thanks to the Promise advances in jQuery — T.J. Crowder's answer can now be improved into one succinct and general function:
/**
 * Load multiple JSON files.
 *
 * Example usage:
 *
 *   jQuery.getMultipleJSON('file1.json', 'file2.json')
 *     .fail(function(jqxhr, textStatus, error){})
 *     .done(function(file1, file2){})
 *   ;
 */
jQuery.getMultipleJSON = function(){
    return jQuery.when.apply(jQuery, jQuery.map(arguments, function(jsonfile){
        return jQuery.getJSON(jsonfile);
    })).then(function(){
        var def = jQuery.Deferred();
        return def.resolve.apply(def, jQuery.map(arguments, function(response){
            return response[0];
        }));
    });
};
However the point about not giving any feedback to the user — whilst waiting for the full load — is a good one. So for those that prefer to give responsive feedback, here's a slightly more complicated version that supports progress.
/**
 * Load multiple JSON files, with progress.
 *
 * Example usage:
 *
 *   jQuery.getMultipleJSON('file1.json', 'file2.json')
 *     .progress(function(percent, count, total){})
 *     .fail(function(jqxhr, textStatus, error){})
 *     .done(function(file1, file2){})
 *   ;
 */
jQuery.getMultipleJSON = function(){
    var
        num = 0,
        def = jQuery.Deferred(),
        map = jQuery.map(arguments, function(jsonfile){
            return jQuery.getJSON(jsonfile).then(function(){
                def.notify(1/map.length * ++num, num, map.length);
                return arguments;
            });
        })
    ;

    jQuery.when.apply(jQuery, map)
        .fail(function(){ def.rejectWith(def, arguments); })
        .done(function(){
            def.resolveWith(def, jQuery.map(arguments, function(response){
                return response[0];
            }));
        })
    ;

    return def;
};
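A usage sketch with progress feedback (the #loading element here is hypothetical, not something from the question):
// Sketch: update a hypothetical #loading element as each file arrives.
jQuery.getMultipleJSON('graphic-data.json', 'web-data.json')
    .progress(function(percent, count, total) {
        jQuery('#loading').text(Math.round(percent * 100) + '% (' + count + ' of ' + total + ')');
    })
    .fail(function(jqxhr, textStatus, error) {
        jQuery('#loading').text('Loading failed: ' + textStatus);
    })
    .done(function(graphicData, webData) {
        // both files are available here, already unwrapped to their data
    });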
This code is simple and you can access both responses together in one callback. Each argument passed to done is that request's [data, statusText, jqXHR] array, which is why the data is read from index 0:
$.when(
    $.getJSON(graphicDataUrl),
    $.getJSON(webDataUrl)
).done(function(data1, data2) {
    console.log(data1[0]);
    console.log(data2[0]);
});
What I am trying to do is create a Chrome extension that creates new, nested bookmark folders, using promises.
The function to do this is chrome.bookmarks.create(). However I cannot just
loop this function, because chrome.bookmarks.create is asynchronous. I need to wait until the folder is created, and get its new ID, before going on to its children.
Promises seem to be the way to go. Unfortunately I cannot find a minimal working example using an asynchronous call with its own callback like chrome.bookmarks.create.
I have read some tutorials 1, 2, 3, 4. I have searched Stack Overflow, but none of the questions seem to be about plain vanilla promises with the Chrome extension library.
I do not want to use a plugin or library: no node.js or jquery or Q or whatever.
I have tried following the examples in the tutorials but many things do not make sense. For example, the tutorial states:
The promise constructor takes one argument—a callback with two
parameters: resolve and reject.
But then I see examples like this:
const wait = ms => new Promise(resolve => setTimeout(resolve, ms));
How this works is a mystery to me.
Also, how can you call resolve() when it's never been defined? No example in the tutorials seems to match real-life code. Another example is:
function isUserTooYoung(id) {
    return openDatabase() // returns a promise
        .then(function(col) {return find(col, {'id': id});})
How do I pass in col, or get any results?
So if anyone can give me a minimal working example of promises with an asynchronous function with its own callback, it would be greatly appreciated.
SO wants code, so here is my non-working attempt:
//loop through all
function createBookmarks(nodes, parentid){
var jlen = nodes.length;
var i;
var node;
for(var i = 0; i < nodes.length; i++){
var node = nodes[i];
createBookmark(node, parentid);
}
}
//singular create
function createBookmark(node, parentid){
var bookmark = {
parentId : parentid,
index : node['index'],
title : node['title'],
url : node['url']
}
var callback = function(result){
console.log("creation callback happened.");
return result.id; //pass ID to the callback, too
}
var promise = new Promise(function(resolve, reject) {
var newid = chrome.bookmarks.create(bookmark, callback)
if (newid){
console.log("Creating children with new id: " + newid);
resolve( createBookmarks(bookmark.children, newid));
}
});
}
//allnodes already exists
createBookmarks(allnodes[0],"0");
It just doesn't work. The return value of chrome.bookmarks.create is always undefined (which it should be, since the real result only arrives in the callback), and I do not see how a promise object changes anything. I am equally mystified when I try to use promise.then().
var newid = promise.then( //wait for a response?
    function(result){
        return chrome.bookmarks.create(bookmark, callback);
    }
).catch(function(error){
    console.log("error " + error);
});
if (node.children) createBookmarks(node.children, newid);
Again, newid is always undefined, because of course bookmarks.create() is asynchronous.
Thank you for any help you can offer.
Honestly, you should just use the web extension polyfill. Manually promisifying the chrome APIs is a waste of time and error prone.
If you're absolutely insistent, this is an example of how you'd promisify chrome.bookmarks.create. For other chrome.* APIs, you also have to reject the callback's error argument.
function createBookmark(bookmark) {
    return new Promise(function (resolve, reject) {
        try {
            chrome.bookmarks.create(bookmark, function (result) {
                if (chrome.runtime.lastError) reject(chrome.runtime.lastError)
                else resolve(result)
            })
        } catch (error) {
            reject(error)
        }
    })
}

createBookmark({})
    .then(function (result) {
        console.log(result)
    }).catch(function (error) {
        console.log(error)
    })
To create multiple bookmarks, you could then:
function createBookmarks(bookmarks) {
    return Promise.all(
        bookmarks.map(function (bookmark) {
            return createBookmark(bookmark)
        })
    )
}

createBookmarks([{}, {}, {}, {}])
    .catch(function (error) {
        console.log(error)
    })
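For the nested folders the question is actually about, you can chain on the returned promise so that children are only created once the parent's new ID is known. This is only a sketch: it assumes each node has the { index, title, url, children } shape used in the question's code.
// Sketch: create a node, then recursively create its children under the new ID.
function createTree(node, parentId) {
    return createBookmark({
        parentId: parentId,
        index: node.index,
        title: node.title,
        url: node.url // undefined for folders
    }).then(function (result) {
        if (!node.children || node.children.length === 0) return result;
        return Promise.all(node.children.map(function (child) {
            return createTree(child, result.id);
        }));
    });
}

// allnodes[0] is the array of top-level nodes, as in the question
Promise.all(allnodes[0].map(function (node) {
    return createTree(node, "0");
})).catch(function (error) {
    console.log(error);
});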
Taking advantage of the convention that the callback is always the last argument, I use a simple helper function to promisify the Chrome API:
function toPromise(api) {
    return (...args) => {
        return new Promise((resolve) => {
            api(...args, resolve);
        });
    };
}
and use it like:
toPromise(chrome.bookmarks.create)(bookmark).then(...);
In my use case, it just works most of the time.
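If you also want failures to reject (rather than silently resolving), the same helper can be extended to check chrome.runtime.lastError, in line with the earlier answer. A sketch:
function toPromise(api) {
    return (...args) => {
        return new Promise((resolve, reject) => {
            api(...args, (result) => {
                // Chrome sets runtime.lastError when the call failed
                if (chrome.runtime.lastError) reject(chrome.runtime.lastError);
                else resolve(result);
            });
        });
    };
}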
When I execute the following code using Protractor it runs, but I am passing nested JSON to a for loop, and because the for loop works asynchronously it prints all values of the variable i and jumps straight to the last one, so it always uses the last username and password pair. How can I solve this issue?
var data = require('.../testdata.json');

describe('homepage Test', function() {
    it('candidate login', function() {
        browser.driver.get('https://abcxyz.com');
        for (i in data.testdata) {
            element(by.id('tool_btn3')).click();
            console.log(i);
            browser.getTitle().then(function(title) {
                console.log("Title: " + title)
                if (title === "<page title>") {
                    browser.driver.sleep(3000);
                    element(by.id('email_input')).sendKeys(data.testdata[i].username);
                    element(by.id('pwd_input')).sendKeys(data.testdata[i].password);
                    element(by.xpath('//*[@id="signIn_btn"]/div[2]')).click();
                    browser.sleep(3000);
                    element(by.id('setting_img')).click();
                    browser.sleep(2000);
                    element(by.id('logout_div')).click().then(function() {
                        console.log('success');
                    });
                } else {
                    console.log("problem");
                }
            });
        }
    });
});
You need to keep in mind that you can't use a plain for loop with promises. Everything is async, so in the end it will bite you in the ass: the it block is ready but the test isn't.
Based on your example I would suggest making a method called, for example, logon (place it in a Page Object or something). It will do the logon and related steps for you. Add an empty promise container (array) and push the promises into it.
When the for loop is done you can resolve the complete promise container at once, and it will execute all the promises one after another. It will look something like this.
var data = require('.../testdata.json');

describe('homepage Test', function() {
    it('candidate login', function() {
        var promises = [];
        browser.driver.get('https://abcxyz.com');
        for (i in data.testdata) {
            promises.push(expect(logon(data.testdata[i].username, data.testdata[i].password)).to.equal(true));
            promises.push(console.log(i));
        }
        Promise.all(promises);
    });
});

/**
 * Logon
 * @param {string} username
 * @param {string} password
 * @return {boolean}
 */
function logon(username, password) {
    element(by.id('tool_btn3')).click();
    return browser.getTitle().then(function(title) {
        console.log("Title: " + title)
        if (title === "<page title>") {
            browser.driver.sleep(3000);
            element(by.id('email_input')).sendKeys(username);
            element(by.id('pwd_input')).sendKeys(password);
            element(by.xpath('//*[@id="signIn_btn"]/div[2]')).click();
            browser.sleep(3000);
            element(by.id('setting_img')).click();
            browser.sleep(2000);
            return element(by.id('logout_div')).click()
                .then(function() {
                    return Promise.resolve(true);
                });
        } else {
            return Promise.resolve(false);
        }
    });
}
If you are using, for example, Node 7 you can use async/await, or use Babel to transpile the code. If you write TypeScript you also get async/await.
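A sketch of the async/await version (it assumes the logon helper above, the chai-style expect used in the snippet, and a Node version or transpile step that supports async/await):
it('candidate login', async function () {
    await browser.driver.get('https://abcxyz.com');
    for (const key of Object.keys(data.testdata)) {
        // each iteration now waits for the previous logon to finish
        const ok = await logon(data.testdata[key].username, data.testdata[key].password);
        expect(ok).to.equal(true);
    }
});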
This is my controller which is calling the login service
mod.controller("loginCtrl", function($scope, loginService, $http) {
    $scope.Userlogin = function() {
        var User = {
            userid: $scope.uname,
            pass: $scope.pass
        };
        var res = UserloginService(User);
        console.log(res);
        alert("login_succ");
    }
});
And this is the login service code which takes the User variable and checks for username & password
mod.service("loginService", function($http, $q) {
    UserloginService = function(User) {
        var deffered = $q.defer();
        $http({
            method: 'POST',
            url: 'http://localhost:8080/WebApplication4_1/login.htm',
            data: User
        }).then(function(data) {
            deffered.resolve(data);
        }).error(function(status) {
            deffered.reject({
                status: status
            });
        });
        return deffered.promise;

        // var response = $http({
        //     method: "post",
        //     url: "http://localhost:8080/WebApplication4_1/login.htm",
        //     data: JSON.stringify(User),
        //     dataType: "json"
        // });
        // return "Name";
    }
});
I have created a REST API using Spring which, when passed JSON, returns the username and password as JSON like this:
The console shows me this error for Angular:
You need to enable CORS for your application. For guidance, see this link:
https://htet101.wordpress.com/2014/01/22/cors-with-angularjs-and-spring-rest/
I prefer to use Factory to do what you're trying to do, which would be something like this:
MyApp.factory('MyService', ["$http", function($http) {
    var urlBase = "http://localhost:3000";
    return {
        getRecent: function(numberOfItems) {
            return $http.get(urlBase + "/things/recent?limit=" + numberOfItems);
        },
        getSomethingElse: function(url) {
            return $http.get(urlBase + "/other/things")
        },
        search: function(searchTerms) {
            return $http.get(urlBase + "/search?q=" + searchTerms);
        }
    }
}]);
And then in your controller you can inject MyService and use it in this way:
MyService.getRecent(10).then(function(res) {
    $scope.things = res.data;
});
This is a great way to handle it, because you're putting the .then in your controller, so you are able to control the state of the UI while the request is loading if you'd like, like this:
// initialize the loading var, set to false
$scope.loading = false;

// create a reusable update function, and inside use a promise for the ajax call,
// which is running inside the `Factory`
$scope.updateList = function() {
    $scope.loading = true;
    MyService.getRecent(10).then(function(res) {
        $scope.loading = false;
        $scope.things = res.data;
    });
};

$scope.updateList();
The error in the console shows two issues with your code:
1. CORS is not enabled in your API. To fix this you need to enable CORS by adding the Access-Control-Allow-Origin header to your REST API's responses.
2. Unhandled rejection error, because the way you are handling errors with the '.error()' method is deprecated.
The promise's '.error()' method is deprecated, according to this and this commit in the AngularJS GitHub repo.
Hence you need to change the way you are handling errors as shown below :
$http().then(successCallback, errorCallback);

function successCallback(res) {
    return res;
}

function errorCallback(err) {
    return err;
}
One more thing in your code that can be avoided: you have defined a new promise and resolved it using $q methods, which is not required. $http itself returns a promise by default, so you do not need to wrap it in another one; you can directly use $http(...).then().
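As a sketch, the same service without the extra deferred could look like this (the function is attached to the service instance here instead of being left global):
mod.service("loginService", function ($http) {
    // $http already returns a promise, so just return it.
    this.UserloginService = function (User) {
        return $http({
            method: 'POST',
            url: 'http://localhost:8080/WebApplication4_1/login.htm',
            data: User
        });
    };
});

// In the controller:
loginService.UserloginService(User).then(function (response) {
    console.log(response.data);
    alert("login_succ");
}, function (err) {
    console.log("login failed", err);
});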
I'm using Ionic to make an app and I need to store some data and read it back, but I don't know how to do it. Can you give me some directions? To be more specific: the app has an option to see the order history (it is a shopping app), so every time a user makes an order I need to save it, and whenever they wish they can see all the orders they have made. To do this I need to load all the JSON from storage and display it, and I don't know the right way to save it so that I can read it back dynamically afterwards.
You can use the $cordovaFile service:
const fileName = "orders.json"

var getUserOrders = function () {
    var d = $q.defer(),
        userOrders;

    $cordovaFile.checkFile(cordova.file.dataDirectory, fileName).then(
        function (success) {
            $cordovaFile.readAsText(cordova.file.dataDirectory, fileName).then(
                function (data) {
                    d.resolve(JSON.parse(data));
                }, function (error) {
                    ...
                });
        }, function (error) {
            // No orders saved
            d.resolve([]);
        }
    );

    return d.promise; // the deferred's promise must be returned so callers can .then() on it
};

var saveAnOrder = function (order) {
    var d = $q.defer(),
        orderToSave = order;

    getUserOrders().then(
        function (data) {
            var userOrders = data;
            userOrders.push(orderToSave);
            $cordovaFile.writeFile(cordova.file.dataDirectory, fileName, JSON.stringify(userOrders), true).then(
                function (success) {
                    d.resolve(userOrders);
                }, function (error) {
                    ...
                });
        }, function (error) {
            ...
        }
    );

    return d.promise; // same here
};
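Usage from a controller could then look roughly like this (a sketch; it assumes the two functions above are exposed by a hypothetical OrderStorage service, and the order fields are just examples):
// Sketch: OrderStorage is a hypothetical service exposing getUserOrders/saveAnOrder.
OrderStorage.saveAnOrder({ id: 42, total: 19.90 }).then(function (orders) {
    console.log('order saved, total orders:', orders.length);
});

OrderStorage.getUserOrders().then(function (orders) {
    $scope.orderHistory = orders;
});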
I'm using Sails 0.11.2 with the latest sails-mongo adapter.
I have a very large database (gigabytes of data) of mainly timestamps and values, and I query it using the blueprint API.
If I query using localhost:1337/datatable?limit=100000000000, Node.js hangs on 0.12 with a lot of CPU usage, and crashes on v4. It crashes in the toJSON function.
I've found out that I need to make multiple queries on my API, but I don't know how to go about it.
How can I make multiple queries that "don't explode" my server?
Update:
On the newer version 0.12.3, with the latest Waterline and sails-mongo, the queries run much more smoothly. The crashes in the cloud happened because I didn't have enough RAM to handle Sails.js and MongoDB on the same T2.micro instance.
I've moved the MongoDB server to an M3.Medium instance. Now the server doesn't crash anymore, but it freezes. I'm using skip/limit paging and it works nicely for Sails.js, but for MongoDB it is a great waste of resources!
MongoDB runs an internal query with limit = skip + limit, then moves the cursor to the desired data and returns it. When you paginate a lot you are running lots of these internal queries, and they get bigger as the skip grows.
As this article explains, the way to get around the waste of resources in MongoDB is to avoid using skip and cleverly use _id as part of your query.
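Roughly, the difference between the two paging styles looks like this (a sketch with placeholder variables such as pageIndex and lastSeenId):
// Skip-based paging: the server still scans past all the skipped documents.
collection.find(query).sort({ _id: 1 }).skip(pageSize * pageIndex).limit(pageSize);

// _id-based paging: continue from the last _id returned by the previous page,
// which lets MongoDB use the _id index instead of scanning.
collection.find(Object.assign({}, query, { _id: { $gt: lastSeenId } }))
    .sort({ _id: 1 })
    .limit(pageSize);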
I did not use sails-mongo, but I did implement the idea above using the MongoDB driver in Node.js:
/**
 * Motivation:
 * Wanted to put together some code that used:
 *  - BlueBird (promises)
 *  - MongoDB NodeJS Driver
 *  - and paging that did not rely on skip()
 *
 * References:
 * Based on articles such as:
 * https://scalegrid.io/blog/fast-paging-with-mongodb/
 * and GitHub public code searches such as:
 * https://github.com/search?utf8=%E2%9C%93&q=bluebird+MongoClient+_id+find+limit+gt+language%3Ajavascript+&type=Code&ref=searchresults
 * which yielded sample code hits such as:
 * https://github.com/HabitRPG/habitrpg/blob/28f2e9c356d7053884107d90d04e28dde75fa81b/migrations/api_v3/coupons.js#L71
 */

var Promise = require('bluebird'); // jshint ignore:line
var _ = require('lodash');
var MongoClient = require('mongodb').MongoClient;
var dbHandleForShutDowns;

// option a: great for debugging
var logger = require('tracer').console();
// option b: general purpose use
//var logger = console;

//...

var getPage = function getPage(db, collectionName, query, projection, pageSize, processPage) {
    //console.log('DEBUG', 'filter:', JSON.stringify(query,null,2));
    // always include _id in the projection, since it drives the paging
    if (projection) {
        projection['_id'] = true;
    } else {
        projection = {'_id': true};
    }
    return db
        .collection(collectionName)
        .find(query)
        .project(projection)
        .sort({'_id':1}).limit(pageSize)
        .toArray() // cursor methods return promises: http://mongodb.github.io/node-mongodb-native/2.1/api/Cursor.html#toArray
        .then(function processPagedResults(documents) {
            if (!documents || documents.length < 1) {
                // stop - no data left to traverse
                return Promise.resolve();
            }
            else {
                if (documents.length < pageSize) {
                    // stop - last page
                    return processPage(documents);
                }
                else {
                    return processPage(documents) // process the results of the current page
                        .then(function getNextPage(){ // then go get the next page
                            var last_id = documents[documents.length-1]['_id'];
                            query['_id'] = {'$gt' : last_id};
                            return getPage(db, collectionName, query, projection, pageSize, processPage);
                        });
                }
            }
        });
};

//...

return MongoClient
    .connect(params.dbUrl, {
        promiseLibrary: Promise
    })
    .then(function(db) {
        dbHandleForShutDowns = db;
        return getPage(db, collectionName, {}, {}, 5, function processPage(pagedDocs){ console.log('do something with', pagedDocs); })
            .finally(db.close.bind(db));
    })
    .catch(function(err) {
        console.error("ERROR", err);
        dbHandleForShutDowns.close();
    });
The following two sections show how the code manipulates _id and makes it part of the query:
.sort({'_id':1}).limit(pageSize)
// [...]
var last_id = documents[documents.length-1]['_id'];
query['_id'] = {'$gt' : last_id};
Overall code flow:
Let getPage() handle the work, you can set the pageSize and query to your liking:
return getPage(db, collectionName, {}, {}, 5, function processPage(pagedDocs){console.log('do something with', pagedDocs);})
Method signature:
var getPage = function getPage(db, collectionName, query, projection, pageSize, processPage) {
Process pagedResults as soon as they become available:
return processPage(documents) // process the results of the current page
Move on to the next page:
return getPage(db, collectionName, query, projection, pageSize, processPage);
The code will stop when there is no more data left:
// stop - no data left to traverse
return Promise.resolve();
Or it will stop when working on the last page of data:
// stop - last page
return processPage(documents);
I hope this offers some inspiration, even if it's not an exact solution for your needs.
1. Run an aggregate to collect the matching IDs:
const SailsMongoQuery = require('sails-mongo/lib/query/index.js')
const SailsMongoMatchMongoId = require('sails-mongo/lib/utils.js').matchMongoId

const fn = model.find(query).paginate(paginate)
const criteria = fn._criteria
const queryLib = new SailsMongoQuery(criteria, {})
const queryOptions = _.omit(queryLib.criteria, 'where')
const where = queryLib.criteria.where || {}
const queryWhere = Object.keys(where).reduce((acc, key) => {
    const val = where[key]
    acc[key] = SailsMongoMatchMongoId(val) ? new ObjectID(val) : val
    return acc
}, {})

const aggregate = [
    { $match: queryWhere }
].concat(Object.keys(queryOptions).map(key => ({ [`$${key}`]: queryOptions[key] })))

// console.log('roge aggregate --->', JSON.stringify(aggregate, null, 2))

model.native((err, collection) => {
    if (err) return callback(err)
    collection.aggregate(aggregate, { allowDiskUse: true }).toArray(function (err, docs) {
        if (err) return callback(err)
        const pk = primaryKey === 'id' ? '_id' : primaryKey
        ids = docs.reduce((acc, doc) => [...acc, doc[pk]], [])
        callback()
    })
})
2. Run a Sails find by those IDs:
query = Object.assign({}, query, { [primaryKey]: ids }) // check primary key in sails model
fn = model.find(query) // .populate or another method
fn.exec((err, results) => { console.log('result ->>>>', err, results) })