How to get the total file count of a specific folder including subfolders - box-api

I know there is NOT a direct way to get all files + folders in a Box folder hierarchy; we have to recursively get the items of each subfolder.
However, if I simply need an estimated count, is it possible to get that basic info?
The transfer tool https://www.multcloud.com estimates the file count first; once that is done, it starts the transfer. From my observation it looks like it also iterates the folders recursively, but how can it know the recursion has completed?
Thank you for any hints!

If your tool is executed on the same machine on different dates, you can maintain some "estimation data" (i.e. persist the number of files in certain directories during one run, and use those old subtree counts as estimates for later runs).
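For illustration, a minimal sketch of that idea in Node.js, assuming a hypothetical local JSON cache file (the file name and structure are made up):

var fs = require('fs');
var CACHE_FILE = './folder-count-cache.json'; // hypothetical cache location

// Load the per-folder counts persisted by a previous run (empty object on first run)
function loadEstimates() {
    try {
        return JSON.parse(fs.readFileSync(CACHE_FILE, 'utf8'));
    } catch (e) {
        return {};
    }
}

// After a full recursive traversal, persist the real counts so the next run can use them as estimates
function saveEstimates(counts) {
    fs.writeFileSync(CACHE_FILE, JSON.stringify(counts, null, 2));
}

// Usage: read the old estimate first, then update it once the real traversal has finished
var estimates = loadEstimates();
console.log('Estimated items under folder 0:', estimates['0'] || 'unknown');
// ... run the real traversal, collect realCounts, then: saveEstimates(realCounts);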

Hi, I finally found that this is just the common question of how to determine that a recursive iteration has completed. This other post helped a lot: How to detect completion of recursive asynchronous calls in javascript
With those hints, my code works well. The script is below for reference. It could be converted to Promises; I have not tried that, though.
function buildTreeNodes(boxTopFolderId){

    function startBuild(){
        // get the items of the top folder
        box.folders.getItems(boxTopFolderId, function (err, data) {
            if (err) {
                console.log(err);
                return;
            }
            var results = [];
            results.finished = 0;
            var len = data.entries.length;

            // iterate over the top folder's items
            data.entries.forEach(function (item, index) {
                var boxid = item.id;     // Box id of the file or folder
                var boxtype = item.type; // 'file' or 'folder'
                var boxname = item.name; // item name

                if (boxtype === 'folder') {
                    recursiveFolder(boxid, function (result) {
                        results[index] = result;
                        if (++results.finished === len) {
                            // recursion done!
                        }
                    });
                } else {
                    results[index] = item;
                    if (++results.finished === len) {
                        // recursion done!
                    }
                }
            }); // end forEach

            if (len === 0) {
                // recursion done!
            }
        });
    }

    function recursiveFolder(folderId, callback){
        box.folders.getItems(folderId, function (err, data) {
            if (err) {
                console.log(err);
                return;
            }
            var results = [];
            results.finished = 0;
            var len = data.entries.length;

            data.entries.forEach(function (item, index) {
                var boxid = item.id;     // Box id of the file or folder
                var boxtype = item.type; // 'file' or 'folder'
                var boxname = item.name; // item name

                if (boxtype === 'folder') {
                    recursiveFolder(boxid, function (result) {
                        results[index] = result;
                        if (++results.finished === len) {
                            callback(results);
                        }
                    });
                } else {
                    results[index] = item;
                    if (++results.finished === len) {
                        callback(results);
                    }
                }
            }); // end forEach

            if (len === 0) {
                callback(results);
            }
        });
    }

    startBuild();
}
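As a follow-up to the note above about converting this to Promises, a minimal, untested sketch of a promise-based variant (counting items rather than building the full tree), assuming the same box client:

// Resolve with the total number of items (files + folders) under folderId.
function countItems(folderId) {
    return new Promise(function (resolve, reject) {
        box.folders.getItems(folderId, function (err, data) {
            if (err) { return reject(err); }
            resolve(data.entries);
        });
    }).then(function (entries) {
        var counts = entries.map(function (item) {
            // A folder contributes its own recursive count plus one for itself
            return item.type === 'folder'
                ? countItems(item.id).then(function (n) { return n + 1; })
                : Promise.resolve(1);
        });
        return Promise.all(counts).then(function (perItem) {
            return perItem.reduce(function (a, b) { return a + b; }, 0);
        });
    });
}

// countItems(boxTopFolderId).then(function (total) { console.log('Total items:', total); });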

Related

Get:Properties Selection Component. Autodesk Forge

I am trying to generate a list of elements (dbIds) of a Navisworks model that comes from Revit and Istram IFC files. I have a problem with the selection tree levels, since I need the first-level elements of each object, and for Revit files I am selecting the body. How can I build this code so that it reads the components of a higher level?
If someone can help me, I would really appreciate it.
constructor(viewer) {
this._modelData = {};
this._viewer = viewer;
}
init(callback) {
var _this = this;
_this.getAllLeafComponents(function (dbIds) {
console.log(dbIds);
var count = dbIds.length;
dbIds.forEach(function (dbId) {
viewer.getProperties(dbId, function (props) {
props.properties.forEach(function (prop) {
if (!isNaN(prop.displayValue)) return; // let's not categorize properties that store numbers
// some adjustments for revit:
prop.displayValue = prop.displayValue.replace('Revit ', ''); // remove this Revit prefix
if (prop.displayValue.indexOf('<') == 0) return; // skip categories that start with <
// ok, now let's organize the data into this hash table
if (_this._modelData[prop.displayName] == null) _this._modelData[prop.displayName] = {};
if (_this._modelData[prop.displayName][prop.displayValue] == null) _this._modelData[prop.displayName][prop.displayValue] = [];
_this._modelData[prop.displayName][prop.displayValue].push(dbId);
})
if ((--count) === 1) callback();
});
})
});

Node.js asynchronous nested mysql queries

To set the scenario for the code, the database stores Documents, and each Document has the potential to have Images associated with it.
I have been trying to write a route that queries the database for each Document that has Images related to it, storing this data in JSON which is returned to the ajax request when completed, so the data can be viewed on the page. The closest I have got so far is the attempt below (see code).
router.post('/advanced_image_search', userAuthenticated, function(req, res, next) {
async.waterfall([
// First function is to get each document which has an image related
function getDocuments(callback){
connection.query(`SELECT DISTINCT(Document.document_id), Document.doc_name, Document.doc_version_no, Document.doc_date_added
FROM Document WHERE doc_type = 'image'`, function(err, results) {
if (err) {
callback(err, null);
return;
}
// The Object containing the array where the data from the db needs to be stored
var documents = {
'docs': []
};
// foreach to iterate through each result found from the first db query (getDocuments)
results.forEach(function(result) {
// New object to store each document
var document = {};
document.entry = result;
// This is the array where each image associated with a document will be stored
document.entry.images = [];
// Push each document to the array (above)
documents.docs.push(document);
var doc_id = result.document_id;
})
// Returning the results as 'documents' to the next function
callback(null, documents);
})
},
function getImages(documents, callback){
// Variable assignement to the array of documents
var doc_array = documents.docs;
// Foreach of the objects within document array
async.forEachOf(doc_array, function(doc, key, callback){
// Foreach object do the following series of functions
async.waterfall([
function handleImages(callback){
// The id of the document to get the images for
var doc_id = doc.entry.document_id;
connection.query(`SELECT * FROM Image, Document WHERE Image.document_id = '${doc_id}' AND Image.document_id = Document.document_id`, function(err, rows) {
if (err) {
callback(err, null);
return;
}
callback(null, rows);
})
},
// Function below to push each image to the document.entry.images array
//
function pushImages(rows, callback){
// If multiple images are found for that document, the loop iterates through each pushing to the images array
for (var j = 0; j < rows.length; j++) {
// Creating new object for each image found so the data can be stored within this object, then pushed into the images array
var image = {
'image_name': rows[j].image_name
};
doc.entry.images.push(image);
}
callback(null, doc_array);
}
], function(err, doc_array){
if (err) {
console.log('Error in second waterfall callback:')
callback(err);
return;
}
console.log(doc.entry);
// callback(null, doc_array);
})
}, function(err, doc_array){
if (err) {
callback(err);
return;
}
callback(null, doc_array);
});
callback(null, doc_array);
}
], function(err, doc_array) {
if (err){
console.log('Error is: '+err);
return;
}
// The response that should return each document with each related image in the JSON
res.send(doc_array);
})
});
At the moment the results returned are:
1:
{entry: {document_id: 1, doc_name: "DocumentNameHere", doc_version_no: 1,…}}
entry:
{document_id: 1, doc_name: "DocumentNameHere", doc_version_no: 1,…}
doc_date_added:"2016-10-24"
doc_name:"DocumentNameHere"
doc_version_no:1
document_id:1
images:[]
As can be seen above, the images array remains empty even though, from my testing, the images are being found (console.log).
I hope someone is able to assist with this, as I am struggling to find the problem with this complex one.
Thanks
There are several async operations going on here and each operation needs a callback. See revised code:
router.post('/advanced_image_search', userAuthenticated, function(req, res, next) {
var getDocuments = function(next) {
// Function for getting documents from DB
var query = `SELECT DISTINCT(Document.document_id), Document.doc_name, Document.doc_version_no, Document.doc_date_added FROM Document WHERE doc_type = 'image'`; // Set the query
connection.query(query, function(err, results) {
// Run the query async
if(err) {
// If err end execution
next(err, null);
return;
}
var documentList = []; // Array for holding docs
for(var i=0; i<results.length; i++) {
// Loop over results, construct the document and push to an array
var documentEntry = results[i];
var documentObject = {};
documentObject.entry = documentEntry;
documentObject.entry.images = [];
documentObject.id = documentEntry.document_id;
documentList.push(documentObject);
}
next(null, documentList); // Pass to next async operation
});
};
var getImages = function(documents, next) {
    // Function for getting images for each document
    var finalDocs = []; // Blank array for final documents with images
    var remaining = documents.length; // Counter so we know when every query has finished
    documents.forEach(function(doc) {
        // Loop over each document and construct the query
        var id = doc.id;
        var query = `SELECT * FROM Image, Document WHERE Image.document_id = '${id}' AND Image.document_id = Document.document_id`;
        connection.query(query, function(err, images) {
            // Execute the query async
            if(err) {
                // Throw error to callback
                next(err, null);
                return;
            }
            var processedDoc = processImages(doc, images); // Call a helper function to process all images into the document object
            finalDocs.push(processedDoc); // Push the processed doc
            if(--remaining === 0) {
                // If there are no more documents move onto next async
                next(null, finalDocs);
            }
        });
    });
};
var processImages = function(doc, images) {
for (var i=0; i< images.length; i++) {
// Loop over each document image - construct object
var image = {
'image_name': images[i].image_name
};
doc.entry.images.push(image); // Push image into document object
}
return doc; // Return processed doc
};
getDocuments(function(err, docs) {
if(err) {
// Your error handler
}
if(docs) {
getImages(docs, function(err, finalDocs) {
if(err) {
// Your error handler
}
if(finalDocs) {
console.log(finalDocs);
res.status(200).json(finalDocs); // Send response
}
});
}
});
});
First we create a function to get documents. This function accepts a callback as its parameter. We run our query, construct our doc list, and then return the document list by executing our callback.
Next we run a function to get our images for each document. This function accepts our document list and a callback as our parameters. It retrieves images for each document and calls a helper function (sync)
Our helper function processes the images into each document and returns the processed document.
We then finish our operations by returning an array of processed documents via the second callback.
Other notes
We could tidy this up by structuring this procedural-style code into a contained object
The nesting of the function executions at the end of the document could be cleaned up further
I've avoided using the async library, as that helps build a better understanding of the callback model
Event emitters could be used to flatten the callbacks - see https://nodejs.org/dist/latest-v7.x/docs/api/events.html
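As a rough, illustrative sketch of that event-emitter idea (the event names and wiring here are made up, not part of the code above):

var EventEmitter = require('events').EventEmitter;
var routeEvents = new EventEmitter();

// Each stage emits an event instead of nesting deeper callbacks
routeEvents.on('documentsLoaded', function (docs) {
    // run the image queries here, then: routeEvents.emit('imagesLoaded', finalDocs);
});
routeEvents.on('imagesLoaded', function (finalDocs) {
    // res.status(200).json(finalDocs);
});
routeEvents.on('error', function (err) {
    // a single place to handle failures from any stage
});

// Inside the documents query callback you would emit:
// routeEvents.emit('documentsLoaded', documentList);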
Hope this helps
Dylan

Is file path valid in gulp src

I'm looking for a solution which will help me find out whether a file path is valid, and if the file path is not valid, to show some error.
gulp.task("scripts-libraries", ["googlecharts"], function () {
var scrlibpaths = [
"./node_modules/jquery/dist/jquery.min.js",
"./node_modules/bootstrap/dist/js/bootstrap.min.js",
"./libs/AdminLTE-2.3.0/plugins/slimScroll/jquery.slimscroll.min.js",
"./libs/AdminLTE-2.3.0/plugins/fastclick/fastclick.min.js",
"./libs/adminLTE-app.js",
"./node_modules/moment/min/moment.min.js",
"./node_modules/jquery.inputmask/dist/jquery.inputmask.bundle.js",
"./node_modules/bootstrap-timepicker/js/bootstrap-timepicker.min.js",
"./node_modules/bootstrap-checkbox/dist/js/bootstrap-checkbox.min.js",
"./node_modules/bootstrap-daterangepicker/daterangepicker.js",
"./node_modules/select2/dist/js/select2.full.min.js",
"./node_modules/toastr/build/toastr.min.js",
"./node_modules/knockout/build/output/knockout-latest.js",
"./node_modules/selectize/dist/js/standalone/selectize.min.js",
//"./src/jquery.multiselect.js"
];
for (var i = 0; i < scrlibpaths.length; i++) {
if (scrlibpaths[i].pipe(size()) === 0) {
console.log("There is no" + scrlibpaths[i] + " file on your machine");
return;
}
}
return gulp.src(scrlibpaths)
.pipe(plumber())
.pipe(concat("bundle.libraries.js"))
.pipe(gulp.dest(config.path.dist + "/js"));
});
So how can I make this work?
You can use the glob module to check if the paths/globs you pass to gulp.src() refer to existing files. Gulp itself uses glob internally via glob-stream so this should be the most reliable option.
Here's a function that uses glob and that you can use as a more or less drop-in replacement for the regular gulp.src():
var glob = require('glob');
function gulpSrc(paths) {
paths = (paths instanceof Array) ? paths : [paths];
var existingPaths = paths.filter(function(path) {
if (glob.sync(path).length === 0) {
console.log(path + ' doesnt exist');
return false;
}
return true;
});
return gulp.src((paths.length === existingPaths.length) ? paths : []);
}
You can then use it like this:
return gulpSrc(scrlibpaths)
.pipe(plumber())
.pipe(concat("bundle.libraries.js"))
.pipe(gulp.dest(config.path.dist + "/js"));
If any of the paths/globs in scrlibpaths doesn't exist, a warning is logged and the stream will be empty (meaning no files will be processed at all).
Since gulp is just like any other node script, you can use fs.accessSync to check whether the file exists (I assume you probably want to be synchronous).
var fs = require('fs');
scrlibpaths.map(function(path) {
try {
fs.accessSync(path);
} catch (e) {
console.log("There is no " + path + " file on your machine");
}
});
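If an asynchronous check is acceptable instead, the callback-based fs.access works the same way; a quick sketch:

var fs = require('fs');

scrlibpaths.forEach(function(path) {
    // fs.access calls back with an error if the path does not exist (or is not readable)
    fs.access(path, function(err) {
        if (err) {
            console.log("There is no " + path + " file on your machine");
        }
    });
});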

Angular JS correct factory structure

I have a factory in my AngularJS single page application that parses a given date against a JSON file to return the season and the week number in the season. I am currently requesting the JSON file twice, once in each method, with $http.get('content/calendar.json').success(function(data) {....
How can I factor out the call so it happens only once, regardless of how many methods use it?
emmanuel.factory('DayService', function($http, $q){
var obj = {};
obj.season = function(d){
// receives a mm/dd/yyyy string parses against Calendar service for liturgical season
d = new Date(d);
var day = d.getTime();
var promise = $q.defer();
var temp;
$http.get('content/calendar.json').success(function(data) {
for (var i=0; i<data.calendar.seasons.season.length; i++){
var start = new Date(data.calendar.seasons.season[i].start);
var end = new Date(data.calendar.seasons.season[i].end);
end.setHours(23,59);
//sets the time to be the last minute of the season
if (day >= start && day <= end){
//if given time fits within start and end dates in calendar then return season
temp = data.calendar.seasons.season[i].name;
//console.log(temp);
promise.resolve(temp);
break;
}
}
});
return promise.promise;
}
obj.weekInSeason = function(d){
//receives a date format mm/dd/yyyy
var promise = $q.defer();
$http.get('content/calendar.json').success(function(data) {
for (var i=0; i<data.calendar.seasons.season.length; i++){
d = new Date(d);
var day = d.getTime();
var end = new Date(data.calendar.seasons.season[i].end);
end.setHours(23,59);
end = end.getTime();
var diff = end - day;
if (parseFloat(diff) > 0){
var start = new Date(data.calendar.seasons.season[i].start);
start = start.getTime();
var startDiff = day - start;
var week = parseInt(startDiff /(1000*60*60*24*7))+1;
promise.resolve(week);
break;
}
}
});
return promise.promise;
}
obj.getData = function (d) {
console.log('DayService.getData')
console.log(today)
var data = $q.all([
this.season(d),
this.weekInSeason(d)
]);
return data;
};
return obj;
});
This solution assumes that content/calendar.json never changes.
I have answered a question which can help you with this problem one way or another. Basically, you must fetch all the necessary configurations/settings before the application bootstraps. Manually bootstrap the application; this means that you must remove the ng-app directive from your html.
Steps:
[1] Create bootstrapper.js as instructed in the answered question I mentioned above. Basically, it should look like this (Note: you can add more configuration urls in urlMap if you need to add more settings to your application before it bootstraps):
angular.injector(['ng']).invoke(function($http, $q) {
var urlMap = {
$calendar: 'content/calendar.json'
};
var settings = {};
var promises = [];
var appConfig = angular.module('app.settings', []);
angular.forEach(urlMap, function(url, key) {
promises.push($http.get(url).success(function(data) {
settings[key] = data;
}));
});
$q.all(promises).then(function() {
bootstrap(settings);
}).catch(function() {
bootstrap();
});
function bootstrap(settings) {
appConfig.value('Settings', settings);
angular.element(document).ready(function() {
angular.bootstrap(document, ['app', 'app.settings']);
});
}
});
[2] Assuming that the name of your main module is app within app.js:
angular.module('app', [])
.factory('DayService', function(Settings){
var calendar = Settings.$calendar,
season = calendar.seasons.season,
obj = {};
obj.season = function(d){
var day = new Date(d).getTime(),
start, end, value;
for (var i = 0; i < season.length; i++){
start = new Date(season[i].start);
end = new Date(season[i].end);
end.setHours(23,59);
if (day >= start && day <= end){
value = season[i].name;
break;
}
}
return value;
};
obj.weekInSeason = function(d){
var day = new Date(d).getTime(),
end, diff, start, startDiff, week;
for (var i = 0; i < season.length; i++){
end = new Date(season[i].end);
end.setHours(23,59);
end = end.getTime();
diff = end - day;
if (parseFloat(diff) > 0){
start = new Date(season[i].start);
start = start.getTime();
startDiff = day - start;
week = parseInt(startDiff /(1000*60*60*24*7))+1;
break;
}
}
return week;
};
return obj;
});
[3] Controller Usage (Example):
angular.module('app')
.controller('SampleController', function(DayService) {
console.log(DayService.season(3));
console.log(DayService.weekInSeason(3));
});
Another Note: Use .run() to check if Settings === null - if this is true, you can redirect to an error page or any page that displays the problem (this means that the application bootstrapped, but one of the requested configurations failed).
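A minimal sketch of what that .run() check could look like (the error route is just an example):

angular.module('app')
    .run(function($location, Settings) {
        // Settings is undefined/null if one of the configuration requests failed before bootstrap
        if (Settings == null) {
            $location.path('/error');
        }
    });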
UPDATE:
I checked the link you have provided, and it seems that the version you are using is AngularJS v1.0.8, which does not have a .catch() method in its $q promise implementation.
You have the following options to consider in solving this problem:
1. Change the AngularJS version you are using to the latest stable version, 1.2.23.
Note that this option may break some of your code that is highly reliant on the version that you are using.
2. Change this block:
$q.all(promises).then(function() {
bootstrap(settings);
}).catch(function() {
bootstrap();
});
to:
$q.all(promises).then(function() {
bootstrap(settings);
}, function() {
bootstrap();
});
This option is safer if you already have existing code that relies on the current AngularJS version you are using. But I would suggest you change to the current stable version, as it has more facilities and fixes than the one you are currently using.
Use your scope and closure of the factory to store the value of the response to the http call. I created an object called calData, which happens to already be a promise!
This gives you the ability to kick a few things off when the first call to the factory is made by running an IIFE (this is the function called initService), and everything chains together to resolve after the data is loaded.
.factory('dayService', function dayServiceFactory($http, $q){
    var getCalData = $q.defer();
    var calData = getCalData.promise; // has no data properties until _loadData is called and resolved

    function _loadData(){
        var deferred = $q.defer();
        $http.get('content/calendar.json').success(function(data) {
            calData.seasons = data.calendar.seasons; // your code seems to always use at least calendar.seasons, so easier to assign that
            deferred.resolve(data);
        });
        return deferred.promise;
    }

    // this function will automatically run and load data the first time the factory is executed
    (function initService(){
        _loadData().then(function(data){
            // here is where you build all the functions that read calData.seasons or any other child property of calData
            calData.getSeason = function(){
                for (var i = 0; i < data.calendar.seasons.season.length; i++){
                    // code here
                }
            }; // function to get the season using calData.seasons
            calData.weekInSeason = function(){};
            getCalData.resolve(); // this resolves the data in the outer scope
        });
    }());

    return calData; // returns the promise, and will execute the first time called
});
To use this in a controller, make sure to either resolve the service before you instantiate the controller, or, within the controller, use your assignments of the data after it has resolved. (Bound values will auto-update when it's resolved.)
dayService.then(function(){
// now you can use this:
var week = dayService.weekInSeason();
})
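For the first option (resolving before the controller instantiates), a hypothetical ngRoute configuration might look like this (route, template, and controller names are made up, and it assumes ngRoute is loaded):

angular.module('app').config(function($routeProvider) {
    $routeProvider.when('/day', {
        templateUrl: 'day.html',
        controller: 'DayController',
        resolve: {
            // DayController is only instantiated once this promise resolves
            calData: function(dayService) {
                return dayService; // the factory itself returns a promise
            }
        }
    });
});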
You can create a separate method for getting the calendar data and chain promises in the getData method:
emmanuel.factory('DayService', ['$q', '$timeout', '$log',
function($q, $timeout, $log) {
return {
season: season,
weekInSeason: weekInSeason,
getData: getData
};
function season(d) {
$log.log('season called');
return getCalendar(d).then(function(calendar) {
return getSeason(d, calendar);
});
}
function weekInSeason(d) {
$log.log('weekInSeason called');
return getCalendar(d).then(function(calendar) {
return getWeekInSeason(d, calendar);
});
}
function getData(d) {
$log.log('getData called');
return getCalendar(d).then(
function(calendar) {
return $q.all({
season: getSeason(d, calendar),
weekInSeason: getWeekInSeason(d, calendar)
});
}
);
}
function getSeason(date, calendar) {
$log.log('getSeason called');
return {
date: date,
calendar: calendar,
method: 'getSeason'
};
}
function getWeekInSeason(date, calendar) {
$log.log('getWeekInSeason called');
return {
date: date,
calendar: calendar,
method: 'getWeekInSeason'
};
}
function getCalendar(d) {
$log.log('getCalendar called');
var deferred = $q.defer();
$timeout(function() {
deferred.resolve(12345);
}, 2000);
return deferred.promise;
}
}
]);
Also, if calendar.json doesn't change during the application lifetime, you can cache the calendar.json ajax request result as suggested by @runTarm
Plunker
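For example, Angular's built-in $http cache would let a getCalendar helper hit the network only once; a small sketch, assuming $http is injected and the real calendar URL replaces the $timeout stub above:

function getCalendar(d) {
    // With cache: true, the first call performs the request and later calls
    // are served from Angular's $http cache.
    return $http.get('content/calendar.json', { cache: true })
        .then(function(response) {
            return response.data;
        });
}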

NodeJS MySQL Dump

I've attempted to write a basic cron script to run and 'dump' a mysql database. For some reason, when it 'successfully saves the file', it does create the file, but it is empty. If instead of saving the file, I perform a console.log, it prints an empty string. Any thoughts on what I may be doing wrong?
Thanks in advance.
var mysql_backup = function(){
this.backup = '';
this.mysql = require('mysql'),
this.init = function(){
this.connection = this.mysql.createConnection({
user: 'root',
password: 'root',
database: 'test'
});
}
this.query = function(sql, callback) {
this.connection.query(sql, function (error, results, fields) {
if (error) {
throw error;
}
if (results.length > 0) {
callback(results);
}
});
}
this.get_tables = function(callback){
var me = this;
me.query('SHOW TABLES',
function(tables) {
for (var table in tables){
me.query(
'SHOW CREATE TABLE ' + tables[table].Tables_in_test,
function(r){
for (var t in r) {
me.backup += "DROP TABLE " + r[t].Table + "\n\n";
me.backup += r[t]["Create Table"] + "\n\n";
}
}
)
}
me.save_backup();
});
}
this.save_backup = function(){
var fs = require('fs');
fs.writeFile("./backup_test.txt", this.backup, function(err) {
if(err) {
console.log(err);
} else {
console.log("The file was saved!");
}
});
}
};
var db = new mysql_backup;
db.init();
db.get_tables();
db.connection.destroy();
The code as written didn't even get to saving a file for me. There seem to be a few issues. Not sure if this is the actual code or whether some things got lost in the copy-paste. However, based on what you've got:
A big one is that you never connect to the database in your code with connection.connect().
The code you want to run once connected should be inside the connection.connect() callback. e.g.
connection.connect(function (err) {
    if (err)
        throw new Error('Panic');
    // if no error, we are off to the races...
});
However, even if you quickly refactor your code to wrap your last lines inside that connection callback, you'll still have problems, because you are destroying the connection before the various SQL calls are made, so you will want to move that code into some sort of final callback.
Even after you do that, you'll still have an empty file, because you're calling save_backup from your 'SHOW TABLES' callback rather than after you have actually populated it via the inner callback where you get the CREATE TABLE statement and populate the backup property.
This is the minimal rewriting of your code which will do what you are intending. An important thing to note is the "counter" which manages when to write the file and close the connection. I would make other changes if it were mine, including:
Using 'self' instead of 'me'
Using a numeric for loop rather than the for (... in ...) syntax
Having my own callbacks follow the node convention of (err, stuff)
A more substantial change is that I would rewrite this to use promises, as doing so can spare you some grief with the confusion inherent in deeply nested callbacks. I personally like the Q library, but there are several options here.
Hope this helped.
var mysql_backup = function(){
this.backup = '';
this.mysql = require('mysql');
this.init = function(){
this.connection = this.mysql.createConnection({
user : 'root',
password : 'root',
database : 'test'
});
};
this.query = function(sql, callback) {
this.connection.query(sql, function (error, results, fields) {
if (error) {
throw error;
}
if (results.length > 0) {
callback(results);
}
});
};
this.get_tables = function(callback){
var counter = 0;
var me = this;
this.query('SHOW TABLES',
function(tables) {
for (table in tables){
counter++;
me.query(
'SHOW CREATE TABLE ' + tables[table].Tables_in_mvc,
function(r){
for (t in r) {
me.backup += "DROP TABLE " + r[t].Table + "\n\n";
me.backup += r[t]["Create Table"] + "\n\n";
}
counter--;
if (counter === 0){
me.save_backup();
me.connection.destroy();
}
}
)
}
});
};
this.save_backup = function(){
var fs = require('fs');
fs.writeFile("./backup_test.txt", this.backup, function(err) {
if(err) {
console.log(err);
} else {
console.log("The file was saved!");
}
});
}
};
var db = new mysql_backup;
db.init();
db.connection.connect(function (err){
if (err) console.log(err);
db.get_tables(function(x){;});
});
Update: If you are curious, here is a heavily-commented implementation using promises. Note that without the comments explaining the Q promise library functions, it is somewhat shorter than the original version and also offers more comprehensive error handling.
var MysqlBackup = function(connectionInfo, filename){
var Q = require('q');
var self = this;
this.backup = '';
// my personal preference is to simply require() inline if I am only
// going to use something a single time. I am certain some will find
// this a terrible practice
this.connection = require('mysql').createConnection(connectionInfo);
function getTables(){
// return a promise from invoking the node-style 'query' method
// of self.connection with parameter 'SHOW TABLES'.
return Q.ninvoke(self.connection,'query', 'SHOW TABLES');
};
function doTableEntries(theResults){
// note that because promises only pass a single parameter around,
// if the 'denodeify-ed' callback has more than two parameters (the
// first being the err param), the parameters will be stuffed into
// an array. In this case, the content of the 'fields' param of the
// mysql callback is in theResults[1]
var tables = theResults[0];
// create an array of promises resulting from another Q.ninvoke()
// query call, chained to .then(). Note that then() expects a function,
// so recordEntry() in fact builds and returns a new one-off function
// for actually recording the entry (see recordEntry() impl. below)
var tableDefinitionGetters = [];
for (var i = 0; i < tables.length ; i++){
// I noticed in your original code that your Tables_in_[] did not
// match your connection details ('mvc' vs 'test'), but the below
// should work and is a more generalized solution
var tableName = tables[i]['Tables_in_'+connectionInfo.database];
tableDefinitionGetters.push(Q.ninvoke(self.connection, 'query', 'SHOW CREATE TABLE ' + tableName)
.then(recordEntry(tableName)) );
}
// now that you have an array of promises, you can use Q.allSettled
// to return a promise which will be settled (resolved or rejected)
// when all of the promises in the array are settled. Q.all is similar,
// but its promise will be rejected (immediately) if any promise in the
// array is rejected. I tend to use allSettled() in most cases.
return Q.allSettled(tableDefinitionGetters);
};
function recordEntry (tableName){
return function(createTableQryResult){
self.backup += "DROP TABLE " + tableName + "\n\n";
self.backup += createTableQryResult[0][0]["Create Table"] + "\n\n";
};
};
function saveFile(){
// Q.denodeify return a promise-enabled version of a node-style function
// the below is probably excessively terse with its immediate invocation
return (Q.denodeify(require('fs').writeFile))(filename, self.backup);
}
// with the above all done, now you can actually make the magic happen,
// starting with the promise-return Q.ninvoke to connect to the DB
// note that the successive .then()s will be executed iff (if and only
// if) the preceding item resolves successfully, .catch() will get
// executed in the event of any upstream error, and finally() will
// get executed no matter what.
Q.ninvoke(this.connection, 'connect')
.then(getTables)
.then(doTableEntries)
.then(saveFile)
.then( function() {console.log('Success'); } )
.catch( function(err) {console.log('Something went awry', err); } )
.finally( function() {self.connection.destroy(); } );
};
var myConnection = {
host : '127.0.0.1',
user : 'root',
password : 'root',
database : 'test'
};
// I have left this as constructor-based calling approach, but the
// constructor just does it all so I just ignore the return value
new MysqlBackup(myConnection,'./backup_test.txt');