Can't insert elements from an array into a MySQL database with Node.js - mysql

I have a problem saving data from an array into a MySQL database with Node.js.
This is my code:
for (var i = 0; i < data.data.length; i++) {
    var imageObject = data.data[i];
    var url = imageObject.images.standard_resolution.url;
    var id = imageObject.id;
    var sql = 'SELECT COUNT(*) AS imageIDCount FROM images WHERE id = ?';
    connection.query(sql, [id], function(err, rows, fields) {
        if (err) throw err;
        console.log(rows[0].imageIDCount);
        if (rows[0].imageIDCount == 0) {
            console.log(id + " doesn't exist"); // ### the ID at this point is always the last one from the array
            //insertImage(id, url);
        } else {
            // console.log("ID exists");
        }
    });
}
This code runs when I get a response from a REST API via the request library.
So my problem is that when the result of the COUNT query comes back and there is no element with the specific id, I always get the same id. I think that's because I use the same variable "id" there, but how can I fix it? I hope somebody can help me.

In this case, you're a victim of Node's asynchronous event loop. You're executing a synchronous for-loop and defining id:
for (var i = 0; i < data.data.length; i++) {
    var id = imageObject.id;
}
This works in normal JavaScript if you try to do something with id, but because the database module you're using runs asynchronously, that entire loop will already have completed before your first db query finishes, effectively clobbering the value of id.
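The clobbering is easy to reproduce without a database; a minimal sketch with setTimeout standing in for the query:

// var is function-scoped: every callback closes over the same i
for (var i = 0; i < 3; i++) {
    setTimeout(function() {
        console.log(i); // prints 3, 3, 3 -- the loop finished before any callback ran
    }, 0);
}

// let (ES6) is block-scoped: each iteration gets its own binding
for (let j = 0; j < 3; j++) {
    setTimeout(function() {
        console.log(j); // prints 0, 1, 2
    }, 0);
}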
You'll need to re-write your function to behave asynchronously instead, or use something like node-async to help.
Here's a quick example of how that might look. Note that I didn't write your insertImage function for you; you'll need to rewrite that to support a callback as well.
async.each(data.data, function(imageObject, callback) {
    var sql = 'SELECT COUNT(*) AS imageIDCount FROM images WHERE id = ?';
    connection.query(sql, [imageObject.id], function(err, rows, fields) {
        if (err) return callback(err);
        if (rows[0].imageIDCount == 0) {
            console.log(imageObject.id + " doesn't exist");
            insertImage(imageObject.id, imageObject.images.standard_resolution.url, function(err) {
                callback(err); // fires the callback to async
            });
        } else {
            console.log(imageObject.id + " already exists");
            callback(); // maybe you want an error here too?
        }
    });
}, function(err) {
    // all of your db queries are completed
});
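For completeness: on newer Node versions you could also keep a plain loop and avoid the clobbering entirely with block-scoped variables and a promisified query. A rough sketch (assuming Node 8+ and the mysql module; insertImage would need a promise-based variant as well):

const util = require('util');
// assumes `connection` is an existing mysql connection
const query = util.promisify(connection.query).bind(connection);

async function checkImages(data) {
    for (let i = 0; i < data.data.length; i++) { // let: each iteration keeps its own bindings
        const imageObject = data.data[i];
        const id = imageObject.id;
        const url = imageObject.images.standard_resolution.url;
        const rows = await query('SELECT COUNT(*) AS imageIDCount FROM images WHERE id = ?', [id]);
        if (rows[0].imageIDCount === 0) {
            console.log(id + " doesn't exist");
            // await insertImage(id, url); // once insertImage returns a promise
        }
    }
}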

Related

Node.js trying to use async/await to get data from a mysql select before using the variable

I need to fetch some data with a MySQL query, and use the result to build up an email message with Node.
I put the code inside a function, but the call to the query still appears to be async, as the result is never available before the end of the function, and the returned variable is always empty.
I tried different approaches with async/await, but the execution still seems async.
With the following code I only get as far as "step 3" in the console log before the function returns; "step 4" shows up later, no matter what I try to do at the end of the function call.
async function querydb(utente) {
    console.log("sono in querydb");
    var messageHTMLAllegati = "";
    var risultatoquery;
    console.log("step 1");
    var connection = mysql.createConnection({
        host     : process.env.IP_address,
        user     : process.env.nome_utente,
        password : process.env.password,
        port     : process.env.port,
        database : process.env.DB,
    });
    console.log("step 2");
    const query = util.promisify(connection.query).bind(connection);
    (async () => {
        try {
            console.log("step 3");
            var result = await query('SELECT Link FROM Link_Foto where ID_Utente="' + utente + '"');
            var i = result.length;
            console.log("step 4");
            var j;
            for (j = 0; j < i; j++) {
                messageHTMLAllegati += 'Immagine ' + (j + 1) + '<BR>';
                console.log("print the link found in the DB and added to the text to be printed" + result[j].Link);
            }
        } finally {
            connection.end();
        }
    })()
    return messageHTMLAllegati;
}
I expect the final variable "messageHTMLAllegati" to contain some text plus the query fields needed, but it always comes back empty. In the log I can see that the variable does get filled, but only after the function has returned, so the text used to put the email together is empty in the DB section.
async/await only works when the awaited function returns a promise. Functions like query in mysql use a callback to deliver the result. So if you want to use it with async/await, wrap it in another function and resolve a promise from its callback, like this:
function query_promise(q_string) {
    return new Promise((resolve, reject) => {
        query(q_string, (err, result) => {
            if (err) return reject(err);
            resolve(result);
        });
    });
}
then in your code:
var result = await query_promise('SELECT Link FROM Link_Foto where ID_Utente="' + utente + '"');
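Note, too, that even with a promise-returning query, the original querydb returns before the inner (async () => { ... })() finishes, because that IIFE is never awaited. A rough sketch of the function with the query awaited directly (same util.promisify approach as in the question; a ? placeholder is swapped in for the original string concatenation, which is open to SQL injection):

async function querydb(utente) {
    var messageHTMLAllegati = "";
    var connection = mysql.createConnection({
        host     : process.env.IP_address,
        user     : process.env.nome_utente,
        password : process.env.password,
        port     : process.env.port,
        database : process.env.DB,
    });
    const query = util.promisify(connection.query).bind(connection);
    try {
        var result = await query('SELECT Link FROM Link_Foto WHERE ID_Utente = ?', [utente]);
        for (var j = 0; j < result.length; j++) {
            messageHTMLAllegati += 'Immagine ' + (j + 1) + '<BR>';
        }
    } finally {
        connection.end();
    }
    return messageHTMLAllegati; // callers must await querydb(...) to see the filled value
}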

Node.js asynchronous nested mysql queries

To set the scenario for the code: the database stores Documents, and each document can have Images associated with it.
I have been trying to write a route that queries the database for each Document that has Images related to it, storing this data in JSON which is returned to the ajax request when completed, so the data can be viewed on the page. The closest I have got so far is the attempt below (see code).
router.post('/advanced_image_search', userAuthenticated, function(req, res, next) {
    async.waterfall([
        // First function is to get each document which has an image related
        function getDocuments(callback) {
            connection.query(`SELECT DISTINCT(Document.document_id), Document.doc_name, Document.doc_version_no, Document.doc_date_added
                              FROM Document WHERE doc_type = 'image'`, function(err, results) {
                if (err) {
                    callback(err, null);
                    return;
                }
                // The object containing the array where the data from the db needs to be stored
                var documents = {
                    'docs': []
                };
                // Iterate through each result found by the first db query (getDocuments)
                results.forEach(function(result) {
                    // New object to store each document
                    var document = {};
                    document.entry = result;
                    // This is the array where each image associated with a document will be stored
                    document.entry.images = [];
                    // Push each document to the array (above)
                    documents.docs.push(document);
                    var doc_id = result.document_id;
                });
                // Pass the results as 'documents' to the next function
                callback(null, documents);
            });
        },
        function getImages(documents, callback) {
            // Variable assignment to the array of documents
            var doc_array = documents.docs;
            // For each of the objects within the document array
            async.forEachOf(doc_array, function(doc, key, callback) {
                // For each object do the following series of functions
                async.waterfall([
                    function handleImages(callback) {
                        // The id of the document to get the images for
                        var doc_id = doc.entry.document_id;
                        connection.query(`SELECT * FROM Image, Document WHERE Image.document_id = '${doc_id}' AND Image.document_id = Document.document_id`, function(err, rows) {
                            if (err) {
                                callback(err, null);
                                return;
                            }
                            callback(null, rows);
                        });
                    },
                    // Function below to push each image to the document.entry.images array
                    function pushImages(rows, callback) {
                        // If multiple images are found for that document, the loop iterates through each, pushing to the images array
                        for (var j = 0; j < rows.length; j++) {
                            // Create a new object for each image found so the data can be stored within this object, then pushed into the images array
                            var image = {
                                'image_name': rows[j].image_name
                            };
                            doc.entry.images.push(image);
                        }
                        callback(null, doc_array);
                    }
                ], function(err, doc_array) {
                    if (err) {
                        console.log('Error in second waterfall callback:');
                        callback(err);
                        return;
                    }
                    console.log(doc.entry);
                    // callback(null, doc_array);
                });
            }, function(err, doc_array) {
                if (err) {
                    callback(err);
                    return;
                }
                callback(null, doc_array);
            });
            callback(null, doc_array);
        }
    ], function(err, doc_array) {
        if (err) {
            console.log('Error is: ' + err);
            return;
        }
        // The response that should return each document with each related image in the JSON
        res.send(doc_array);
    });
});
At the moment the results returned are:
1: {entry: {document_id: 1, doc_name: "DocumentNameHere", doc_version_no: 1,…}}
    entry: {document_id: 1, doc_name: "DocumentNameHere", doc_version_no: 1,…}
        doc_date_added: "2016-10-24"
        doc_name: "DocumentNameHere"
        doc_version_no: 1
        document_id: 1
        images: []
As can be seen above, the images array remains empty, even though testing shows the images are being found (console.log).
I hope someone is able to assist with this, as I am struggling to find the problem with this complex one.
Thanks
There are several async operations going on here and each operation needs a callback. See revised code:
router.post('/advanced_image_search', userAuthenticated, function(req, res, next) {
    var getDocuments = function(next) {
        // Function for getting documents from the DB
        var query = `SELECT DISTINCT(Document.document_id), Document.doc_name, Document.doc_version_no, Document.doc_date_added FROM Document WHERE doc_type = 'image'`; // Set the query
        connection.query(query, function(err, results) {
            // Run the query async
            if (err) {
                // If err, end execution
                next(err, null);
                return;
            }
            var documentList = []; // Array for holding docs
            for (var i = 0; i < results.length; i++) {
                // Loop over results, construct the document and push to an array
                var documentEntry = results[i];
                var documentObject = {};
                documentObject.entry = documentEntry;
                documentObject.entry.images = [];
                documentObject.id = documentEntry.document_id;
                documentList.push(documentObject);
            }
            next(null, documentList); // Pass to next async operation
        });
    };
    var getImages = function(documents, next) {
        // Function for getting the images for each document
        var finalDocs = []; // Blank array for final documents with images
        var completed = 0;  // Counter, since the queries finish out of order
        documents.forEach(function(document) {
            // Loop over each document and construct the query
            var query = `SELECT * FROM Image, Document WHERE Image.document_id = '${document.id}' AND Image.document_id = Document.document_id`;
            connection.query(query, function(err, images) {
                // Execute the query async
                if (err) {
                    // Pass the error to the callback
                    next(err, null);
                    return;
                }
                var processedDoc = processImages(document, images); // Call a helper function to process all images into the document object
                finalDocs.push(processedDoc); // Push the processed doc
                completed++;
                if (completed === documents.length) {
                    // If there are no more documents, move onto the next async operation
                    next(null, finalDocs);
                }
            });
        });
    };
    var processImages = function(doc, images) {
        for (var i = 0; i < images.length; i++) {
            // Loop over each document image - construct object
            var image = {
                'image_name': images[i].image_name
            };
            doc.entry.images.push(image); // Push image into document object
        }
        return doc; // Return processed doc
    };
    getDocuments(function(err, docs) {
        if (err) {
            // Your error handler
        }
        if (docs) {
            getImages(docs, function(err, finalDocs) {
                if (err) {
                    // Your error handler
                }
                if (finalDocs) {
                    console.log(finalDocs);
                    res.status(200).json(finalDocs); // Send response
                }
            });
        }
    });
});
First we create a function to get documents. This function accepts a callback as its parameter. We run our query, construct our doc list, and return the document list by executing our callback.
Next we run a function to get the images for each document. This function accepts our document list and a callback as its parameters. It retrieves images for each document and calls a helper function (sync).
Our helper function processes the images into each document and returns the processed document.
We then finish our operations by returning an array of processed documents via the second callback.
Other notes
We could tidy this up by structuring this procedural-style code into a contained object
The nesting of the function executions at the end of the document could be cleaned up further
I've avoided using the async library, as that keeps the underlying callback model easier to understand
Event emitters could be used to flatten the callbacks - see https://nodejs.org/dist/latest-v7.x/docs/api/events.html
Hope this helps
Dylan
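For comparison, here is a rough modern sketch of the same route with promisified queries and Promise.all, assuming Node 8+ (table and column names are taken from the question; errors are handed to Express via next):

const util = require('util');
const query = util.promisify(connection.query).bind(connection);

router.post('/advanced_image_search', userAuthenticated, async function(req, res, next) {
    try {
        const docs = await query(
            `SELECT DISTINCT(Document.document_id), Document.doc_name,
                    Document.doc_version_no, Document.doc_date_added
             FROM Document WHERE doc_type = 'image'`);
        // Fetch the images for every document in parallel
        await Promise.all(docs.map(async function(doc) {
            const rows = await query(
                'SELECT image_name FROM Image WHERE Image.document_id = ?',
                [doc.document_id]);
            doc.images = rows.map(function(r) { return { image_name: r.image_name }; });
        }));
        res.send({ docs: docs.map(function(entry) { return { entry: entry }; }) });
    } catch (err) {
        next(err);
    }
});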

Yahoo-Finance Query Speed

I'm currently working on a project that involves querying yahoo-finance for many different ticker symbols. The bottleneck is acquiring the data from yahoo, so I was wondering if there is a way I might go about speeding this up.
If I used multiple machines to query and then aggregated the data, would that help? I only have one physical machine; how might I go about doing that?
Thanks!
EDIT: Currently, I'm using Node.js, yahoo-finance, and Q.deferred to ask yahoo for historical data. Then, once all the promises are fulfilled (for each ticker), I'm doing a Q.all() to persist the data.
var data = [];
tickers = ["goog", "aapl", ...];
...
Q.all(_.map(tickers, function(symbol) {
    return getYahooPromise(symbol);
}))
.done(function() { persistData(data); });
getYahooPromise retrieves data for the ticker symbol and pushes it into the data array. Once all promises are resolved, the data is persisted in a MySQL database.
SECOND EDIT:
More code:
var sequentialCalls = [];
for (var i = 0; i < tickers.length / chunkSize; i++) {
    sequentialCalls.push(persistYahooChunk);
}
sequentialCalls.push(function(callback) {
    connection.end();
    callback();
});
async.series(sequentialCalls);

exports.persistYahooChunk = function(callback) {
    console.log("Starting yahoo query");
    var currentTickers = tickers.slice(currentTickerIndex, currentTickerIndex + chunkSize);
    return yahooFinance.historical({
        symbols: currentTickers,
        from: "2015-01-28",
        to: "2015-02-05"
    }).then(function(result) {
        console.log("Query " + currentTickerIndex + "/" + tickers.length + " completed");
        currentTickerIndex += chunkSize;
        // add valid data
        var toPersist = _.map(result, function(quotes, symbol) {
            return [symbol, quotes.length != 0];
        });
        var query = "INSERT INTO `ticker` (`symbol`, `valid`) VALUES ?";
        connection.query(query, [toPersist], function(err, result) {
            if (err) {
                console.log(err);
            }
            //console.log(result);
            callback();
        });
    });
};
The bottleneck is that you are doing one query per ticker.
Depending on the data you need to pull, if you can do a single query that includes all your tickers, it will be much faster.
Here is an example if you need to get all current prices for a list of tickers with a single query:
http://finance.yahoo.com/webservice/v1/symbols/A,B,C,D,E/quote?format=json
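A minimal sketch of hitting that endpoint for a batch of symbols (assuming the webservice endpoint above still responds with JSON as it did at the time):

var http = require('http');

var tickers = ['goog', 'aapl', 'msft'];
var url = 'http://finance.yahoo.com/webservice/v1/symbols/' +
          tickers.join(',') + '/quote?format=json';

http.get(url, function(res) {
    var body = '';
    res.on('data', function(chunk) { body += chunk; });
    res.on('end', function() {
        // one round trip for the whole list instead of one request per ticker
        var quotes = JSON.parse(body);
        console.log(quotes);
    });
}).on('error', console.error);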

Creating synchronous queries with node-mysql

I'm trying to ensure that one mysql query leads to another and is not completed until all of its child queries are completed. For example, I start with one select, stream rows, and execute subsequent queries from each row result. This is doable with callbacks, but I end up running out of memory, so I'd like to slow down the process and run it in batches; but due to the async nature of the dispatch, I can't keep things in phase and end the connection after all the rows have been processed.
Here's an example:
var query = conn.query('select id from table1 limit 10');
query.on('result', function(row) {
    console.log('query1', row);
    var query2 = conn.query('select id from books where id = ? ', [row.id]);
    query2.on('result', function(row2) {
        console.log('query2', row2);
        var query3 = conn.query('insert into test (id) values (?)', [row2.id]);
        query3.on('result', function(row3) {
            console.log(row3);
        });
    });
});
query.on('end', function() {
    conn.end();
});
The above fails, because there are still rows to process in query3 after the initial query is ended.
Any thoughts? The actual code is even more complicated, because I have to process xml from the subsequent queries and fire off even more inserts as I loop through the batch.
Thanks!
I would suggest this solution with async module:
var async = require("async");
// connection instance
var conn;
// here goes task serving logic
// if any async function should be finished before drain callback, push them into q
var solvers = {
    query: function(q, task, row) {
        console.log('query1', row);
        q.push({
            solver: "query2",
            req: "select id from books where id = ?",
            reqArgs: [row.id]
        });
    },
    query2: function(q, task, row) {
        console.log('query2', row);
        q.push({
            solver: "query3",
            req: "insert into test (id) values (?)",
            reqArgs: [row.id]
        });
    },
    query3: function(q, task, row) {
        console.log(row);
    }
};
// here is a queue of tasks
var q = async.queue(function(task, cb) {
    var query = conn.query(task.req, task.reqArgs);
    query.on("end", cb);
    query.on("result", function(row) {
        solvers[task.solver](q, task, row);
    });
}, 2); // limit of parallel queries
// when every request has reached "end"
q.drain = function() {
    conn.end();
    // continue from here
};
// initial task
q.push({
    solver: "query",
    req: "select id from table1 limit 10",
    reqArgs: []
});
But still, I'm not sure that making requests ID by ID is a good solution.
Maybe, I'm just not aware of a full problem.
@glukki, thanks for the great answer and the reference to async. I went with a permutation of your code and two async queues which do a 'chomp and chew', using a single connection plus a pool of connections to process a 100K-row select into 1.2M row inserts. It worked amazingly well and took less than 10 minutes. Here's the full implementation, minus the module and connection setup. I hope this helps someone else too. Thanks again!
function populateMesh(row, callback) {
    xmlParser.parseString('<root>' + row.mesh_heading_list + '</root>', function(err, result) {
        var q2 = async.queue(function(task, cb) {
            pool.getConnection(function(err, cnx) {
                cnx.query('INSERT INTO abstract_mesh (mesh_id, abstract_id, major_topic) SELECT mesh_descriptor.id, ?, ? FROM mesh_descriptor WHERE mesh_descriptor.name = ?', [task.id, task.majorTopic, task.descriptorName], function(err, result) {
                    if (err) { throw err; }
                    cnx.release();
                    cb();
                });
            });
        }, 50);
        q2.drain = function() {
            //console.log('all mesh processed');
            callback();
        };
        if (!(result.root instanceof Object)) {
            //console.log('its not obj!', row.id);
            q2.push({id: row.id, majorTopic: 'N', descriptorName: 'Null'}, function(err) {});
        }
        for (var i in result.root.MeshHeading) {
            // console.log('in loop', result.root.MeshHeading[i].DescriptorName);
            if (typeof result.root.MeshHeading[i].DescriptorName === 'undefined') {
                q2.push({id: row.id, majorTopic: 'N', descriptorName: 'Emergency'}, function(err) {});
            }
            for (var j in result.root.MeshHeading[i].DescriptorName) {
                var descriptorName = result.root.MeshHeading[i].DescriptorName[j]._;
                var majorTopic = result.root.MeshHeading[i].DescriptorName[j].$.MajorTopicYN;
                q2.push({id: row.id, majorTopic: majorTopic, descriptorName: descriptorName}, function(err) {});
            }
        }
    });
}
// here goes task serving logic
// if any async function should be finished before drain callback, push them into q
var q = async.queue(function(row, callback) {
    console.log('got id: ' + row.id);
    populateMesh(row, function() {
        callback();
    });
}, 10);
q.drain = function() {
    console.log('all items have been processed');
    conn.end(function(err) {
        console.log('connection ended');
    });
    pool.end(function(err) {
        console.log('pool closed');
    });
};
var truncate = conn.query('truncate abstract_mesh');
var select = conn.query('SELECT id, mesh_heading_list FROM pubtbl');
select.on('result', function(result) {
    // console.log(result);
    q.push(result, function(err) {
        //console.log('finished processing row');
    });
});
In my opinion the best solution is to make the code synchronous in a very easy way.
You could use the "synchronize" package.
Just
npm install synchronize
Then var sync = require('synchronize');
Put the logic which should be synchronous into a fiber by using
sync.fiber(function() {
    //put your logic here
});
An example for two mysql queries:
var express = require('express');
var bodyParser = require('body-parser');
var mysql = require('mysql');
var sync = require('synchronize');

var db = mysql.createConnection({
    host     : 'localhost',
    user     : 'user',
    password : 'password',
    database : 'database'
});

db.connect(function(err) {
    if (err) {
        console.error('error connecting: ' + err.stack);
        return;
    }
});

function saveSomething() {
    var post = {title: 'test'}; // the row to insert into the main table
    //no callback here; the result is in "query"
    var query = sync.await(db.query('INSERT INTO mainTable SET ?', post, sync.defer()));
    var newId = query.insertId;
    post = {foreignKey: newId};
    //this query can be async, because it doesn't matter in this case
    db.query('INSERT INTO subTable SET ?', post, function(err, result) {
        if (err) throw err;
    });
}
When "saveSomething()" is called, it inserts a row in a main table and receives the last inserted id. After that the code below will be executed. No need for nesting promises or stuff like that.
This is what I did:
db.query(
    "select name from USER where name = ?",
    ["test"],
    (err, result) => {
        if (err) {
            console.log("Error : ", err);
        } else if (result.length <= 0) {
            res.json("Not Found");
        } else {
            console.log("name found, executing update query!");
            updateAgeIfUserFound("test", 30); // Calling function with 2nd query
        }
    }
);

//Update age only if name is present
function updateAgeIfUserFound(name, age) {
    if (name) {
        db.query(
            "update USER set age = ? where name = ?",
            [age, name],
            (err, result) => {
                if (err) throw err;
                console.log("Name Updated");
                res.json("Name Updated");
            }
        );
    }
}
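For what it's worth, the same conditional flow reads more directly with a promisified query (a sketch, assuming Node 8+ and the same db connection; table and column names are taken from the snippet above):

const util = require('util');
const query = util.promisify(db.query).bind(db);

async function updateAgeIfUserFound(name, age) {
    const rows = await query('select name from USER where name = ?', [name]);
    if (rows.length <= 0) {
        return 'Not Found';
    }
    // the name exists, so it is safe to run the update
    await query('update USER set age = ? where name = ?', [age, name]);
    return 'Name Updated';
}

// usage:
// updateAgeIfUserFound('test', 30).then(console.log).catch(console.error);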

NodeJS MySQL Dump

I've attempted to write a basic cron script to run and 'dump' a mysql database. For some reason, when it 'successfully saves the file', it does create the file, but it is empty. If instead of saving the file, I perform a console.log, it prints an empty string. Any thoughts on what I may be doing wrong?
Thanks in advance.
var mysql_backup = function() {
    this.backup = '';
    this.mysql = require('mysql'),
    this.init = function() {
        this.connection = this.mysql.createConnection({
            user: 'root',
            password: 'root',
            database: 'test'
        });
    }
    this.query = function(sql, callback) {
        this.connection.query(sql, function(error, results, fields) {
            if (error) {
                throw error;
            }
            if (results.length > 0) {
                callback(results);
            }
        });
    }
    this.get_tables = function(callback) {
        var me = this;
        me.query('SHOW TABLES',
            function(tables) {
                for (var table in tables) {
                    me.query(
                        'SHOW CREATE TABLE ' + tables[table].Tables_in_test,
                        function(r) {
                            for (var t in r) {
                                me.backup += "DROP TABLE " + r[t].Table + "\n\n";
                                me.backup += r[t]["Create Table"] + "\n\n";
                            }
                        }
                    )
                }
                me.save_backup();
            });
    }
    this.save_backup = function() {
        var fs = require('fs');
        fs.writeFile("./backup_test.txt", this.backup, function(err) {
            if (err) {
                console.log(err);
            } else {
                console.log("The file was saved!");
            }
        });
    }
};
var db = new mysql_backup;
db.init();
db.get_tables();
db.connection.destroy();
The code as written didn't even get as far as saving a file for me. There seem to be a few issues; not sure if this is the actual code or if some things got lost in the copy-paste. However, based on what you've got:
A big one is that you never connect to the database in your code with connection.connect().
The code you want to run once connected should be inside the connection.connect() callback, e.g.
connection.connect(function(err) {
    if (err)
        throw new Error('Panic');
    // if no error, we are off to the races...
});
However, even if you quickly refactor your code to wrap your last lines inside that connect callback, you'll still have problems, because you are destroying the connection before the various SQL calls are made, so you will want to move that code into some sort of final callback.
Even after you do that, you'll still have an empty file, because you're calling save_backup from your 'SHOW TABLES' callback rather than after the backup has actually been populated, which happens in the inner callback where you get the CREATE TABLE statement and append it to the backup property.
This is a minimal rewriting of your code which will do what you intend. An important thing to note is the "counter" which manages when to write the file and close the connection. Other changes I would make if it were mine include:
Using 'self' instead of 'me'
Using a numeric for loop rather than the for (... in ...) syntax
Having my own callbacks follow the node convention of (err, stuff)
A more substantial change is that I would rewrite this to use promises, as doing so can spare you some grief with the confusion inherent in deeply nested callbacks. I personally like the Q library, but there are several options here.
Hope this helped.
var mysql_backup = function() {
    this.backup = '';
    this.mysql = require('mysql');
    this.init = function() {
        this.connection = this.mysql.createConnection({
            user     : 'root',
            password : 'root',
            database : 'test'
        });
    };
    this.query = function(sql, callback) {
        this.connection.query(sql, function(error, results, fields) {
            if (error) {
                throw error;
            }
            if (results.length > 0) {
                callback(results);
            }
        });
    };
    this.get_tables = function(callback) {
        var counter = 0;
        var me = this;
        this.query('SHOW TABLES',
            function(tables) {
                for (table in tables) {
                    counter++;
                    me.query(
                        'SHOW CREATE TABLE ' + tables[table].Tables_in_mvc,
                        function(r) {
                            for (t in r) {
                                me.backup += "DROP TABLE " + r[t].Table + "\n\n";
                                me.backup += r[t]["Create Table"] + "\n\n";
                            }
                            counter--;
                            if (counter === 0) {
                                me.save_backup();
                                me.connection.destroy();
                            }
                        }
                    )
                }
            });
    };
    this.save_backup = function() {
        var fs = require('fs');
        fs.writeFile("./backup_test.txt", this.backup, function(err) {
            if (err) {
                console.log(err);
            } else {
                console.log("The file was saved!");
            }
        });
    }
};
var db = new mysql_backup;
db.init();
db.connection.connect(function(err) {
    if (err) console.log(err);
    db.get_tables(function(x) {;});
});
Update: If you are curious, here is a heavily-commented implementation using promises. Note that without the comments explaining the Q promise library functions, it is somewhat shorter than the original version and also offers more comprehensive error handling.
var MysqlBackup = function(connectionInfo, filename) {
    var Q = require('q');
    var self = this;
    this.backup = '';
    // my personal preference is to simply require() inline if I am only
    // going to use something a single time. I am certain some will find
    // this a terrible practice
    this.connection = require('mysql').createConnection(connectionInfo);

    function getTables() {
        // return a promise from invoking the node-style 'query' method
        // of self.connection with parameter 'SHOW TABLES'.
        return Q.ninvoke(self.connection, 'query', 'SHOW TABLES');
    };

    function doTableEntries(theResults) {
        // note that because promises only pass a single parameter around,
        // if the 'denodeify-ed' callback has more than two parameters (the
        // first being the err param), the parameters will be stuffed into
        // an array. In this case, the content of the 'fields' param of the
        // mysql callback is in theResults[1]
        var tables = theResults[0];
        // create an array of promises resulting from another Q.ninvoke()
        // query call, chained to .then(). Note that then() expects a function,
        // so recordEntry() in fact builds and returns a new one-off function
        // for actually recording the entry (see recordEntry() impl. below)
        var tableDefinitionGetters = [];
        for (var i = 0; i < tables.length; i++) {
            // I noticed in your original code that your Tables_in_[] did not
            // match your connection details ('mvc' vs 'test'), but the below
            // should work and is a more generalized solution
            var tableName = tables[i]['Tables_in_' + connectionInfo.database];
            tableDefinitionGetters.push(Q.ninvoke(self.connection, 'query', 'SHOW CREATE TABLE ' + tableName)
                                         .then(recordEntry(tableName)));
        }
        // now that you have an array of promises, you can use Q.allSettled
        // to return a promise which will be settled (resolved or rejected)
        // when all of the promises in the array are settled. Q.all is similar,
        // but its promise will be rejected (immediately) if any promise in the
        // array is rejected. I tend to use allSettled() in most cases.
        return Q.allSettled(tableDefinitionGetters);
    };

    function recordEntry(tableName) {
        return function(createTableQryResult) {
            self.backup += "DROP TABLE " + tableName + "\n\n";
            self.backup += createTableQryResult[0][0]["Create Table"] + "\n\n";
        };
    };

    function saveFile() {
        // Q.denodeify returns a promise-enabled version of a node-style function
        // the below is probably excessively terse with its immediate invocation
        return (Q.denodeify(require('fs').writeFile))(filename, self.backup);
    }

    // with the above all done, now you can actually make the magic happen,
    // starting with the promise-returning Q.ninvoke to connect to the DB
    // note that the successive .then()s will be executed iff (if and only
    // if) the preceding item resolves successfully, .catch() will get
    // executed in the event of any upstream error, and finally() will
    // get executed no matter what.
    Q.ninvoke(this.connection, 'connect')
        .then(getTables)
        .then(doTableEntries)
        .then(saveFile)
        .then(function() { console.log('Success'); })
        .catch(function(err) { console.log('Something went awry', err); })
        .finally(function() { self.connection.destroy(); });
};

var myConnection = {
    host     : '127.0.0.1',
    user     : 'root',
    password : 'root',
    database : 'test'
};
// I have left this as a constructor-based calling approach, but the
// constructor just does it all so I just ignore the return value
new MysqlBackup(myConnection, './backup_test.txt');
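As an aside, if shelling out to the command line is an option, the mysqldump utility produces a complete dump (schema and data) with far less code; a minimal sketch, assuming mysqldump is installed and on the PATH:

var exec = require('child_process').exec;

// credentials match the connection info used above; adjust as needed
exec('mysqldump -u root -proot test > ./backup_test.sql', function(err, stdout, stderr) {
    if (err) {
        console.log(err);
    } else {
        console.log('The file was saved!');
    }
});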