I've attempted to write a basic cron script to run and 'dump' a mysql database. For some reason, when it 'successfully saves the file', it does create the file, but it is empty. If instead of saving the file, I perform a console.log, it prints an empty string. Any thoughts on what I may be doing wrong?
Thanks in advance.
var mysql_backup = function(){

    this.backup = '';
    this.mysql = require('mysql'),

    this.init = function(){
        this.connection = this.mysql.createConnection({
            user: 'root',
            password: 'root',
            database: 'test'
        });
    }

    this.query = function(sql, callback) {
        this.connection.query(sql, function (error, results, fields) {
            if (error) {
                throw error;
            }
            if (results.length > 0) {
                callback(results);
            }
        });
    }

    this.get_tables = function(callback){
        var me = this;
        me.query('SHOW TABLES',
            function(tables) {
                for (var table in tables){
                    me.query(
                        'SHOW CREATE TABLE ' + tables[table].Tables_in_test,
                        function(r){
                            for (var t in r) {
                                me.backup += "DROP TABLE " + r[t].Table + "\n\n";
                                me.backup += r[t]["Create Table"] + "\n\n";
                            }
                        }
                    )
                }
                me.save_backup();
            });
    }

    this.save_backup = function(){
        var fs = require('fs');
        fs.writeFile("./backup_test.txt", this.backup, function(err) {
            if(err) {
                console.log(err);
            } else {
                console.log("The file was saved!");
            }
        });
    }
};

var db = new mysql_backup;
db.init();
db.get_tables();
db.connection.destroy();
The code as written didn't even get as far as saving a file for me. There seem to be a few issues. Not sure if this is the actual code or some things got lost in the copy and paste. However, based on what you've got:
A big one is that you never connect to the database in your code with connection.connect().
The code you want to run once connected should be inside the connection.connect() callback. e.g.
connection.connect(function (err, empty) {
    if (err)
        throw new Error('Panic');

    // if no error, we are off to the races...
});
However, even if you quickly refactor your code to wrap your last lines inside that connect callback, you'll still have problems, because you are destroying the connection before the various SQL calls are made, so you will want to move that code into some sort of final callback.
Even after you do that, you'll still have an empty file, because you're calling save_backup from your 'SHOW TABLES' callback rather than after you have actually populated it via the inner callback where you get the CREATE TABLE statement and populate the backup property.
This is the minimal rewriting of your code which will do what you are intending. An important thing to note is the "counter" which manages when to write the file and close the connection. I would make other changes if it were mine, including:
Using 'self' instead of 'me'
Using a numeric for loop rather than the for (... in ...) syntax
Having my own callbacks follow the Node convention of (err, stuff) (see the sketch just below)
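To illustrate that last point, a query helper following the (err, results) convention might look roughly like this (just a sketch; the rewrite below keeps your original callback shape):

this.query = function (sql, callback) {
    this.connection.query(sql, function (error, results, fields) {
        if (error) {
            return callback(error);   // hand the error to the caller instead of throwing
        }
        callback(null, results);      // first argument is reserved for errors
    });
};

// a caller then checks the error first:
// me.query('SHOW TABLES', function (err, tables) {
//     if (err) { /* handle it */ return; }
//     // use tables here
// });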
A more substantial change is that I would rewrite this to use promises, as doing so can spare you some grief with the confusion inherent in deeply nested callbacks. I personally like the Q library, but there are several options here.
Hope this helped.
var mysql_backup = function(){

    this.backup = '';
    this.mysql = require('mysql');

    this.init = function(){
        this.connection = this.mysql.createConnection({
            user     : 'root',
            password : 'root',
            database : 'test'
        });
    };

    this.query = function(sql, callback) {
        this.connection.query(sql, function (error, results, fields) {
            if (error) {
                throw error;
            }
            if (results.length > 0) {
                callback(results);
            }
        });
    };

    this.get_tables = function(callback){
        var counter = 0;
        var me = this;
        this.query('SHOW TABLES',
            function(tables) {
                for (var table in tables){
                    counter++;
                    me.query(
                        'SHOW CREATE TABLE ' + tables[table].Tables_in_test,
                        function(r){
                            for (var t in r) {
                                me.backup += "DROP TABLE " + r[t].Table + "\n\n";
                                me.backup += r[t]["Create Table"] + "\n\n";
                            }
                            counter--;
                            if (counter === 0){
                                me.save_backup();
                                me.connection.destroy();
                            }
                        }
                    );
                }
            });
    };

    this.save_backup = function(){
        var fs = require('fs');
        fs.writeFile("./backup_test.txt", this.backup, function(err) {
            if(err) {
                console.log(err);
            } else {
                console.log("The file was saved!");
            }
        });
    }
};

var db = new mysql_backup;
db.init();
db.connection.connect(function (err){
    if (err) console.log(err);
    db.get_tables(function(x){;});
});
Update: If you are curious, here is a heavily-commented implementation using promises. Note that without the comments explaining the Q promise library functions, it is somewhat shorter than the original version and also offers more comprehensive error handling.
var MysqlBackup = function(connectionInfo, filename){

    var Q = require('q');
    var self = this;
    this.backup = '';

    // my personal preference is to simply require() inline if I am only
    // going to use something a single time. I am certain some will find
    // this a terrible practice
    this.connection = require('mysql').createConnection(connectionInfo);

    function getTables(){
        // return a promise from invoking the node-style 'query' method
        // of self.connection with parameter 'SHOW TABLES'.
        return Q.ninvoke(self.connection, 'query', 'SHOW TABLES');
    };

    function doTableEntries(theResults){
        // note that because promises only pass a single parameter around,
        // if the 'denodeify-ed' callback has more than two parameters (the
        // first being the err param), the parameters will be stuffed into
        // an array. In this case, the content of the 'fields' param of the
        // mysql callback is in theResults[1]
        var tables = theResults[0];

        // create an array of promises resulting from another Q.ninvoke()
        // query call, chained to .then(). Note that then() expects a function,
        // so recordEntry() in fact builds and returns a new one-off function
        // for actually recording the entry (see recordEntry() impl. below)
        var tableDefinitionGetters = [];
        for (var i = 0; i < tables.length; i++){
            // rather than hard-coding the Tables_in_<database> column name,
            // deriving it from the connection details as below is a more
            // generalized solution
            var tableName = tables[i]['Tables_in_' + connectionInfo.database];
            tableDefinitionGetters.push(Q.ninvoke(self.connection, 'query', 'SHOW CREATE TABLE ' + tableName)
                .then(recordEntry(tableName)) );
        }

        // now that you have an array of promises, you can use Q.allSettled
        // to return a promise which will be settled (resolved or rejected)
        // when all of the promises in the array are settled. Q.all is similar,
        // but its promise will be rejected (immediately) if any promise in the
        // array is rejected. I tend to use allSettled() in most cases.
        return Q.allSettled(tableDefinitionGetters);
    };

    function recordEntry (tableName){
        return function(createTableQryResult){
            self.backup += "DROP TABLE " + tableName + "\n\n";
            self.backup += createTableQryResult[0][0]["Create Table"] + "\n\n";
        };
    };

    function saveFile(){
        // Q.denodeify returns a promise-enabled version of a node-style function
        // the below is probably excessively terse with its immediate invocation
        return (Q.denodeify(require('fs').writeFile))(filename, self.backup);
    }

    // with the above all done, now you can actually make the magic happen,
    // starting with the promise-returning Q.ninvoke to connect to the DB
    // note that the successive .then()s will be executed iff (if and only
    // if) the preceding item resolves successfully, .catch() will get
    // executed in the event of any upstream error, and finally() will
    // get executed no matter what.
    Q.ninvoke(this.connection, 'connect')
        .then(getTables)
        .then(doTableEntries)
        .then(saveFile)
        .then( function() { console.log('Success'); } )
        .catch( function(err) { console.log('Something went awry', err); } )
        .finally( function() { self.connection.destroy(); } );
};

var myConnection = {
    host     : '127.0.0.1',
    user     : 'root',
    password : 'root',
    database : 'test'
};

// I have left this as a constructor-based calling approach, but the
// constructor just does it all so I just ignore the return value
new MysqlBackup(myConnection, './backup_test.txt');
Related
I need to fetch some data with a MySQL query and use the result to build up an email message with Node.
I put the code inside a function, but the call to the query appears to still be async, as the result is never given back before the end of the function, and the returned variable is always empty.
I tried a different approach with async/await, but the execution still seems async.
In the following code I only get as far as step 3 in the console log; step 4 happens later, no matter what I try to do at the end of the function call.
async function querydb (utente){

    console.log("sono in querydb");
    var messageHTMLAllegati = "";
    var risultatoquery;

    console.log("step 1");
    var connection = mysql.createConnection({
        host     : process.env.IP_address,
        user     : process.env.nome_utente,
        password : process.env.password,
        port     : process.env.port,
        database : process.env.DB,
    });
    console.log("step 2");

    const query = util.promisify(connection.query).bind(connection);

    (async () => {
        try {
            console.log("step 3");
            var result = await query('SELECT Link FROM Link_Foto where ID_Utente="' + utente + '"');
            var i = result.length;
            console.log("step 4");
            var j;
            for (j = 0; j < i; j++) {
                messageHTMLAllegati += 'Immagine ' + (j+1) + '<BR>';
                console.log("print the link found in the DB and added to the text to be printed" + result[j].Link);
            }
        } finally {
            connection.end();
        }
    })()

    return messageHTMLAllegati;
}
I expect the final variable "messageHTMLAllegati" to contain some text plus the query fields needed, but it always comes back empty. In the log I can see that the variable does get filled, but only after the function has returned, so the part of the email text that comes from the DB is empty.
async/await only works when the awaited function returns a promise. Functions like 'query' in mysql use a callback to deliver the result, so if you want to use them with async/await you should wrap the call in another function that turns the callback result into a promise, like this:
function query_promise(q_string){
    return new Promise((resolve, reject) => {
        // wrap the callback-style connection.query in a promise
        connection.query(q_string, (err, result) => {
            if (err) return reject(err);
            resolve(result);
        });
    });
}
then in your code:
var result = await query_promise('SELECT Link FROM Link_Foto where ID_Utente="' + utente + '"');
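Putting it together, querydb then needs to await the result before returning, instead of running the query in a separate async IIFE. A minimal sketch (assuming the mysql module is required as in the question, and with query_promise declared locally so it sees the connection created inside the function):

// Sketch only: awaiting the query directly so messageHTMLAllegati is filled
// before querydb returns (no separate async IIFE)
async function querydb(utente) {
    var messageHTMLAllegati = "";
    var connection = mysql.createConnection({
        host     : process.env.IP_address,
        user     : process.env.nome_utente,
        password : process.env.password,
        port     : process.env.port,
        database : process.env.DB,
    });

    // local wrapper so query_promise closes over this connection
    function query_promise(q_string) {
        return new Promise((resolve, reject) => {
            connection.query(q_string, (err, result) => {
                if (err) return reject(err);
                resolve(result);
            });
        });
    }

    try {
        var result = await query_promise(
            'SELECT Link FROM Link_Foto where ID_Utente="' + utente + '"');
        for (var j = 0; j < result.length; j++) {
            messageHTMLAllegati += 'Immagine ' + (j + 1) + '<BR>';
            console.log("link found in the DB: " + result[j].Link);
        }
    } finally {
        connection.end();
    }

    return messageHTMLAllegati; // callers must await querydb(...) as well
}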
I am trying to write a little script that reads an XML file and then imports the data I fetch into a MySQL database. The fetching works fine, but the inserting is a problem. I've worked with .forEach to stay async, which helped me a lot and worked fine until I needed to insert the data. When I executed the "INSERT" query outside of the 'loops' everything worked fine, but as soon as I try to execute it inside one of them it just inserts everything once and then stops. Also the console.log() message inside connection.query() doesn't get executed, even though no error gets thrown.
This is my code:
console.log("Running import!");
//Import necessary modules
var fs = require("fs");
var config = require("./config");
var mysql = require("mysql");
var path = require("path");
var xml2js = require("xml2js");
var parser = new xml2js.Parser();
var connection = mysql.createConnection({
host : config.mysql.host,
user : config.mysql.user,
password : config.mysql.password,
database : config.mysql.db
});
connection.connect(function(err) {
if (err) { //throw error
console.error(err.stack);
throw err;
}else{
console.log("Succesfully connected to the database!");
// Get Folders in Folder and iterate through every of them
var dirs = getDirectories(config.misc.path);
dirs.forEach(function(dir, index){
var file = config.misc.path + "\\" + dir + "\\" + config.misc.xml_name;
// read xml file of that directory
fs.readFile(file, function(err, data){
parser.parseString(data, function(err, json){
if(err) throw err;
json.resultset.forecast.forEach(function(forecast, index){
forecast.parking_house.forEach(function(ph, index){
var ph_id = ph["$"].id;
ph.estimate.forEach(function(estimate, index){
var value = estimate.value[0]["_"];
var time = estimate.timestamp[0]["_"];
//console.log(time);
connection.query("INSERT INTO lb_data.estimates VALUES (DEFAULT,'23:00:00', 213, 44);", function(error, results, fields){
if(error) throw error;
//Printing %
var perc = (index / dirs.length * 100);
console.log("Scanned and imported " + Math.round(perc) + "% of the files");
});
});
});
});
});
});
});
}
});
/**
 * Gets the directories in a directory
 * @param {string} srcpath the source path of the directory you want to scan
 * @return {string[]} the names of the subdirectories
 */
function getDirectories (srcpath) {
    return fs.readdirSync(srcpath)
        .filter(file => fs.statSync(path.join(srcpath, file)).isDirectory())
}
A big thanks in advance!
PS: Rest assured that the data in config.json is fine; I've tested that outside the loop, so that is not the problem ...
I actually have a problem saving some data from an array in a mysql database with nodejs.
This is my code
for (var i = 0; i < data.data.length; i++) {
var imageObject = data.data[i];
var url = imageObject.images.standard_resolution.url;
var id = imageObject.id;
var sql = 'SELECT COUNT(*) AS imageIDCount FROM images WHERE id = ?'
var ids
connection.query(sql, [id], function(err, rows, fields) {
console.log(rows[0].imageIDCount);
if (err) throw err;
if (rows[0].imageIDCount == 0) {
console.log(id + " doesn't exist"); // ### the ID at this point is always the last from that array
//insertImage(id, url);
} else {
// console.log("ID exists");
}
});
}
This code runs when I get a response from a REST API via the request framework.
So my problem is that at the point where I get the result from the count query and there is no element with the specific id, I always get the same id. I think that's because I use the same variable "id" there, but how can I fix it? I hope somebody can help me.
In this case, you're a victim of Node's asynchronous event loop. You're executing a synchronous for-loop and defining id:
for (var i = 0; i < data.data.length; i++) {
var id = imageObject.id;
}
This works in normal Javascript if you try and do something with id, but because the database module you're using runs asynchronously, that entire loop will have already completed before your first db query completes, effectively clobbering the value of id.
You'll need to re-write your function to behave asynchronously instead, or use something like node-async to help.
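For the first option, one minimal sketch is to let each iteration capture its own values, for example by switching the for loop to forEach:

// Sketch: forEach gives every iteration its own imageObject, id and url,
// so the asynchronous query callback closes over the right values
data.data.forEach(function(imageObject) {
    var id = imageObject.id;
    var url = imageObject.images.standard_resolution.url;
    var sql = 'SELECT COUNT(*) AS imageIDCount FROM images WHERE id = ?';

    connection.query(sql, [id], function(err, rows, fields) {
        if (err) throw err;
        if (rows[0].imageIDCount == 0) {
            console.log(id + " doesn't exist");
            insertImage(id, url);
        }
    });
});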
Here's a quick example of how the node-async version might look. Note that I didn't write your insertImage function for you; you'll need to rewrite that to support a callback as well.
async.each(data.data, function(imageObject, callback) {

    var sql = 'SELECT COUNT(*) AS imageIDCount FROM images WHERE id = ?'

    connection.query(sql, [imageObject.id], function(err, rows, fields) {
        if (err) return callback(err);

        if (rows[0].imageIDCount == 0) {
            console.log(imageObject.id + " doesn't exist");
            insertImage(imageObject.id, imageObject.images.standard_resolution.url, function(err) {
                callback(err); // fires the callback to async
            })
        } else {
            console.log(imageObject.id + " already exists");
            callback(); // maybe you want an error here too?
        }
    });
}, function(err, results) {
    // all of your db queries are completed
});
I am trying to create a REST API which should connect to an existing table in a MySQL database and return the data corresponding to the parameter we send.
Actually nodejs and strongloop are new to me; this is the first time I am working with them. I have followed their docs and created a table in MySQL by running a file like the one below.
I have followed the commands to create the model, properties etc. from the GitHub docs below:
https://github.com/strongloop/loopback-example-database
create-test-data.js
var server = require('./server');
var dataSource = server.dataSources.accountDB;
var Account = server.models.account;

var accounts = [
    {
        email: 'foo@bar.com',
        created: new Date(),
        modified: new Date()
    }, {
        email: 'bar@bar.com',
        created: new Date(),
        modified: new Date()
    }
];

var count = accounts.length;
dataSource.automigrate('account', function(er) {
    if (er) throw er;

    accounts.forEach(function(account) {
        Account.create(account, function(er, result) {
            if (er) return;
            console.log('Record created:', result);
            count--;
            if (count === 0) {
                console.log('done');
                dataSource.disconnect();
            }
        });
    });
});
This automatically creates a table and records in my database, which I don't want.
Actually I already have a different table, which I want to connect with strongloop.
I am completely clueless, any help would be appreciated.
I found this trying to do the same thing. I fixed it so it would end gracefully. Works great for me.
Original: https://gist.github.com/serkanserttop/64fc2d4465fb154066db#file-discover-js
var path = require('path');
var app = require(path.resolve(__dirname, '../server'));
var fs = require('fs');
var loopback = require('loopback');
var app_dir = './';

require('node-babel')();

var dataSource = app.dataSources.accountDs;
var db = 'myDB',
    owner = 'root';

function capitaliseFirstLetter(string) {
    return string.charAt(0)
        .toUpperCase() + string.slice(1);
}

function jsFileString(model_name) {
    return '' + 'module.exports = function(' + capitaliseFirstLetter(model_name) + ') {\n' + '\t\n' + '};';
}

function autoGenerateModelFiles() {
    dataSource.discoverModelDefinitions({
        schema: db
    }, function(err, models) {
        var count = models.length;
        console.log(models.length);

        models.forEach(function(model) {
            dataSource.discoverSchema(model.name, {
                associations: true
            }, function(err, schema) {
                if (schema.options.mysql.schema !== db) {
                    console.log('options.mysql.schema !== db', schema);
                }
                fs.writeFile(app_dir + 'common/models/' + model.name + '.json', JSON.stringify(
                    schema, null, ' '), function(err) {
                    if (err) throw err;
                    console.log('Saved ' + model.name);
                });
                fs.writeFile(app_dir + 'common/models/' + model.name + '.js', jsFileString(
                    model.name), function(err) {
                    if (err) throw err;
                    console.log('Created ' + model.name + '.js file');
                });
                count = count - 1;
                if (count === 0) {
                    console.log("DONE!", count);
                    dataSource.disconnect();
                    return;
                }
            });
        });
    });
}

// actually run the generator
autoGenerateModelFiles();
What you actually need is to discover the model from the database. There is documentation available on the StrongLoop pages:
http://docs.strongloop.com/display/public/LB/Discovering+models+from+relational+databases
http://docs.strongloop.com/display/public/LB/Database+discovery+API
Here is a working example:
var ds = app.dataSources.accountDB;

ds.discoverModelDefinitions(function (err, models) {
    models.forEach(function (model) {
        ds.discoverSchema(model.name, function (err, schema) {
            console.log(schema);
        });
    });
});
Put this code somewhere inside server.js (i.e. inside the boot method). I assume that you have set up the datasource correctly and also have the loopback mysql connector installed. This will loop through all the tables, and "schema" will contain the model definition discovered from the database.
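If it helps, here is a minimal sketch of the same discovery code dropped into a LoopBack boot script (assuming the datasource is still named accountDB and the file lives under server/boot/):

// server/boot/discover-models.js -- sketch only
module.exports = function (app) {
    var ds = app.dataSources.accountDB;

    ds.discoverModelDefinitions(function (err, models) {
        if (err) return console.error(err);
        models.forEach(function (model) {
            ds.discoverSchema(model.name, function (err, schema) {
                if (err) return console.error(err);
                console.log(schema); // model definition discovered from the table
            });
        });
    });
};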
You can use slc arc to generate the models based on your MySQL tables; after that you should be able to use the API to perform the basic CRUD operations. In the following link you can find more information about it:
https://strongloop.com/node-js/arc/
I'm currently working on a project that involves querying yahoo-finance for many different ticker symbols. The bottleneck is acquiring the data from yahoo, so I was wondering if there is a way I might go about speeding this up.
If I used multiple machines to query and then aggregated the data, would that help? I only have one physical machine; how might I go about doing that?
Thanks!
EDIT: Currently, I'm using Node.js, yahoo-finance, and Q.deferred to ask yahoo for historical data. Then, once all the promises are fulfilled (for each ticker), I'm doing a Q.all() to persist the data.
var data = [];
tickers = ["goog", "aapl", ...];
...
Q.all(_.map(tickers, function(symbol) {
    return getYahooPromise(symbol);
}))
.done( function() { persistData(data) });
getYahooPromise retrieves data for the ticker symbol and pushes it into the data array. Once all promises are resolved, the data is persisted in a MySQL database.
SECOND EDIT:
More code:
var sequentialCalls = [];

for ( var i = 0; i < tickers.length / chunkSize; i++ ) {
    sequentialCalls.push( persistYahooChunk );
}

sequentialCalls.push( function(callback) {
    connection.end();
    callback();
});

async.series( sequentialCalls )

exports.persistYahooChunk = function(callback) {

    console.log("Starting yahoo query");
    var currentTickers = tickers.slice(currentTickerIndex, currentTickerIndex + chunkSize);

    return yahooFinance.historical( {
        symbols: currentTickers,
        from: "2015-01-28",
        to: "2015-02-05"
    }).then( function(result) {

        console.log("Query " + currentTickerIndex + "/" + tickers.length + "completed");
        currentTickerIndex += chunkSize;

        //add valid data
        var toPersist = _.map(result, function(quotes, symbol) {
            return [symbol, quotes.length != 0 ];
        });

        var query = "INSERT INTO `ticker` (`symbol`, `valid`) VALUES ?";

        connection.query(query, [toPersist], function(err, result) {
            if (err) {
                console.log(err);
            }
            //console.log(result);
            callback();
        });
    });
}
The bottleneck is that you are doing one query per ticker.
Depending on the data you need to pull, if you could do a single query that includes all your tickers it would be much faster.
Here is an example if you need to get all the current prices for a list of tickers with a single query:
http://finance.yahoo.com/webservice/v1/symbols/A,B,C,D,E/quote?format=json
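For example, a single request for several tickers could look roughly like this. This is only a sketch: it assumes the webservice endpoint above and its JSON shape (a list.resources array of quote objects), both of which may have changed since.

// Sketch: one HTTP request for many tickers, assuming the endpoint above
// returns { list: { resources: [ { resource: { fields: {...} } }, ... ] } }
var http = require('http');

var tickers = ["GOOG", "AAPL", "MSFT", "YHOO"];
var url = "http://finance.yahoo.com/webservice/v1/symbols/" +
          tickers.join(",") + "/quote?format=json";

http.get(url, function(res) {
    var body = "";
    res.on("data", function(chunk) { body += chunk; });
    res.on("end", function() {
        var quotes = JSON.parse(body).list.resources;
        quotes.forEach(function(entry) {
            var fields = entry.resource.fields;
            console.log(fields.symbol + ": " + fields.price);
        });
    });
});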