Node.js mysql pool issue

I've started to use a connection pool for MySQL with Node.js instead of a normal connection, because I need to release the connection after the process has finished. When a connection is kept open, Node.js throws an exception due to the MySQL timeout.
I've edited my code, and now it gives me an error like this at line 94:
TypeError: Object #<Connection> has no method 'release'
at Query._callback (/usr/local/src/conn.js:94:28)
Here is the code around that line:
socket.on('getUnreadPM', function (msg) {
    var userSocket = clients[msg.userDestID];
    if (msg.lastGotMessage == 'none') {
        post = 'Select * from messages where (destUserID=\'' + msg.userDestID + '\' AND srcUserID=\'' + msg.userSrcID + '\') OR (destUserID=\'' + msg.userSrcID + '\' AND srcUserID=\'' + msg.userDestID + '\')';
    } else {
        post = 'Select * from messages where destUserID=\'' + msg.userDestID + '\' AND srcUserID= \'' + msg.userSrcID + '\' AND messageSendDate between \'' + msg.lastGotMessage + '\' and \'' + msg.deviceCurrentTime + '\' AND messageSendDate > \'' + msg.lastGotMessage + '\' order by messageSendDate';
    }
    pool.getConnection(function (err, connection) {
        // Use the connection
        connection.query(post, function (error, results, fields) {
            // And done with the connection.
            if (results.length > 0) {
                if (typeof userSocket !== 'undefined') {
                    var boolData = results[0];
                    //console.log('bool data length : ', results.length);
                    var messageID = new Array();
                    var srcUserID = new Array();
                    var destUserID = new Array();
                    var messageContent = new Array();
                    var messageDate = new Array();
                    var didRead = new Array();
                    for (var i = 0; i < results.length; i++) {
                        messageID.push(results[i].messageID);
                        srcUserID.push(results[i].srcUserID);
                        destUserID.push(results[i].destUserID);
                        messageContent.push(results[i].messageContent);
                        messageDate.push(dateFormat(results[i].messageSendDate, "yyyy-mm-dd HH:MM:ss"));
                        didRead.push(results[i].didRead);
                    }
                    userSocket.emit('history :', {
                        'dataMode': 'history',
                        'srcUserID': srcUserID,
                        'destUserID': destUserID,
                        'messageContent': messageContent,
                        'messageSendDate': messageDate,
                        'didread': didRead
                    });
                } else {
                    userSocket.emit('history :', {
                        'dataMode': 'none',
                    });
                }
            } else {
                userSocket.emit('history :', {
                    'dataMode': 'none',
                });
            }
            connection.release();
        });
    });
});

The mysql connection has no release method, so you can't release it.
I wrote such a Node module for my application a few months ago; you can find it here: https://github.com/LinuxDoku/node-mysql-pool
It creates a pool of connections at startup, queues your queries, and sends them to the database one after another. After the result comes back, the connection is released for new queries.
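As a side note: in current versions of the mysql package a pooled connection does expose release(), and pool.query() acquires and releases a connection for you. A minimal sketch of that pattern, assuming mysql >= 2.x (the credentials and the query are placeholders):
var mysql = require('mysql');

var pool = mysql.createPool({
    host: 'localhost',
    user: 'user',
    password: 'secret',
    database: 'mydb'
});

// pool.query() grabs a free connection, runs the query and releases the
// connection again, so no manual connection.release() is needed.
pool.query('SELECT * FROM messages WHERE destUserID = ?', [destUserID], function (err, results, fields) {
    if (err) return console.error(err);
    console.log(results);
});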

Related

nodejs mysql pool connection just idle

I am creating a Node.js module that retrieves some data from one MySQL database and, after some data processing, inserts it into another MySQL database. My main requirement is to keep the module alive 24 hours a day, even when there is no data in the first database; it should just keep checking for new data. But unfortunately the module just does nothing after a few minutes of running. My function is as follows:
var to_pool = mysql.createPool({
    connectionLimit: 100,
    host: 'localhost',
    user: 'username',
    password: 'password',
    database: 'toDatabase',
    multipleStatements: true
});
var from_pool = mysql.createPool({
    connectionLimit: 100,
    host: 'localhost',
    user: 'username',
    password: 'password',
    database: 'fromDatabase'
});

get_data(to_pool, from_pool);

var items_per_query = 100;
function get_data(to_pool, from_pool) {
    from_pool.getConnection(function (err, from_connection) {
        if (err) throw err; // not connected!
        // main database query
        from_connection.query("SELECT p.*, d.uniqueid as imei FROM tc_positions p left join tc_devices d on d.id = p.deviceid order by p.id desc limit " + items_per_query, function (err, result, fields) {
            if (err) throw err;
            var items = [];
            if (Object.keys(result).length > 0) {
                Object.keys(result).forEach(function (key) {
                    var x = result[key];
                    items.push({ 'id': x['id'], 'table_name': x['imei'], 'table_columns': table_columns_list });
                });
            }
            if (items.length >= items_per_query) {
                var items_to_be_removed = [];
                let imei_insert = "";
                let insert_data = "";
                for (var x = 0; x < items.length; x++) {
                    let all_values = "";
                    let i = 0;
                    for (let v of Object.values(items[x]['table_columns'])) {
                        i++;
                        all_values += "'" + v + "'";
                        if (i < Object.keys(items[x]['table_columns']).length) {
                            all_values += ",";
                        }
                    }
                    insert_data += "INSERT INTO " + items[x]['table_name'] + "(dt_server,dt_tracker,lat,lng,altitude,angle,speed,params,fix_time,accuracy,network) VALUES(" + all_values + "); ";
                    items_to_be_removed.push(items[x]['id']);
                    if (items_to_be_removed.length == items_per_query) {
                        var final_query = imei_insert + ' ' + createTable + ' ' + insert_data;
                        to_pool.getConnection(function (err, platform_connection) {
                            if (err) throw err;
                            platform_connection.query(final_query, function (err, results, fields) {
                                if (err) throw err;
                                var ids = items_to_be_removed.join(",");
                                from_connection.query("DELETE FROM tc_positions where id IN(" + ids + ")", function (err, results, fields) {
                                    if (err) throw err;
                                    console.log('removed ' + items_to_be_removed.length + ' rows from traccar');
                                    items_to_be_removed = [];
                                    insert_data = "";
                                    from_connection.destroy();
                                    platform_connection.destroy();
                                    // after finishing all tasks, call the same function again
                                    return get_data(to_pool, from_pool);
                                });
                            });
                        });
                    }
                }
            } else {
                setInterval(function () { get_data(to_pool, from_pool); }, 10000);
            }
        });
    });
}
The get_data() function is being called every 10 seconds, but the "main database query" portion never executes after some time. Is there any way to execute the database query again and again on every get_data() call?
It is better to use a process manager like PM2 and start your script like this:
pm2 start app.js
There is no need to set up intervals in your code: let the code run and exit, and PM2 will restart it automatically whenever it stops running. This will keep your code alive 24 hours a day, as per your requirement. You can also set up delays or restart strategies.
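For example, a restart delay and restart behaviour can be declared in a PM2 ecosystem file; a minimal sketch (the app name, script and the 10-second delay are placeholder assumptions):
// ecosystem.config.js - a minimal sketch; name, script and delay are placeholders
module.exports = {
    apps: [{
        name: 'mysql-sync',
        script: 'app.js',
        autorestart: true,    // restart whenever the process exits
        restart_delay: 10000  // wait 10 seconds between restarts
    }]
};
Start it with pm2 start ecosystem.config.js.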

Loop through MySQL rows and store results in array

I am trying to store details of affectedRows from a MySQL INSERT query using Node.js. My mind is melting trying to comprehend callbacks and Promises. Being a one-man dev team, I wanted to reach out and ask for the clearest explanation of how a callback can be applied here in a forEach loop.
The goal should be clear from these few lines of code: store data in the affected_rows[] array.
var affected_rows = [];

asset_array.forEach(function (asset) { // Populate the asset table
    var query_string = "INSERT IGNORE INTO " + asset_table + " SET symbol = '" + asset[0] + "', name = '" + asset[1] + "'";
    connection.query(query_string, function (err, rows, fields) {
        if (err) throw err;
        if (rows.affectedRows > 0) {
            data_to_push = [asset_table, asset[0], asset[1]];
            affected_rows.push(data_to_push);
        }
    });
});

console.log(affected_rows); // [] for obvious async reasons
One option would be to process the asset_array inside a function, pass a callback into it, and while looping through asset_array check whether the current index matches the asset_array length minus one; if so, call the callback. (This works because the mysql driver queues queries on a single connection and runs them in order, so the callback of the last query fires last.)
var affected_rows = [];

function processAssets(cb) {
    var array_len = asset_array.length;
    asset_array.forEach(function (asset, index) {
        var query_string = 'INSERT IGNORE INTO ' + asset_table + ' SET symbol = \'' + asset[0] + '\', name = \'' + asset[1] + '\'';
        connection.query(query_string, function (err, rows, fields) {
            if (err) throw err;
            if (rows.affectedRows > 0) {
                var data_to_push = [asset_table, asset[0], asset[1]];
                affected_rows.push(data_to_push);
            }
            if (index === (array_len - 1)) cb(); // last queued query has finished
        });
    });
}

processAssets(function () {
    console.log(affected_rows);
});
I suggest you have a look at async's queue.
You can change your code like this to use it:
// 2nd Step - Perform each task and then call callback() to move to the next task
var q = async.queue(function (asset, callback) {
    var query_string = "INSERT IGNORE INTO " + asset_table + " SET symbol = '" + asset[0] + "', name = '" + asset[1] + "'";
    connection.query(query_string, function (err, rows, fields) {
        if (err) throw err;
        if (rows.affectedRows > 0) {
            var data_to_push = [asset_table, asset[0], asset[1]];
            affected_rows.push(data_to_push);
        }
        callback(); // call next task
    });
}, 2); // here 2 means concurrency, i.e. 2 tasks will run in parallel

// Final Step - Drain gives you the end of the queue, which means all tasks have finished processing
q.drain = function () {
    // Do whatever you want after all tasks are finished
};

// 1st Step - create a queue of all tasks that you need to perform; push the
// asset itself so the worker still has it in scope when it builds the query
for (var i = 0; i < asset_array.length; i++) {
    q.push(asset_array[i]);
}
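Since the question also mentions Promises: here is a minimal Promise-based sketch, assuming the same callback-style connection.query from the mysql package, wrapped by hand (queryAsync is a hypothetical helper, not part of the driver):
// Wrap the callback-style query() in a Promise (hypothetical helper).
function queryAsync(sql) {
    return new Promise(function (resolve, reject) {
        connection.query(sql, function (err, rows) {
            if (err) reject(err);
            else resolve(rows);
        });
    });
}

// Fire one INSERT per asset and wait for all of them to finish.
Promise.all(asset_array.map(function (asset) {
    var sql = "INSERT IGNORE INTO " + asset_table + " SET symbol = '" + asset[0] + "', name = '" + asset[1] + "'";
    return queryAsync(sql).then(function (rows) {
        return rows.affectedRows > 0 ? [asset_table, asset[0], asset[1]] : null;
    });
})).then(function (results) {
    var affected_rows = results.filter(Boolean); // drop the nulls
    console.log(affected_rows); // populated once every query is done
});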

Read line by line and insert into MySQL with Node.js

I wrote some code to parse entries out of a file.log. The log contains two different kinds of lines: a command line and a finishing line. There are 1,000,000 lines of log, which makes 500,000 entries in the DB.
The problem is that inserting into and updating the database is too slow.
function getDate() {
    var date = new Date().getTime() + (2 * 60 * 60 * 1000);
    return new Date(date).toISOString().replace(/T/, ' ').replace(/\..+/, ''); // DATETIME in format yyyy-mm-dd hh:mm:ss
}

var detailLog = false;
var express = require("express");
var mysql = require('mysql');
var connection = mysql.createConnection({
    host     : 'localhost',
    user     : 'root',
    password : '',
    database : 'logger'
});
var app = express();

connection.connect(function (err) {
    if (!err) {
        console.log(getDate(), "Database is connected ... \n");
    } else {
        console.log(getDate(), "Error connecting database ... \n");
    }
});

var file = 'file.log';
var fs = require('fs');
var rl = require('readline').createInterface({
    input: require('fs').createReadStream(file)
});

rl.on('line', function (line) {
    // searching 'command' since it is in every odd line
    if (line.search('command') >= 0) {
        // parsing every odd line of log
        // insert into logs table
        var queryINS = "";
        if (detailLog)
            console.log(getDate(), 'shooting INSERT');
        connection.query(queryINS, function (err, rows, fields) {
            if (err) {
                console.log(getDate(), 'Error while performing insert into logs query: ', queryINS, err);
            }
        });
        if (detailLog)
            console.log(getDate(), 'INSERT done');
    // searching 'Request finished' since it is in every even line
    } else if (line.search('Request finished') >= 0) {
        // parsing every even line of log
        // update logs table
        var queryUPD = "";
        if (detailLog)
            console.log(getDate(), 'shooting UPDATE');
        connection.query(queryUPD, function (err, rows, fields) {
            if (err) {
                console.log(getDate(), 'Error while performing update logs query: ', queryUPD, err);
            }
        });
        if (detailLog)
            console.log(getDate(), 'UPDATE done');
    }
}).on('close', function () {
    console.log(getDate(), 'Inserted logs into table.');
});
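One observation worth sketching (an assumption on my part, not an answer from this thread): firing one query per log line costs a round trip each, and the mysql driver turns a nested array bound to a VALUES ? placeholder into a grouped list, so rows can be batched into a single multi-row INSERT. A minimal sketch with a placeholder logs table and columns:
// A batching sketch: collect parsed rows, flush them in bulk.
var batch = [];
var BATCH_SIZE = 1000;

function flush(done) {
    if (batch.length === 0) return done();
    var rows = batch;
    batch = [];
    // Nested arrays become (v1, v2), (v1, v2), ... in the generated SQL
    connection.query('INSERT INTO logs (line, created_at) VALUES ?', [rows], function (err) {
        if (err) console.log(getDate(), 'Error while performing bulk insert: ', err);
        done();
    });
}

rl.on('line', function (line) {
    if (line.search('command') >= 0) {
        batch.push([line, getDate()]); // replace with the real parsed columns
        if (batch.length >= BATCH_SIZE) flush(function () {});
    }
}).on('close', function () {
    flush(function () {
        console.log(getDate(), 'Inserted logs into table.');
    });
});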

How to handle nodejs async with mysql?

Community,
I am new to Node.js and now I have a problem I can't solve: async in JavaScript/Node.js. How can I handle the following so I can push the usernames to the array?
I have already tried to help myself with many different functions, but nothing works for me... :/
Sincerely, Adhskid.
function getCurrentBetInformations() {
    connection.query('SELECT * FROM `BETS` WHERE BET_ACTIVE = "1" LIMIT 1', function (err, rowss, fields) {
        if (err) logger.warn('MySQL Error: ' + err.stack);
        betid = rowss[0].BET_ID;
        betends = rowss[0].BET_END;
        connection.query('SELECT * FROM `BETS_BID` WHERE BID_BET_ID=\'' + betid + '\'', function (err, betbids, fields) {
            if (err) logger.warn('MySQL Error: ' + err.stack);
            var betQuants = new Array();
            var betIds = new Array();
            var betUsernames = new Array();
            var betDates = new Array();
            var rowsAffected = betbids.length;
            for (i = 0; i < rowsAffected; i++) {
                betQuants.push(betbids[i].BID_KEYS_COUNT);
                betIds.push(betbids[i].BID_ID);
                var betSender = betbids[i].BID_SENDER;
                connection.query('SELECT `USER_NAME` FROM `USER` WHERE `USER_STEAMID` = \'' + betSender + '\' LIMIT 1', function (err, rows, fields) {
                    if (err) logger.warn('MySQL Error: ' + err.stack);
                    console.log(rows[0].USER_NAME);
                    addUsername(rows[0].USER_NAME);
                });
                function addUsername(currentUsername) {
                    betUsernames.push(currentUsername);
                }
                betDates.push(betbids[i].BID_TIME);
                if (betUsernames.length === i) {
                    execSiteRef();
                }
            }
            function execSiteRef() {
                console.log(betUsernames);
                sendUserSiteRefresh([betQuants, betIds, betUsernames, betDates], betends);
            }
        });
    });
}
I think your problem comes from this part:
if (betUsernames.length === i) {
    execSiteRef();
}
You should instead check whether the betUsernames array has reached its final size:
if (betUsernames.length === rowsAffected) {
    execSiteRef();
}
There may be more errors though; I did not check closely.
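Note also that this check runs synchronously inside the for loop, before any of the USER_NAME queries have called back, so it can never fire there. A sketch of the same fix with the check moved into addUsername, where the array actually grows:
function addUsername(currentUsername) {
    betUsernames.push(currentUsername);
    // once every username has arrived, it is safe to refresh the site
    if (betUsernames.length === rowsAffected) {
        execSiteRef();
    }
}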

multiple async mongo requests generate messed up returns

I'm trying to build a JSON document out of multiple requests to my MongoDB.
Since I'm not using DBRef, I have to build the "table joins" by myself, and that's how I ended up in this mess.
This is the code that has been giving me headaches for a couple of days now.
(the mongo part is done with mongoskin)
var getUserFeed = function (thelimit, out) {
    userfeed = db.collection("userfeed");
    apparel = db.collection("apparel");
    store = db.collection("stores");
    if (thelimit)
        args = { limit: thelimit, sort: [['date', -1]] };
    userfeed.find({}, args).toArray(function (e, feed) {
        if (e) console.log("error: ", e);
        // gather apparel infos
        var i = 0;
        var ret_feeds = [];
        feed.forEach(function (cur_feed) {
            var outfits = [];
            console.log("beginning with: " + cur_feed.url);
            var resfeed = "";
            resfeed = cur_feed;
            resfeed.url = baseurl + snapurl + resfeed.url + "_small.jpg";
            i = 0;
            cur_feed.apparel_ids.forEach(function (item) {
                /*>>*/ apparel.find({ "_id": item }, { limit: 1 }).toArray(function (e, results) {
                    console.log(">>>>>>>>>>> APPAREL_FIND { i:" + i + " }");
                    if (e) console.log("error: ", e);
                    results = results[0];
                    if (results.apparel_cat == 1)
                        url_subcat = "pants/";
                    else if (results.apparel_cat == 2)
                        url_subcat = "shirts/";
                    else if (results.apparel_cat == 2)
                        url_subcat = "tshirts/";
                    results.thumb = baseurl + outfiturl + url_subcat + results.apparel_id + "/front.jpg";
                    results.size = "M"; ///// TOBE REAL VERY SOON
                    results.gallery = [
                        baseurl + outfiturl + url_subcat + results.apparel_id + "/model.jpg",
                        baseurl + outfiturl + url_subcat + results.apparel_id + "/front.jpg"
                    ];
                    outfits.push(results); // quick and dirty, 2 b refined..
                    i++;
                    if (i >= cur_feed.apparel_ids.length) {
                        // pack it up
                        // resfeed.url = resfeed.url;
                        resfeed.outfits = outfits;
                        resfeed.fav = false;
                        resfeed.bough = false;
                        // retrieve store infos
                        /*>>>*/ store.find({ "_id": resfeed.store_id }, { limit: 1 }).toArray(function (e, resstore) {
                            console.log("\t############# STORE_FIND { i:" + i + " }");
                            if (e) console.log("error: ", e);
                            resfeed.store = resstore[0];
                            resfeed.store.class = "hem";
                            ret_feeds.push(resfeed);
                            if (ret_feeds.length >= feed.length) {
                                console.log("\t\t###################### calling return [ ret_feeds.length = " + ret_feeds.length + " feed.length = " + feed.length);
                                out.send(ret_feeds);
                            }
                        });
                    }
                });
            });
        });
    });
}
This code fails because it returns the JSON before finishing its task, so the next time it tries to return another JSON it crashes miserably, due to the fact that the headers have already been sent.
Now as you can see, I have 3 collections: userfeed, apparel and stores.
The goal of this function is to retrieve all the items in the userfeed collection, extract the outfits (based on the apparel_ids array that is part of each userfeed entry), and also extract the related store info for each userfeed entry in the same way.
I know that async.js or an equivalent is the way to go: I've read like a gazillion other posts here on SO, but I still can't get my head around it, probably because the whole mechanism behind async.js, and flow control in general, is still out of focus in my mind.
I'm still a noob at node :)
UPDATE
I think I found the right path for understanding here: http://www.sebastianseilund.com/nodejs-async-in-practice
this guy did a terrific job of describing, use case by use case, all the ways to apply async.js to your code.
I'll post the solution as soon as I've worked it out.
UPDATE 2
Thanks to the above dude I was able to work out a working solution; below is the answer.
After so much struggling I have finally managed to get a solution.
async.js was the answer, as I was (obviously) suspecting.
FYI, here's the working code.
If you'd like to point out improvements or anything else, you are more than welcome.
var getUserFeed = function (thelimit, out) {
    userfeed = db.collection("userfeed");
    apparel = db.collection("apparel");
    store = db.collection("stores");
    var args;
    if (thelimit)
        args = { limit: thelimit, sort: [['date', -1]] };
    var outfits = [];
    var feeds = [];
    async.series([
        // userfeed find
        function (callback) {
            userfeed.find({}, args).toArray(function (e, feed) {
                if (e) callback(e);
                feeds = feed;
                console.log(feeds.length + " retrieved. stepping in");
                callback(null, null);
            });
        },
        // join
        function (callback) {
            async.forEach(feeds, function (thefeed, callback) {
                var i = feeds.indexOf(thefeed);
                async.parallel([
                    // load apparel infos
                    function (callback) {
                        console.log("\t >>> analyzing thefeed id " + thefeed._id);
                        async.forEach(thefeed.apparel_ids, function (apparel_id, callback) {
                            apparel.find({ "_id": apparel_id }, { limit: 1 }).toArray(function (e, results) {
                                if (e) console.log("error: ", e);
                                results = results[0];
                                if (results.apparel_cat == 1)
                                    url_subcat = "pants/";
                                else if (results.apparel_cat == 2)
                                    url_subcat = "shirts/";
                                else if (results.apparel_cat == 2)
                                    url_subcat = "tshirts/";
                                results.thumb = baseurl + outfiturl + url_subcat + results.apparel_id + "/front.jpg";
                                results.size = "M"; ///// TOBE REAL VERY SOON
                                results.gallery = [
                                    baseurl + outfiturl + url_subcat + results.apparel_id + "/model.jpg",
                                    baseurl + outfiturl + url_subcat + results.apparel_id + "/front.jpg"
                                ];
                                console.log("\t\t### pushing data into thefeed_index: " + i);
                                if (!util.isArray(feeds[i].outfits)) feeds[i].outfits = [];
                                feeds[i].outfits.push(results);
                                callback(null, null);
                            });
                        }, callback);
                    },
                    // load store infos
                    function (callback) {
                        store.find({ "_id": thefeed.store_id }, { limit: 1 }).toArray(function (e, resstore) {
                            console.log("\t### STORE_FIND");
                            if (e) console.log("error: ", e);
                            feeds[i].store = resstore[0];
                            feeds[i].store.class = "hem";
                            callback(null, null);
                        });
                    }
                ], callback);
            }, callback);
        }
        // MAIN
    ], function (err, result) {
        console.log("feed retrieval completed. stepping out");
        if (err) return next(err);
        out.send(feeds);
};
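For reference, the same fan-out join reads much shorter with promises; a minimal sketch, assuming collection methods that return promises (the modern mongodb driver rather than mongoskin's callback API used above):
// Sketch only: userfeed, apparel and store are assumed to be
// promise-returning collections (modern mongodb driver).
async function getUserFeed(thelimit) {
    const args = thelimit ? { limit: thelimit, sort: [['date', -1]] } : {};
    const feeds = await userfeed.find({}, args).toArray();
    await Promise.all(feeds.map(async function (thefeed) {
        // the apparel lookups and the store lookup run in parallel per feed
        const [outfits, resstore] = await Promise.all([
            Promise.all(thefeed.apparel_ids.map(function (id) {
                return apparel.findOne({ _id: id });
            })),
            store.findOne({ _id: thefeed.store_id })
        ]);
        thefeed.outfits = outfits;
        thefeed.store = resstore;
    }));
    return feeds;
}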