I'm pretty new to Java Script and tried to write a small programm to add users to a database. My problem is, that my programs doesn't add every user ones. It adds the last user as often as there are users in the list. users has more than 2 Objects and all the Objects have all field filled.
for(i = 0; i<users.length; i++)
{
var user = users[i];
console.log(user.lastonline)
pool.getConnection(function(err, connection) {
if (err) throw err;
var quer = connection.query('INSERT INTO users SET `steamid` = '+ connection.escape(user.steamid)+', `name`='+connection.escape(user.name)+', `lastonline`='+connection.escape(user.lastonline)+' ON DUPLICATE KEY UPDATE `name`='+connection.escape(user.name)+', `lastonline`='+connection.escape(user.lastonline)+'', function(err, result) {
connection.release();
});
console.log(quer.sql);
});
}
I tried to rewrite this in a lot of different way, but most time I get something like this:
TypeError: Cannot read property 'steamid' of undefined
for(i = 0; i<users.length; i++)
{
pool.getConnection(function(err, connection) {
console.log(users[i]["steamid"]);
if (err) throw err;
var quer = connection.query('INSERT INTO users SET `steamid` = '+ connection.escape(users[i]["steamid"])+', `name`='+connection.escape(users[i].name)+', `lastonline`='+connection.escape(users[i].lastonline)+' ON DUPLICATE KEY UPDATE `name`='+connection.escape(users[i].name)+', `lastonline`='+connection.escape(users[i].lastonline)+'', function(err, result) {
connection.release();
});
console.log(quer.sql);
});
}
EDIT:
Rest of the programm
var mysql = require('mysql');
var Promise = require("bluebird");
var pool = mysql.createPool({
connectionLimit : 10,
host : 'localhost',
user : 'zar',
password : 'qxLLPa06iEs2Bzsu',
database : 'zar',
socketPath: '/var/run/mysqld/mysqld.sock'
});
pool.on('connection', function (connection) {
console.log("connection made")
});
//my testing users
var users = [];
times = Date.now();
user1 = {steamid:012345678912345658,name:"user1",lastonline:times};
user2 = {steamid:012345678912345628,name:"user2",lastonline:times};
user3 = {steamid:012345678912345618,name:"user3",lastonline:times};
users.push(user1);
users.push(user2);
users.push(user3);
Edit: Fixed to use only one connection.
Previous version was getting a new connection for every user.
You should use Promises:
pool.getConnection((err, connection) => {
if (err) {
console.log(err);
return;
}
var tasks = users.map((user) => {
return new Promise((resolve, reject) => {
if (err) {
return reject(err);
}
var quer = connection.query('INSERT INTO users SET `steamid` = ' + connection.escape(user.steamid) + ', `name`=' + connection.escape(user.name) + ', `lastonline`=' + connection.escape(user.lastonline) + ' ON DUPLICATE KEY UPDATE `name`=' + connection.escape(users.name) + ', `lastonline`=' + connection.escape(users.lastonline) + '', function (err, result) {
if (err) {
return reject(err);
}
resolve(result);
});
});
});
Promise.all(tasks)
.then((results) => {
// Array of results passed in resolve
connection.release();
})
.catch((err) => {
// All errors you reject are catched here
});
});
This should workd but still, you are executing all queries in parallel, which can be pretty aggressive for the DB.
I suggest you to look into bluebird Promise.each for better results.
Related
I have data coming from different GPS tracker devices. Its a unidirectional data which means I am receiving the data and pushing it into the MySQL DB and firebase. I have a total of 300 Devices connected which are sending data to my server every 10 seconds.
My Server Specs are
AWS t2.xlarge
CPU: 4
Ram: 16GB
what happens is that after 3 days, It stops sending the data into the database. It doesn't stop the server. It just freezes. If I do this
sudo netstat -tulnap | grep :8050
It does show the process and all that but I do not see any data pushing into the DB. It just freezes. I had to reboot the server or I had to stop it using forever and restart it again
forever stop --minUptime 36000000000 server.js
And when I go to my PHPMyAdmin and check the monitor screen what I can see that I have very little free memory left and cache memory is into GBs. It seems like all the memory went into cached memory which left my server freezes. I have no idea where I am doing wrong which is causing it to freeze. For example, at the moment as I am posting a question this is my server current status
As you can see above that in 19 hours cache has been increased and its keep growing. Below is the code
if (cluster.isMaster) {
// Fork workers.
for (var i = 0; i < numCPUs; i++) {
cluster.fork();
}
cluster.on('death', function(worker) {
// console.log('worker ' + worker.pid + ' died');
cluster.fork();
});
} else {
net.createServer(function(socket) {
// console.log('received connection...');
socket.on("error", function(err) {
// console.log("socket error: ")
// console.log(err.stack);
socket.destroy();
});
socket.on('data', function(data) {
});
});
}
Recently I have made one change in the code but still it didn't work out which was to close the socket after receiving the data every 5 seconds
socket.on('data', function(data) {
//parse data and push data into db and firebase
socket.end();
});
That's how I am doing MySQL queries
database.js
var mysql = require('mysql');
var pool = mysql.createPool({
connectionLimit : 8,
waitForConnections : true,
queueLimit : 300,
host : 'localhost',
user : 'username',
password : '123456',
database : 'dummy'
});
module.exports = pool;
Server.js file (only database code I have pasted as full code has 1400 lines of code)
const db = require('./database');
function getCarDetails(car_id,callback) {
db.getConnection((err, connection) => {
if(err) throw err;
console.log('connected as id ' + connection.threadId);
let selectQuery = 'SELECT * FROM ?? join user_info ON car.user_id = user_info.user_id WHERE ?? = ?';
let query = mysql.format(selectQuery, ["car", "id", car_id]);
// query = SELECT * FROM `todo` where `user` = 'shahid'
connection.query(query, (err, data) => {
connection.release();
if(err) {
console.error(err);
//throw err;
return;
}
// rows fetch
if(data.length > 0){
return callback(data[0]);
}else{
return callback(false);
}
});
});
}
function updateIgnitionNotification(car_id,acc_on,acc_off,acc,speed,updated,callback) {
db.getConnection((err, connection) => {
let updateQuery = 'UPDATE car SET notification_acc_on = ?,notification_acc_off = ?,acc = ?,speed = ?,updated = ? Where id = ?';
let query = mysql.format(updateQuery, [acc_on, acc_off,acc,speed,updated, car_id]);
// query = UPDATE `todo` SET `notes`='Hello' WHERE `name`='shahid'
connection.query(query, (err, data) => {
connection.release();
if(err) {
console.error(err);
//throw err;
return;
}else{
return callback(data);
}
});
});
}
function updateLastUpdatedData(car_id,current_datetime,status,acc,monitoring,max_speed,callback){
db.getConnection((err, connection) => {
let updateQuery = 'UPDATE car SET updated = ?,status = ?,acc = ?,monitoring = ?, max_speed = ? Where id = ?';
let query = mysql.format(updateQuery, [current_datetime,status,acc.toUpperCase(),monitoring, max_speed, car_id]);
// query = UPDATE `todo` SET `notes`='Hello' WHERE `name`='shahid'
connection.query(query, (err, data) => {
connection.release();
if(err) {
console.error(err);
//throw err;
return;
}else{
return callback(data);
}
});
});
}
function updateCommand(car_id,command,callback){
db.getConnection((err, connection) => {
let updateQuery = 'UPDATE car SET command = ? Where id = ?';
let query = mysql.format(updateQuery, [command, car_id]);
// query = UPDATE `todo` SET `notes`='Hello' WHERE `name`='shahid'
connection.query(query, (err, data) => {
connection.release();
if(err) {
console.error(err);
//throw err;
return;
}else{
return callback(data);
}
});
});
}
function updateCarLockNotification(car_id,lock_notification,callback){
db.getConnection((err, connection) => {
let updateQuery = 'UPDATE car SET lock_notification = ? Where id = ?';
let query = mysql.format(updateQuery, [lock_notification, car_id]);
// query = UPDATE `todo` SET `notes`='Hello' WHERE `name`='shahid'
connection.query(query, (err, data) => {
connection.release();
if(err) {
console.error(err);
//throw err;
return;
}else{
return callback(data);
}
});
});
}
function updateOverSpeedNotification(car_id,notification_over_speed,callback){
db.getConnection((err, connection) => {
let updateQuery = 'UPDATE car SET notification_over_speed = ? Where id = ?';
let query = mysql.format(updateQuery, [notification_over_speed, car_id]);
// query = UPDATE `todo` SET `notes`='Hello' WHERE `name`='shahid'
connection.query(query, (err, data) => {
connection.release();
if(err) {
console.error(err);
//throw err;
return;
}else{
return callback(data);
}
});
});
}
function updateGeoFenceOutsideAttempt(car_id,geofence_attempt,callback){
db.getConnection((err, connection) => {
let updateQuery = 'UPDATE car SET geofence_outside_attempt = ? Where id = ?';
let query = mysql.format(updateQuery, [geofence_attempt, car_id]);
// query = UPDATE `todo` SET `notes`='Hello' WHERE `name`='shahid'
connection.query(query, (err, data) => {
connection.release();
if(err) {
console.error(err);
//throw err;
return;
}else{
return callback(data);
}
});
});
}
function updateGeoFenceInsideAttempt(car_id,geofence_attempt,callback){
db.getConnection((err, connection) => {
let updateQuery = 'UPDATE car SET geofence_inside_attempt = ? Where id = ?';
let query = mysql.format(updateQuery, [geofence_attempt, car_id]);
// query = UPDATE `todo` SET `notes`='Hello' WHERE `name`='shahid'
connection.query(query, (err, data) => {
connection.release();
if(err) {
console.error(err);
//throw err;
return;
}else{
return callback(data);
}
});
});
}
function updateBatteryNotification(car_id,battery,callback){
db.getConnection((err, connection) => {
let updateQuery = 'UPDATE car SET notification_battery = ? Where id = ?';
let query = mysql.format(updateQuery, [battery, car_id]);
// query = UPDATE `todo` SET `notes`='Hello' WHERE `name`='shahid'
connection.query(query, (err, data) => {
connection.release();
if(err) {
console.error(err);
//throw err;
return;
}else{
return callback(data);
}
});
});
}
function saveNotificationLog(log,callback){
db.getConnection((err, connection) => {
// let insertQuery = 'INSERT INTO ?? (??,??) VALUES (?,?)';
//let query = mysql.format(insertQuery,["log","user","notes",data.user,data.value]);
// query = UPDATE `todo` SET `notes`='Hello' WHERE `name`='shahid'
connection.query('INSERT INTO log SET ?', log, (err, res) => {
connection.release();
if(err) {
return callback(res);
}else{
return callback(res);
}
});
});
}
function saveHistory(history,callback){
db.getConnection((err, connection) => {
// let insertQuery = 'INSERT INTO ?? (??,??) VALUES (?,?)';
//let query = mysql.format(insertQuery,["log","user","notes",data.user,data.value]);
// query = UPDATE `todo` SET `notes`='Hello' WHERE `name`='shahid'
connection.query('INSERT INTO car_history SET ?', history, (err, res) => {
connection.release();
if(err) {
return callback(res);
}else{
return callback(res);
}
});
});
}
function updateGeoFenceNotification(car_id,notification_inside,notification_outside,callback){
db.getConnection((err, connection) => {
let updateQuery = 'UPDATE car SET notification_inside = ?,notification_outside = ? Where id = ?';
let query = mysql.format(updateQuery, [notification_inside, notification_outside, car_id]);
// query = UPDATE `todo` SET `notes`='Hello' WHERE `name`='shahid'
connection.query(query, (err, data) => {
connection.release();
if(err) {
console.error(err);
//throw err;
return;
}else{
return callback(data);
}
});
});
}
Please help me to resolve this issue
I'll attempt an answer, or at least some observations.
It's probably worth your effort to get your nodejs app working robustly without clustering. Keeping things simple helps with troubleshooting. Once it is solid you can add clustering.
Your server is overprovisioned. The workload you have is 1,800 connections per minute (every six seconds from 300 devices) or 30/sec. A 2-core server with 4-8GiB of RAM should be just fine for this workload if your program is written well. A 1-core server may be enough. And, a program that's expected to stay up for thousands of hours necessarily must be written well. (Disregard this advice if your database server runs on the same virtual machine as your nodejs app.)
You didn't describe how you connect to your database server from your nodejs code. You should use connection pooling to take away the need to close and reopen connections a lot.
Do something like this in your initialization code:
const mysql = require('mysql')
const mySqlPool = mysql.createPool({
connectionLimit : 8, /* make this as small as possible */
waitForConnections : true,
queueLimit : 300, /* enough for 6sec worth of workload */
host : 'dbhost',
user : 'bob',
password : 'secret',
database : 'my_db',
})
/* make the pool available to other code.
* There may be a better way to do this. */
global.mySqlPool = mySqlPool
/* make a little monitor function to
* let you know of database problems.
* this pings the database every 10sec */
let monitorTimeout = setTimeout ( function() {
global.mySqlPool.ping ( function (err) {
if (err) {
clearTimeout(monitorTimeout)
console.error('database connection error', err)
throw err
}
})
}, 10000)
Then when you need to access your database use global.mySqlPool. in place of db. in the code you have. Each query will use a connection from the pool. If all the connections are in use, the query will wait in a queue until a connection is free.
This pooling / queueing strategy puts a much more predictable load on both your nodejs program and your MySQL database.
The same sort of thing should be possible for firebase.
Once you have made database connection pooling work properly, keeping the pool sizes small, your next step for performance improvement is a little harder. But it will make a vast difference.
What you do is this: put your UPDATE operations into a simple queue inside your app. That is, modify your updateWhatever( car_id, whatever ) functions to push() their SQL statements and parameters onto a shared array.
Then, write a function that uses shift() to fetch those items from the array and run them one after the other on the dbms. When the array has multiple items in it, wrap those items in a single database transaction.
This helps performance a lot: most of the MySQL server's work to handle INSERT and UPDATE operations happens when those operations are COMMITed. (If you don't start transactions explicitly, MySQL uses autocommit for every operation.) So if you bundle them together in transaction bundles of a few tens of operations, you put a lot less load on MySQL.
It also reduces your application's potential need to handle many UPDATE operations concurrently on different connections. That in turn reduces contention for access to the tables.
(It's true that the shift() operation on an array has an in-memory performance hit if the array is large. If that turns out to be a problem you can replace the easy-to-program array / push() / shift() queue discipline with a purpose-built queue package.)
I have a nodejs module which get result from a mysql database and insert into another mysql database within a given interval. After few queries it just get stuck and throw "ER_CON_COUNT_ERROR" and says Too many connections. My module is as follows:
function get_data() {
const mysql = require('mysql');
const moment = require('moment');
var items_per_query = 1000;
let to_connection = mysql.createConnection({
host: 'localhost',
user: 'username',
password: 'pass',
database: 'todatabase'
});
let from_connection = mysql.createConnection({
host: 'localhost',
user: 'username',
password: 'pass',
database: 'fromdatabase'
});
from_connection.query("SELECT p.*, d.uniqueid as imei FROM tc_positions p left join tc_devices d on d.id = p.deviceid order by p.id desc limit " + items_per_query, function (err, result, fields) {
if (err) throw err;
var items = [];
var table_columns_list = {};
if (Object.keys(result).length > 0) {
Object.keys(result).forEach(function (key) {
var x = result[key];
table_columns_list = {
'dt_server': moment(x['servertime']).format('YYYY-MM-DD HH:MM:ss'),
'dt_tracker': moment(x['devicetime']).format('YYYY-MM-DD HH:MM:ss'),
'lat': x['latitude'],
'long': x['longitude'],
'altitude': x['altitude'],
'angle': x['course'],
'speed': x['speed'],
'params': x['attributes'],
'fix_time': moment(x['fixtime']).format('YYYY-MM-DD HH:MM:ss'),
'accuracy': x['accuracy'],
'network': x['network']
}
items.push({ 'id': x['id'], 'table_name': 'table_' + x['imei'], 'table_columns': table_columns_list });
});
}
if (items.length >=500) {
var items_to_be_removed = [];
var total_rows_inserted = 0;
for (var x = 0; x < items.length; x++) {
to_connection.query(imei_insert, function (err, results, fields) {
if (err) {
console.log(err.message);
}
else {
let createTable = " create table if not exists .... ";
to_connection.query(createTable, function (err, results, fields) {
if (err) {
console.log(err.message);
}
else {
let insert_data = "INSERT INTO .... ";
to_connection.query(insert_data, function (err, results, fields) {
if (err) {
console.log(err.message);
}
total_rows_inserted++;
items_to_be_removed.push(row_id);
if (total_rows_inserted == items.length) {
//remove data from traccar positions table that were inserted to platform
var ids = items_to_be_removed.join(",");
from_connection.query("DELETE FROM tc_positions where id IN(" + ids + ")", function (err, results, fields) {
if (err) throw err;
console.log('removed ' + total_rows_inserted + ' rows from traccar');
return get_data(); // after finish all task call the same function again
});
}
});
}
});
}
});
}
}
else {
setInterval(get_data, 15000);
}
});
}
and I just call get_data(); function to run. Is there any efficient way to run this module smoothly. I should run it 7 days 24 hours without any interruption.
You're creating a new connection to the servers each time get_data() runs, which is overwhelming them. You need to reuse connections, as for example with connection pooling.
I am trying to trigger csv file upload in s3 and insert the data from the file to database using lambda.
Most of the times code executes successfully if i run the code back to back in couple of seconds gap.
But sometimes the problem i face is the code stops execution at console console.log('about to get the data'); and ignore rest of the code and sometimes mysql connection gets time out.
I can find that the problem occurs only when i test the lambda code with more than 20 seconds of gap. So, i guess this is a cold start problem.
I don't want to miss even a single s3 trigger. So, i need help to find flaw in my code that is causing this problem.
const AWS = require('aws-sdk');
const s3 = new AWS.S3({region: 'ap-south-1', apiVersion: '2006-03-01'});
var mysql= require('mysql');
var conn = mysql.createPool({
connectionLimit: 50,
host: 'HOST',
user: 'USER',
password: 'PASSWORD',
database: 'DATABASE'
})
async function mainfunc (event, context, callback) {
console.log("Incoming Event: ", JSON.stringify(event));
const bucket = event.Records[0].s3.bucket.name;
const filename = decodeURIComponent(event.Records[0].s3.object.key.replace(/\+/g, ' '));
const params = {
Bucket: bucket,
Key: filename
};
console.log('about to get the data'); //Code stops here some times
return await getresult(params);
};
async function getresult(params){
var result = await s3.getObject(params).promise();
var recordList = result.Body.toString('utf8').split(/\r?\n/).filter(element=>{
return element.length> 5;
})
recordList.shift()
var jsonValues = [];
var jsonKeys = result.Body.toString('utf8').split(/\r?\n/)[0]
recordList.forEach((element) => {
element = element.replace(/"{2,}/g,'"').replace(/, /g,"--").replace(/"{/, "{").replace(/}"/, "}").replace(/,/g, ';').replace(/--/g,', ').split(';');
jsonValues.push(element)
});
var lresult = await query(jsonKeys, jsonValues);
return lresult;
}
async function query(jsonKeys, jsonValues){
var qresult = await conn.getConnection(function(err, connection) {
if (err){
console.log(err,'------------------------------------');// Sometimes i get Sql Connection timeout error here
} else {
console.log("Connected!");
var sql = "INSERT INTO reports ("+jsonKeys+") VALUES ?";
connection.query(sql, [jsonValues], function (err, result) {
if (err){
console.log(err);
connection.release()
return err;
} else {
console.log("1 record inserted");
console.log(result);
connection.release()
return result;
}
});
}
})
}
exports.handler = mainfunc
I have solved the issue by using promise in the "query" function
function query(jsonKeys, jsonValues){
return new Promise(function(resolve, reject) {
conn.getConnection(function (err, connection) {
if (err) {
console.log(err, '------------------------------------');
}
else {
console.log("Connected!");
var sql = "INSERT INTO principal_reports (" + jsonKeys + ") VALUES ?";
connection.query(sql, [jsonValues], function (err, result) {
if (err) {
console.log(err);
connection.release();
reject(err)
}
else {
console.log("1 record inserted");
console.log(result);
connection.release();
resolve(result)
}
});
}
})
})
}
and changed the code
var lresult = await query(jsonKeys, jsonValues);
to
var lresult = await query(jsonKeys, jsonValues).then(data =>{
return data;
}).catch(error =>{
return error;
});
I want to count a line in table that has FOO table.
The following code has a bug which show only the last db_name.
RESULT IS LOOK LIKE THIS:
db_0099,0
db_0099,5
db_0099,10
db_0099,3
Could you please suggest me how to fix the nodejs code?
var mysql = require('mysql');
var sql1 = "SELECT table_schema as db_name from information_schema.tables WHERE table_name = 'FOO' ";
var sql2 = "SELECT COUNT(*) as solution FROM {0}.FOO";
var connection = mysql.createConnection({
host : '$$$$$$$',
user : '$$$$$$$',
password : '$$$$$$$',
});
connection.connect(function(err){
console.log('connected as id ' + connection.threadId);
});
connection.query(sql1, function(err, result) {
if (err) throw err;
for (var i = 0, len = result.length; i < len; i++) {
var db_name = result[i].db_name;
console.log(db_name);
connection.query(sql2.replace("{0}",db_name), function(err, result) {
if (err) throw err;
console.log(db_name+','+result[0].solution); //Here db_name is showed only the last one.
});
};
connection.end();
});
i advice a two step solution to this problem:
use connection pooling
var pool = mysql.createPool({
host : 'xxxxx',
user : 'xxxxx',
password : 'xxxxx',
connectionLimit : 100
});
pool can do auto connection, so don't connect to your db, just
pool.query(sql,function(err,res){})
this way you use one connection for each query, which will be closed automatically after using it.
use async await for asyncronous sequential queries.
for that create a getResult function which returns a promise
function getResult(sql){
return new Promise(function(resolve,reject){
pool.query(sql, function(err, result){
if(err){
reject(err)
}else{
resolve(result)
}
})
})
}
then you can await each query in the loop
pool.query(sql1, async function(err, result) {
if (err) throw err;
for (var i = 0; i < result.length; i++) {
var db_name = result[i].db_name;
console.log(db_name);
var sql = sql2.replace("{0}",db_name)
var res = await getResult(sql)
console.log(db_name+','+res[0].solution); //Here db_name is showed only the last one.
};
pool.end()
});
P.S.: async await is a feature of the upcomming node 8.0.0 release in april. for node 7.x you will have to start your script with a commandline switch
node --harmony-async-await yourscript.js
Have you verify the content of result ?
console.log(result);
If it's okay try this :
solutions = results.map(result => {
let dbName = result.db_name;
let queryResult;
connection.query(sql2.replace("{0}", dbName), function(err, result) {
if (err) {
throw err;
} else {
queryResult = `${db_name}, ${result[0].solution}`
console.log(queryResult);
}
});
return queryResult;
})
console.log(solutions);
However, try to use a ORM or a sql parser for your query !
Try this one :)
https://hiddentao.com/squel/
I'm new to Node.js. I have a function 'getFromDb' that accesses a mysql database and returns a json file with some data. What if I have an array of query data and I want to call the same function through a for loop to get a json file for each element of the array?
var http = require('http');
http.createServer(function(req, res) {
console.log('Receving request...');
var callback = function(err, result) {
res.setHeader('Content-disposition', 'attachment; filename=' + queryData+ '.json');
res.writeHead(200, {
'Content-Type' : 'x-application/json'
});
console.log('json:', result);
res.end(result);
};
getFromDb(callback, queryData);}
).listen(9999);
function getFromDb(callback, queryData){
var mysql = require('mysql');
var connection = mysql.createConnection({
host : 'localhost',
user : 'xxxx',
password : 'xxxx',
database : 'xxxx',
port: 3306
});
connection.connect();
var json = '';
var data = queryData + '%';
var query = 'SELECT * FROM TABLE WHERE POSTCODE LIKE "' + data + '"';
connection.query(query, function(err, results, fields) {
if (err)
return callback(err, null);
console.log('The query-result is: ', results);
// wrap result-set as json
json = JSON.stringify(results);
/***************
* Correction 2: Nest the callback correctly!
***************/
connection.end();
console.log('JSON-result:', json);
callback(null, json);
});
}
You could use the async library for node for this. That library has many functions that make asynchronous programming in NodeJS much easier. The "each" or "eachSeries" functions would work. "each" would make all the calls to mysql at once time, while "eachSeries" would wait for the previous call to finish. You could use that inside your getFromDB method for your array.
See:
https://github.com/caolan/async#each
var http = require('http'),
async = require('async');
http.createServer(function(req, res) {
console.log('Receving request...');
var callback = function(err, result) {
res.setHeader('Content-disposition', 'attachment; filename=' + queryData+ '.json');
res.writeHead(200, {
'Content-Type' : 'x-application/json'
});
console.log('json:', result);
res.end(result);
};
getFromDb(callback, queryData);}
).listen(9999);
function getFromDb(callback, queryData){
var mysql = require('mysql');
var connection = mysql.createConnection({
host : 'localhost',
user : 'xxxx',
password : 'xxxx',
database : 'xxxx',
port: 3306
});
connection.connect();
var arrayOfQueryData = ["query1", "query2", "query3", "query4", "query5"];
var jsonResults = [];
async.each(arrayOfQueryData, function (queryData, cb) {
var data = queryData + '%';
var query = 'SELECT * FROM TABLE WHERE POSTCODE LIKE "' + data + '"';
connection.query(query, function(err, results, fields) {
if (err)
return cb(err);
console.log('The query-result is: ', results);
// wrap result-set as json
var json = JSON.stringify(results);
console.log('JSON-result:', json);
jsonResults.push(json);
cb();
});
}, function (err) {
connection.end();
// callbacks from getFromDb
if (err) {
callback(err);
}
else {
callback(null,jsonResults);
}
});
}
use async module. it is the best one. If u dont want to add new module try following;
var count = 0;
array.forEach(function(element) { //array of the data that is to be used to call mysql
++count; //increase counter for each service call
async.db.call(element, callback); //the async task
}
var data = [];
function callback(err, resp) {
--count;//subtract for each completion
data.push(resp)
if(count == 0) { //return data when all is complete
return data;
}
}
I would recommend the async module though. it is very good practice and useful.