Node micro-service continuously querying MySQL DB

I am new to Node.js and I am trying to create a simple micro-service that continuously polls records from a database, executes asynchronous jobs on those records, and updates the state of those records in the database once each job is done.
Basically this is my inner query and loop:
const con = mysql.createConnection({
  host: 'localhost',
  user: 'user',
  password: 'password',
  database: 'sitepoint'
});

con.query('SELECT * FROM records where flag = 0', (err, rows) => {
  if (err) throw err;
  rows.forEach((row) => {
    someFunction(row, function (result, err) {
      if (err) throw err;
      // Update record in db
    });
  });
});
This is a rough idea of what I'm doing. My issue is that I want to run this continuously on a set interval, say every minute, but I want the interval to start counting only after the last row from the query has been processed. In other words, I want to wait until all fetched rows are processed before scheduling the next run. What are my options in Node.js?

I'd suggest using a simple pattern found in plenty of servers, where jobs queue themselves. You get the benefit that the interval can change each run, and it is the interval between job completions that is fixed, rather than the interval between job starts.
function getQueryInterval() {
  // Could read from a DB, Redis, etc.
  return 60000;
}

function processRow(row) {
  /* Do some good stuff with the row. */
  return Promise.resolve('OK');
}
function runQuery() {
  console.log(new Date().toISOString(), 'runQuery: Running..');
  // Return a master promise; it only resolves when everything is complete,
  // and rejects as soon as an error is encountered.
  return new Promise((resolve, reject) => {
    con.query('SELECT * FROM records where flag = 0', (err, rows) => {
      if (err) return reject(err);
      // Build the per-row promises synchronously so Promise.all sees every job.
      // Each promise wraps the caller's callback-style someFunction and then runs processRow.
      const processPromises = rows.map((row) => new Promise((res, rej) => {
        someFunction(row, function (result, err) {
          if (err) return rej(err);
          // Update record in db, then hand the row to processRow.
          res(processRow(row));
        });
      }));
      // Only resolve when all records are processed.
      Promise.all(processPromises).then(resolve).catch(reject);
    });
  });
}
async function runQueryAndQueueNext() {
  try {
    await runQuery();
  } catch (err) {
    console.error(new Date().toISOString(), 'runQueryAndQueueNext: Error occurred: ', err);
  }
  console.log(new Date().toISOString(), 'runQueryAndQueueNext: Query complete, queuing next in ' + getQueryInterval() + ' ms');
  setTimeout(runQueryAndQueueNext, getQueryInterval());
}

runQueryAndQueueNext();

Related

node.js mysql result into a variable

I've been using mountebank to do some stubbing for performance testing and it's an awesome tool. The functional teams have asked if it can be repurposed to support functional testing, and I said I'd have a look.
What I want to achieve is to select an account number and its account balance from a MySQL database, and then return the balance to the client (in this case a JMeter harness).
function (request, state, logger) {
  logger.info('GBG - getAccountBalance');
  var mysql = require('mysql');
  var result = '';
  var con = mysql.createConnection({
    host: "localhost",
    user: "user",
    password: "password",
    database: "customer"
  });
  con.connect(function(err) {
    if (err) throw err;
    console.log("Connected!");
  });
  con.query('select * from accounts', function (err, rows, fields) {
    if (err) throw err;
    console.log(rows);
    console.log('accountNumber is : ', rows[0].accountNumber);
    result = rows[0].accountNumber;
  });
  console.log('result is : ', result);
  var response = result;
  return {
    headers: {
      'Content-Type': 'application/xml',
      'Connection': 'Keep-Alive'
    },
    body: response
  };
}
The result of the console log is:
result is :
Connected!
[ RowDataPacket { accountNumber: 777777, accountBalance: 777 } ]
accountNumber is : 777777
Not sure what I'm doing wrong, or why the 'result is :' line comes up first despite appearing later in the code.
Any advice appreciated.
Full disclosure, I've been using mountebank for about two weeks so I'm a real beginner.
The functions you pass to connect and query are callbacks, and they only execute after the asynchronous operation itself is done. So your code would look like:
con.connect(function(err) {
  if (err) throw err;
  console.log("Connected!");
  con.query('select * from accounts', function (err, rows, fields) {
    if (err) throw err;
    console.log(rows);
    console.log('accountNumber is : ', rows[0].accountNumber);
    result = rows[0].accountNumber;
    console.log('result is : ', result);
    var response = result;
  });
});
and so on, but you have just introduced callback hell into your code.
The async module is your friend.
EDIT:
here is an example:
var async = require('async');

async.waterfall([
  function (callback) {
    // do some async function here
    con.connect(function (err) {
      if (err) throw err;
      console.log("Connected!");
      // call this when you are done
      // you can even pass a param to the next function
      callback(null, true);
    });
  },
  function (isConnected, callback1) {
    if (!isConnected) {
      console.log("Connection failed! Skipping Query...");
      return callback1(null, "Error");
    }
    // do another async function here:
    con.query('select * from accounts', function (err, rows, fields) {
      if (err) throw err;
      console.log(rows);
      console.log('accountNumber is : ', rows[0].accountNumber);
      result = rows[0].accountNumber;
      callback1(null, "Complete");
    });
  }
], function (err, result) {
  if (result == "Error") {
    console.log("Something went wrong!");
  }
  if (result == "Complete") {
    console.log("Done!");
  }
  return 0;
});
Note: I haven't written JS for a while; this was written off of some existing code and hasn't been tested. Also, Promises are something that would help here, but I haven't looked into them personally. Bluebird is a library for that.
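For completeness, here is a rough, untested sketch of the Promise-based alternative mentioned above, wrapping the same connect and query calls in plain Promises (no extra library needed):
new Promise(function (resolve, reject) {
  con.connect(function (err) {
    if (err) reject(err); else resolve();
  });
})
  .then(function () {
    return new Promise(function (resolve, reject) {
      con.query('select * from accounts', function (err, rows) {
        if (err) reject(err); else resolve(rows);
      });
    });
  })
  .then(function (rows) {
    console.log('accountNumber is : ', rows[0].accountNumber);
  })
  .catch(function (err) {
    console.log(err);
  });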
The simplest way to get data from a MySQL database is using a Promise and async/await.
You can get data dynamically by providing an id to the SQL query.
With the following code snippet, the query finishes executing before the rest of the process continues, so the response is only sent after the query is fully done (otherwise the response is sometimes sent first, before the query completes).
async function getData(customerId) {
  // Wrap the callback-style query in a Promise so it can actually be awaited.
  let sql = 'SELECT * FROM customer_info WHERE customerID = ?';
  return new Promise((resolve, reject) => {
    connection.query(sql, [customerId], (err, result) => {
      if (err) return reject(err);
      resolve({
        CustomerId: result[0].customerID,
        FirstName: result[0].FirstName,
        LastName: result[0].LastName
      });
    });
  });
}

function connectToDB(customerId) {
  return getData(customerId);
}

app.get('/customer/:id', (req, res) => {
  let customerId = req.params.id;
  // Caller function awaits the query before sending the response.
  async function callerFun() {
    const data = await connectToDB(customerId); // data now holds the customer record
    res.send("Execution Done");
  }
  callerFun();
});

Store mysql query rows in variable for later use

I'm doing a monitoring system project in which I have Arduino sensor data being sent to a Node.js server (through GET requests) and then stored in a MySQL DB.
Whenever I successfully send data to the server, it connects to the MySQL DB and queries the last 5 received records to do some processing.
Therefore, I need to store the rows of those 5 records in a variable for later use, meaning I have to get the rows from a connection.query into a variable.
I read that the reason I'm not able to do this is that Node.js is asynchronous. So my questions are:
Is it possible to do the described tasks the way I'm trying?
If not, is there any other way to do so?
I'm not putting the whole code here, but I'm running a separate test that also doesn't run properly. Here it is:
var mysql = require('mysql');

var con = mysql.createConnection({
  host    : "127.0.0.1",
  user    : "root",
  password: "xxxx",
  database: "mydb",
  port    : 3306
});

var queryString = "SELECT id, temp1, temp2, temp3, temp4, level_ice_bank, flow FROM tempdata ORDER BY id DESC LIMIT 5";

con.connect(function(err) {
  if (err) throw err;
});

var result_arr = [];
function setValue (value) {
  result_arr = value;
}

con.query(queryString, function (err, rows, fields) {
  if (err) throw err;
  else {
    //console.log(rows);
    setValue(rows);
  }
});
console.log(result_arr);
It logs:
[]
But if I uncomment console.log(rows); it logs what I need to store in the variable result_arr.
Thanks in advance to all.
You're seeing this behaviour because con.query(...) is an asynchronous function. That means that:
console.log(result_arr);
Runs before:
con.query(queryString, function (err, rows, fields) {
  if (err) throw err;
  else {
    //console.log(rows);
    setValue(rows);
  }
});
(Specifically, the setValue(rows) call)
To fix this in your example, you can just do:
con.query(queryString, function (err, rows, fields) {
  if (err) throw err;
  else {
    setValue(rows);
    console.log(result_arr);
  }
});
If you want to do more than just log the data, then you can call a function which depends on result_arr from the con.query callback, like this:
con.query(queryString, function (err, rows, fields) {
  if (err) throw err;
  else {
    setValue(rows);
    doCleverStuffWithData();
  }
});

function doCleverStuffWithData() {
  // Do something with result_arr
}

Use promise to process MySQL return value in node.js

I have a Python background and am currently migrating to Node.js. I have trouble adjusting to Node.js due to its asynchronous nature.
For example, I am trying to return a value from a MySQL function.
function getLastRecord(name)
{
  var connection = getMySQL_connection();

  var query_str =
    "SELECT name " +
    "FROM records " +
    "WHERE (name = ?) " +
    "LIMIT 1 ";

  var query_var = [name];

  var query = connection.query(query_str, query_var, function (err, rows, fields) {
    //if (err) throw err;
    if (err) {
      //throw err;
      console.log(err);
      logger.info(err);
    }
    else {
      //console.log(rows);
      return rows;
    }
  }); //var query = connection.query(query_str, function (err, rows, fields) {
}
var rows = getLastRecord('name_record');
console.log(rows);
After some reading up, I realize the above code cannot work and that I need to return a promise, due to Node.js's asynchronous nature. I cannot write Node.js code like Python. How do I convert getLastRecord() to return a promise, and how do I handle the returned value?
In fact, what I want to do is something like this:
if (getLastRecord() > 20)
{
console.log("action");
}
How can this be done in node.js in a readable way?
I would like to see how promises can be implemented in this case using bluebird.
This is gonna be a little scattered, forgive me.
First, assuming this code uses the mysql driver API correctly, here's one way you could wrap it to work with a native promise:
function getLastRecord(name)
{
  return new Promise(function(resolve, reject) {
    // The Promise constructor should catch any errors thrown on
    // this tick. Alternately, try/catch and reject(err) on catch.
    var connection = getMySQL_connection();

    var query_str =
      "SELECT name " +
      "FROM records " +
      "WHERE (name = ?) " +
      "LIMIT 1 ";

    var query_var = [name];

    connection.query(query_str, query_var, function (err, rows, fields) {
      // Call reject on error states,
      // call resolve with results
      if (err) {
        return reject(err);
      }
      resolve(rows);
    });
  });
}
getLastRecord('name_record').then(function(rows) {
  // now you have your rows, you can see if there are <20 of them
}).catch((err) => setImmediate(() => { throw err; })); // Throw async to escape the promise chain
So one thing: you still have callbacks. Callbacks are just functions that you hand to something to call at some point in the future, with arguments of its choosing. So the function argument in xs.map(fn), the (err, result) functions seen in Node, and the promise result and error handlers are all callbacks. This is somewhat confused by people referring to one specific kind of callback as "callbacks": the (err, result) style used in Node core in what's called "continuation-passing style", sometimes called "nodebacks" by people who don't really like them.
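As a tiny illustration of the three flavours of callback mentioned above (this snippet is just an aside, not part of the original code):
const fs = require('fs');

[1, 2, 3].map(x => x * 2);                              // a plain callback handed to map
fs.readFile('file.txt', (err, data) => { /* ... */ });  // a node-style (err, result) callback
Promise.resolve(42).then(result => { /* ... */ });      // a promise result handler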
For now, at least (async/await is coming eventually), you're pretty much stuck with callbacks, regardless of whether you adopt promises or not.
Also, I'll note that promises aren't immediately, obviously helpful here, as you still have a callback. Promises only really shine when you combine them with Promise.all and promise accumulators a la Array.prototype.reduce. But they do shine sometimes, and they are worth learning.
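For example, here is a rough sketch (not part of the original answer) of where Promise.all starts to pay off, fetching several records concurrently with the promise-wrapped getLastRecord above (the record names are made up):
var names = ['record_a', 'record_b', 'record_c'];

Promise.all(names.map(getLastRecord))
  .then(function (results) {
    // results[i] is the rows array for names[i], in the same order
    results.forEach(function (rows) { console.log(rows.length); });
  })
  .catch(function (err) { console.log(err); });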
I have modified your code to use Q (an npm module) for promises.
I assumed the getLastRecord() function you specified in the above snippet works correctly.
You can refer to the following link to get hold of the Q module: Q documentation
var q = require('q');

function getLastRecord(name)
{
  var deferred = q.defer(); // Use Q

  var connection = getMySQL_connection();

  var query_str =
    "SELECT name " +
    "FROM records " +
    "WHERE (name = ?) " +
    "LIMIT 1 ";

  var query_var = [name];

  var query = connection.query(query_str, query_var, function (err, rows, fields) {
    //if (err) throw err;
    if (err) {
      //throw err;
      deferred.reject(err);
    }
    else {
      //console.log(rows);
      deferred.resolve(rows);
    }
  }); //var query = connection.query(query_str, function (err, rows, fields) {

  return deferred.promise;
}
// Call the method like this
getLastRecord('name_record')
  .then(function(rows){
    // This function gets called on success
    console.log(rows);
  }, function(error){
    // This function gets called on error
    console.log(error);
  });
I am still a bit new to Node.js and promises. I searched for a while for something that would meet my needs, and this is what I ended up using after combining several examples I found. I wanted the ability to acquire a connection per query and release it right after the query finishes (querySql), or to get a connection from the pool and use it within a Promise.using scope, releasing it whenever I like (getSqlConnection).
Using this method you can chain several queries one after another without nesting them (see the sketch after the usage example below).
db.js
var mysql = require('mysql');
var Promise = require("bluebird");

Promise.promisifyAll(mysql);
Promise.promisifyAll(require("mysql/lib/Connection").prototype);
Promise.promisifyAll(require("mysql/lib/Pool").prototype);

var pool = mysql.createPool({
  host: 'my_aws_host',
  port: '3306',
  user: 'my_user',
  password: 'my_password',
  database: 'db_name'
});

function getSqlConnection() {
  return pool.getConnectionAsync().disposer(function (connection) {
    console.log("Releasing connection back to pool");
    connection.release();
  });
}

function querySql(query, params) {
  return Promise.using(getSqlConnection(), function (connection) {
    console.log("Got connection from pool");
    if (typeof params !== 'undefined') {
      return connection.queryAsync(query, params);
    } else {
      return connection.queryAsync(query);
    }
  });
}

module.exports = {
  getSqlConnection: getSqlConnection,
  querySql: querySql
};
usage_route.js
var express = require('express');
var router = express.Router();
var dateFormat = require('dateformat');
var db = require('../my_modules/db');
var getSqlConnection = db.getSqlConnection;
var querySql = db.querySql;
var Promise = require("bluebird");

function retrieveUser(token) {
  var userQuery = "select id, email from users where token = ?";
  return querySql(userQuery, [token])
    .then(function (rows) {
      if (rows.length == 0) {
        return Promise.reject("did not find user");
      }
      var user = rows[0];
      return user;
    });
}

router.post('/', function (req, res, next) {
  Promise.resolve().then(function () {
    return retrieveUser(req.body.token);
  })
  .then(function (user) {
    var email = user.email;
    res.status(200).json({ "code": 0, "message": "success", "email": email });
  })
  .catch(function (err) {
    console.error("got error: " + err);
    if (err instanceof Error) {
      res.status(400).send("General error");
    } else {
      res.status(200).json({ "code": 1000, "message": err });
    }
  });
});

module.exports = router;
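To illustrate the "several queries without nesting" point from above, here is a hypothetical sketch (the orders table and its columns are made up) that chains two querySql calls at a single level:
function retrieveUserWithOrders(token) {
  var userQuery = "select id, email from users where token = ?";
  return querySql(userQuery, [token])
    .then(function (rows) {
      if (rows.length == 0) {
        return Promise.reject("did not find user");
      }
      // second query chained at the same level, no extra nesting
      return querySql("select * from orders where user_id = ?", [rows[0].id]);
    })
    .then(function (orders) {
      return orders;
    });
}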
I am still a bit new to Node, so maybe I missed something; let me know how it works out. Instead of you triggering asynchronous behaviour, Node just forces it on you, so you have to think ahead and plan for it.
const mysql = require('mysql');

const db = mysql.createConnection({
  host: 'localhost',
  user: 'user',
  password: 'password',
  database: 'database',
});

db.connect((err) => {
  // you should probably add reject instead of throwing error
  // reject(new Error());
  if (err) { throw err; }
  console.log('Mysql: Connected');
});

db.promise = (sql) => {
  return new Promise((resolve, reject) => {
    db.query(sql, (err, result) => {
      if (err) { reject(new Error()); }
      else { resolve(result); }
    });
  });
};
Here I am using the mysql module like normal, but I created a new function to handle the promise ahead of time, by adding it to the db const (you see this as "connection" in a lot of Node examples).
Now let's call a MySQL query using the promise.
db.promise("SELECT * FROM users WHERE username='john doe' LIMIT 1;")
.then((result)=>{
console.log(result);
}).catch((err)=>{
console.log(err);
});
What I have found this useful for is when you need to do a second query based on the first query.
db.promise("SELECT * FROM users WHERE username='john doe' LIMIT 1;")
.then((result)=>{
console.log(result);
var sql = "SELECT * FROM friends WHERE username='";
sql = result[0];
sql = "';"
return db.promise(sql);
}).then((result)=>{
console.log(result);
}).catch((err)=>{
console.log(err);
});
You should really use the mysql placeholder variables instead of concatenating strings (a short sketch follows below), but this should at least give you an example of using promises with the mysql module.
Also, with the above you can still continue to use db.query the normal way at any time inside these promises; it just works like normal.
Hope this helps with the triangle of death.
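As a rough sketch of the "use the mysql variables" suggestion above (keeping db.promise exactly as defined in this answer, and assuming the column is called username), the second query can be pre-formatted with mysql.format so the value is escaped for you:
const mysql = require('mysql');

db.promise("SELECT * FROM users WHERE username='john doe' LIMIT 1;")
  .then((result) => {
    // mysql.format substitutes and escapes the ? placeholder
    const sql = mysql.format("SELECT * FROM friends WHERE username = ?", [result[0].username]);
    return db.promise(sql);
  })
  .then((result) => console.log(result))
  .catch((err) => console.log(err));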
You don't need to use promises; you can use a callback function, something like this:
function getLastRecord(name, next)
{
  var connection = getMySQL_connection();

  var query_str =
    "SELECT name " +
    "FROM records " +
    "LIMIT 1 ";

  var query_var = [name];

  var query = connection.query(query_str, query_var, function (err, rows, fields) {
    //if (err) throw err;
    if (err) {
      //throw err;
      console.log(err);
      logger.info(err);
      next(err);
    }
    else {
      //console.log(rows);
      next(null, rows);
    }
  }); //var query = connection.query(query_str, function (err, rows, fields) {
}
getLastRecord('name_record', function(err, data) {
  if (err) {
    // handle the error
  } else {
    // handle your data
  }
});
Using the promise-mysql package, the logic is to chain promises using then(function(response){ your code }) and catch(function(response){ your code }) to catch errors from the "then" blocks preceding the catch block.
Following this logic, you pass query results along as objects or arrays with a return at the end of each block. The return hands the query results to the next block, where they show up as the function argument (here it is test1). Using this logic you can chain several MySQL queries along with whatever code is required to manipulate their results.
The connection object is created outside the chain (effectively global) because every object and variable created inside a block is local to that block. Don't forget that you can chain more "then" blocks.
var config = {
  host    : 'host',
  user    : 'user',
  password: 'pass',
  database: 'database',
};

var mysql = require('promise-mysql');
var connection;

let thename = ""; // which can also be an argument if you embed this code in a function

mysql.createConnection(config).then(function(conn){
  connection = conn;
  let test = connection.query('select name from records WHERE name=? LIMIT 1', [thename]);
  return test;
}).then(function(test1){
  console.log("test1" + JSON.stringify(test1)); // result of previous block
  var result = connection.query('select * from users'); // A second query if you want
  connection.end();
  connection = {};
  return result;
}).catch(function(error){
  if (connection && connection.end) connection.end();
  // logs out the error from the previous block (if there is any issue add a second catch behind this one)
  console.log(error);
});
To answer your initial question: How can this be done in node.js in a readable way?
There is a library called co, which gives you the possibility to write async code in a synchronous-looking workflow. Just have a look, and npm install co.
The problem you often face with that approach is that you do not get a Promise back from every library you'd like to use. So you either have to wrap it yourself (see the answer from @Joshua Holbrook) or look for a wrapper (for example: npm install mysql-promise).
(Btw: it's on the roadmap for ES7 to have native support for this type of workflow with the keywords async/await, but it's not yet in Node: see the Node feature list.)
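A rough, untested sketch of that co workflow, assuming getLastRecord has already been wrapped to return a Promise as in the other answers here:
const co = require('co');

co(function* () {
  // reads like synchronous code; yield waits for the promise to settle
  const rows = yield getLastRecord('name_record');
  console.log(rows); // put the "> 20" check from the question here
}).catch(function (err) { console.log(err); });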
This can be achieved quite simply, for example with bluebird, as you asked:
var Promise = require('bluebird');

function getLastRecord(name)
{
  return new Promise(function(resolve, reject){
    var connection = getMySQL_connection();

    var query_str =
      "SELECT name " +
      "FROM records " +
      "WHERE (name = ?) " +
      "LIMIT 1 ";

    var query_var = [name];

    var query = connection.query(query_str, query_var, function (err, rows, fields) {
      //if (err) throw err;
      if (err) {
        //throw err;
        console.log(err);
        logger.info(err);
        reject(err);
      }
      else {
        resolve(rows);
        //console.log(rows);
      }
    }); //var query = connection.query(query_str, function (err, rows, fields) {
  });
}

getLastRecord('name_record')
  .then(function(rows){
    if (rows.length > 20) { // rows is an array, so compare its length (or whichever field you actually mean)
      console.log("action");
    }
  })
  .error(function(e){ console.log("Error handler " + e); })
  .catch(function(e){ console.log("Catch handler " + e); });
This may be helpful for others, extending @Dillon Burnett's answer.
Using async/await and params:
db.promise = (sql, params) => {
  return new Promise((resolve, reject) => {
    db.query(sql, params, (err, result) => {
      if (err) { reject(new Error()); }
      else { resolve(result); }
    });
  });
};
module.exports = db;

async function connection(username) {
  const result = await db.promise("SELECT * FROM users WHERE username=?", [username]);
  return result;
}

Nested query in node js using mysql

I am trying the following code in Node.js using mysql, but I get the error "Cannot enqueue Query after invoking quit."
var mysql = require('mysql');

var connection = mysql.createConnection({
  host     : 'localhost',
  user     : 'USER',
  password : 'PASS',
  database : 'DB',
});

connection.connect();

var queryString = 'SELECT * FROM tbl_product';

connection.query(queryString, function(err, rows, fields) {
  if (err) throw err;
  for (var i in rows) {
    console.log('Product Name: ', rows[i].product_name);
    var emp_query = 'SELECT * FROM tbl_employer';
    connection.query(queryString, function(emp_err, emp_rows, emp_fields) {
      if (emp_err) throw emp_err;
      for (var e in emp_rows) {
        console.log('Employer Name: ', emp_rows[e].company_name);
      }
    });
  }
});

connection.end();
I see two problems in your code:
You're calling connection.end() synchronously, but your queries run in an asynchronous flow. You have to call connection.end() only when the second set of queries has finished.
You're using a regular for loop to run asynchronous calls (your outer loop).
To accomplish what you're trying to do, you have to account for those asynchronous scenarios. You could use promises, or a module like async, which provides a lot of methods for dealing with asynchronous flows, like async.each():
var async = require('async');

connection.query(queryString, function(err, rows, fields) {
  if (err) throw err;
  async.each(rows, function (row, callback) {
    console.log('Product Name: ', row.product_name);
    var emp_query = 'SELECT * FROM tbl_employer';
    connection.query(emp_query, function(emp_err, emp_rows, emp_fields) {
      if (emp_err) return callback(emp_err);
      for (var e in emp_rows) {
        console.log('Employer Name: ', emp_rows[e].company_name);
      }
      callback();
    });
  }, function (err) {
    // called once every row has been processed
    connection.end();
  });
});
Now it is guaranteed that connection.end() will only be called when all your queries have finished.
Remove the connection end call:
==> connection.end();
The problem is that connection.end() is triggered before your queries have finished. Try moving connection.end() to the end of the outer query's callback.
connection.query(queryString, function(err, rows, fields) {
  if (err) throw err;
  for (var i in rows) {
    console.log('Product Name: ', rows[i].product_name);
    var emp_query = 'SELECT * FROM tbl_employer';
    connection.query(emp_query, function(emp_err, emp_rows, emp_fields) {
      if (emp_err) throw emp_err;
      for (var e in emp_rows) {
        console.log('Employer Name: ', emp_rows[e].company_name);
      }
    });
  }
  connection.end();
});
Hope it will be useful for you.

node.js + mysql connection pooling

I'm trying to figure out how to structure my application to use MySQL in the most efficient way. I'm using the node-mysql module. Other threads here suggested using connection pooling, so I set up a little module, mysql.js:
var mysql = require('mysql');

var pool = mysql.createPool({
  host     : 'localhost',
  user     : 'root',
  password : 'root',
  database : 'guess'
});

exports.pool = pool;
Now whenever I want to query MySQL, I require this module and then query the database:
var mysql = require('../db/mysql').pool;

var test = function(req, res) {
  mysql.getConnection(function(err, conn){
    conn.query("select * from users", function(err, rows) {
      res.json(rows);
    });
  });
};
Is this a good approach? I couldn't really find many examples of using MySQL connections besides very simple ones where everything is done in the main app.js script, so I don't really know what the conventions / best practices are.
Should I always use connection.end() after each query? What if I forget it somewhere?
How do I rewrite the exports part of my mysql module to return just a connection, so I don't have to write getConnection() every time?
It's a good approach.
If you just want to get a connection, add the following code to the module where the pool is defined:
var getConnection = function(callback) {
  pool.getConnection(function(err, connection) {
    callback(err, connection);
  });
};

module.exports = getConnection;
You still have to write getConnection every time. But you could save the connection in the module the first time you get it.
Don't forget to release the connection when you are done using it:
connection.release();
You should avoid using pool.getConnection() if you can. If you call pool.getConnection(), you must call connection.release() when you are done using the connection. Otherwise, your application will get stuck waiting forever for connections to be returned to the pool once you hit the connection limit.
For simple queries, you can use pool.query(). This shorthand will automatically call connection.release() for you—even in error conditions.
function doSomething(cb) {
  pool.query('SELECT 2*2 "value"', (ex, rows) => {
    if (ex) {
      cb(ex);
    } else {
      cb(null, rows[0].value);
    }
  });
}
However, in some cases you must use pool.getConnection(). These cases include:
Making multiple queries within a transaction.
Sharing data objects such as temporary tables between subsequent queries.
If you must use pool.getConnection(), ensure you call connection.release() using a pattern similar to below:
function doSomething(cb) {
  pool.getConnection((ex, connection) => {
    if (ex) {
      cb(ex);
    } else {
      // Ensure that any call to cb releases the connection
      // by wrapping it.
      cb = (cb => {
        return function () {
          connection.release();
          cb.apply(this, arguments);
        };
      })(cb);
      connection.beginTransaction(ex => {
        if (ex) {
          cb(ex);
        } else {
          connection.query('INSERT INTO table1 ("value") VALUES (\'my value\');', ex => {
            if (ex) {
              cb(ex);
            } else {
              connection.query('INSERT INTO table2 ("value") VALUES (\'my other value\')', ex => {
                if (ex) {
                  cb(ex);
                } else {
                  connection.commit(ex => {
                    cb(ex);
                  });
                }
              });
            }
          });
        }
      });
    }
  });
}
I personally prefer to use Promises and a usePooledConnectionAsync() wrapper pattern. Combined with async/await, it makes it a lot harder to accidentally forget to release() the connection, because leaving the lexical scope turns into an automatic call to .release():
async function usePooledConnectionAsync(actionAsync) {
  const connection = await new Promise((resolve, reject) => {
    pool.getConnection((ex, connection) => {
      if (ex) {
        reject(ex);
      } else {
        resolve(connection);
      }
    });
  });
  try {
    return await actionAsync(connection);
  } finally {
    connection.release();
  }
}

async function doSomethingElse() {
  // Usage example:
  const result = await usePooledConnectionAsync(async connection => {
    const rows = await new Promise((resolve, reject) => {
      connection.query('SELECT 2*4 "value"', (ex, rows) => {
        if (ex) {
          reject(ex);
        } else {
          resolve(rows);
        }
      });
    });
    return rows[0].value;
  });
  console.log(`result=${result}`);
}
You will find this wrapper useful :)
var pool = mysql.createPool(config.db);

exports.connection = {
  query: function () {
    var queryArgs = Array.prototype.slice.call(arguments),
        events = [],
        eventNameIndex = {};

    pool.getConnection(function (err, conn) {
      if (err) {
        if (eventNameIndex.error) {
          eventNameIndex.error();
        }
      }
      if (conn) {
        var q = conn.query.apply(conn, queryArgs);
        q.on('end', function () {
          conn.release();
        });
        events.forEach(function (args) {
          q.on.apply(q, args);
        });
      }
    });

    return {
      on: function (eventName, callback) {
        events.push(Array.prototype.slice.call(arguments));
        eventNameIndex[eventName] = callback;
        return this;
      }
    };
  }
};
Require it, use it like this:
db.connection.query("SELECT * FROM `table` WHERE `id` = ? ", row_id)
  .on('result', function (row) {
    setData(row);
  })
  .on('error', function (err) {
    callback({error: true, err: err});
  });
I am using this base class connection with mysql:
"base.js"
var mysql = require("mysql");
var pool = mysql.createPool({
connectionLimit : 10,
host: Config.appSettings().database.host,
user: Config.appSettings().database.username,
password: Config.appSettings().database.password,
database: Config.appSettings().database.database
});
var DB = (function () {
function _query(query, params, callback) {
pool.getConnection(function (err, connection) {
if (err) {
connection.release();
callback(null, err);
throw err;
}
connection.query(query, params, function (err, rows) {
connection.release();
if (!err) {
callback(rows);
}
else {
callback(null, err);
}
});
connection.on('error', function (err) {
connection.release();
callback(null, err);
throw err;
});
});
};
return {
query: _query
};
})();
module.exports = DB;
Just use it like this:
var DB = require('../dal/base.js');

DB.query("select * from tasks", null, function (data, error) {
  callback(data, error);
});
When you are done with a connection, just call connection.release() and the connection will return to the pool, ready to be used again by someone else.
var mysql = require('mysql');
var pool = mysql.createPool(...);

pool.getConnection(function(err, connection) {
  // Use the connection
  connection.query('SELECT something FROM sometable', function (error, results, fields) {
    // And done with the connection.
    connection.release();
    // Handle error after the release.
    if (error) throw error;
    // Don't use the connection here, it has been returned to the pool.
  });
});
If you would like to close the connection and remove it from the pool, use connection.destroy() instead. The pool will create a new connection the next time one is needed.
Source: https://github.com/mysqljs/mysql
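For illustration, a minimal sketch of that destroy() case (the error.fatal check is just one plausible trigger, not from the quoted docs):
pool.getConnection(function (err, connection) {
  if (err) throw err;
  connection.query('SELECT 1', function (error) {
    if (error && error.fatal) {
      connection.destroy(); // drop this connection entirely; the pool creates a new one when needed
    } else {
      connection.release(); // return it to the pool as usual
    }
  });
});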
You can use this format, as I did:
const mysql = require('mysql');
const { HOST, USERNAME, PASSWORD, DBNAME, PORT } = process.env;

const conn = mysql.createPool({
  host: HOST,
  port: PORT,
  user: USERNAME,
  password: PASSWORD,
  database: DBNAME,
  debug: true // createPool takes a single options object; a second argument is ignored
});

conn.query('SELECT 1 + 1 AS solution', function (error, results, fields) {
  if (error) throw error;
  console.log('Db is connected - The solution is: ', results[0].solution);
});

module.exports = conn;
Using the standard mysql.createPool(), connections are lazily created by the pool. If you configure the pool to allow up to 100 connections but only ever use 5 simultaneously, only 5 connections will be made. However, if you configure it for 500 connections and use all 500, they will remain open for the duration of the process, even if they are idle!
This means that if your MySQL Server's max_connections is 510, your system will only have 10 MySQL connections available until your MySQL Server closes them (depending on what you have set wait_timeout to) or your application closes them! The only way to free them up is to manually close the connections via the pool instance or to close the pool.
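For reference, closing the pool manually with the plain mysql driver looks like this (a minimal sketch):
pool.end(function (err) {
  // all connections in the pool have been ended gracefully
});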
The mysql-connection-pool-manager module was created to fix this issue and automatically scale the number of connections depending on the load. Inactive connections are closed, and idle connection pools are eventually closed if there has not been any activity.
// Load modules
const PoolManager = require('mysql-connection-pool-manager');

// Options
const options = {
  ...example settings
};

// Initialising the instance
const mySQL = PoolManager(options);

// Accessing mySQL directly
var connection = mySQL.raw.createConnection({
  host     : 'localhost',
  user     : 'me',
  password : 'secret',
  database : 'my_db'
});

// Initialising connection
connection.connect();

// Performing query
connection.query('SELECT 1 + 1 AS solution', function (error, results, fields) {
  if (error) throw error;
  console.log('The solution is: ', results[0].solution);
});

// Ending connection
connection.end();
Ref: https://www.npmjs.com/package/mysql-connection-pool-manager
I always use connection.release() after pool.getConnection(), like this:
pool.getConnection(function (err, connection) {
  connection.release();
  if (!err)
  {
    console.log('*** Mysql Connection established with ', config.database, ' and connected as id ' + connection.threadId);
    // CHECKING USERNAME EXISTENCE
    email = receivedValues.email;
    connection.query('SELECT * FROM users WHERE email = ?', [email],
      function (err, rows) {
        if (!err)
        {
          if (rows.length == 1)
          {
            if (bcrypt.compareSync(req.body.password, rows[0].password))
            {
              var alldata = rows;
              var userid = rows[0].id;
              var tokendata = (receivedValues, userid);
              var token = jwt.sign(receivedValues, config.secret, {
                expiresIn: 1440 * 60 * 30 // expires in 1440 minutes
              });
              console.log("*** Authorised User");
              res.json({
                "code": 200,
                "status": "Success",
                "token": token,
                "userData": alldata,
                "message": "Authorised User!"
              });
              logger.info('url=', URL.url, 'Responce=', 'User Signin, username', req.body.email, 'User Id=', rows[0].id);
              return;
            }
            else
            {
              console.log("*** Redirecting: Unauthorised User");
              res.json({"code": 200, "status": "Fail", "message": "Unauthorised User!"});
              logger.error('*** Redirecting: Unauthorised User');
              return;
            }
          }
          else
          {
            console.error("*** Redirecting: No User found with provided name");
            res.json({
              "code": 200,
              "status": "Error",
              "message": "No User found with provided name"
            });
            logger.error('url=', URL.url, 'No User found with provided name');
            return;
          }
        }
        else
        {
          console.log("*** Redirecting: Error for selecting user");
          res.json({"code": 200, "status": "Error", "message": "Error for selecting user"});
          logger.error('url=', URL.url, 'Error for selecting user', req.body.email);
          return;
        }
      });
    connection.on('error', function (err) {
      console.log('*** Redirecting: Error Creating User...');
      res.json({"code": 200, "status": "Error", "message": "Error Checking Username Duplicate"});
      return;
    });
  }
  else
  {
    Errors.Connection_Error(res);
  }
});