I'm trying to figure out how to structure my application to use MySQL in the most efficient way. I'm using the node-mysql module. Other threads here suggested using connection pooling, so I set up a little module mysql.js:
var mysql = require('mysql');

var pool = mysql.createPool({
  host     : 'localhost',
  user     : 'root',
  password : 'root',
  database : 'guess'
});

exports.pool = pool;
Now whenever I want to query MySQL, I require this module and then query the database:
var mysql = require('../db/mysql').pool;

var test = function(req, res) {
  mysql.getConnection(function(err, conn) {
    conn.query("select * from users", function(err, rows) {
      res.json(rows);
    });
  });
};
Is this a good approach? I couldn't really find many examples of using mysql connections besides very simple ones where everything is done in the main app.js script, so I don't really know what the conventions / best practices are.
Should I always use connection.end() after each query? What if I forget about it somewhere?
How do I rewrite the exports part of my mysql module to return just a connection, so I don't have to write getConnection() every time?
It's a good approach.
If you just want to get a connection, add the following code to the module where your pool is defined:
var getConnection = function(callback) {
  pool.getConnection(function(err, connection) {
    callback(err, connection);
  });
};

module.exports = getConnection;
You still have to write getConnection every time. But you could save the connection in the module the first time you get it.
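A minimal sketch of that idea (my illustration, not from the original answer; note the caveat that every caller then shares a single connection):

var cachedConnection = null;

var getConnection = function(callback) {
  // reuse the connection saved on the first call
  if (cachedConnection) {
    return callback(null, cachedConnection);
  }
  pool.getConnection(function(err, connection) {
    if (err) return callback(err);
    cachedConnection = connection;
    callback(null, connection);
  });
};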
Don't forget to release the connection when you are done using it:
connection.release();
You should avoid using pool.getConnection() if you can. If you call pool.getConnection(), you must call connection.release() when you are done using the connection. Otherwise, your application will get stuck waiting forever for connections to be returned to the pool once you hit the connection limit.
For simple queries, you can use pool.query(). This shorthand will automatically call connection.release() for you—even in error conditions.
function doSomething(cb) {
  pool.query('SELECT 2*2 "value"', (ex, rows) => {
    if (ex) {
      cb(ex);
    } else {
      cb(null, rows[0].value);
    }
  });
}
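Calling it might then look like this (my usage sketch):

doSomething((ex, value) => {
  if (ex) {
    console.error(ex);
  } else {
    console.log('value =', value); // prints: value = 4
  }
});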
However, in some cases you must use pool.getConnection(). These cases include:
Making multiple queries within a transaction.
Sharing data objects such as temporary tables between subsequent queries.
If you must use pool.getConnection(), ensure you call connection.release() using a pattern similar to below:
function doSomething(cb) {
  pool.getConnection((ex, connection) => {
    if (ex) {
      cb(ex);
    } else {
      // Ensure that any call to cb releases the connection
      // by wrapping it. Note: on error this releases the
      // connection without an explicit rollback.
      cb = (cb => {
        return function () {
          connection.release();
          cb.apply(this, arguments);
        };
      })(cb);
      connection.beginTransaction(ex => {
        if (ex) {
          cb(ex);
        } else {
          // MySQL identifiers are quoted with backticks, not double quotes
          connection.query('INSERT INTO table1 (`value`) VALUES (\'my value\')', ex => {
            if (ex) {
              cb(ex);
            } else {
              connection.query('INSERT INTO table2 (`value`) VALUES (\'my other value\')', ex => {
                if (ex) {
                  cb(ex);
                } else {
                  connection.commit(ex => {
                    cb(ex);
                  });
                }
              });
            }
          });
        }
      });
    }
  });
}
I personally prefer to use Promises and a use-style helper, usePooledConnectionAsync() below. This pattern combined with async/await makes it a lot harder to accidentally forget to release() the connection, because it turns your lexical scoping into an automatic call to .release():
async function usePooledConnectionAsync(actionAsync) {
  const connection = await new Promise((resolve, reject) => {
    pool.getConnection((ex, connection) => {
      if (ex) {
        reject(ex);
      } else {
        resolve(connection);
      }
    });
  });
  try {
    return await actionAsync(connection);
  } finally {
    connection.release();
  }
}
async function doSomethingElse() {
  // Usage example:
  const result = await usePooledConnectionAsync(async connection => {
    const rows = await new Promise((resolve, reject) => {
      connection.query('SELECT 2*4 "value"', (ex, rows) => {
        if (ex) {
          reject(ex);
        } else {
          resolve(rows);
        }
      });
    });
    return rows[0].value;
  });
  console.log(`result=${result}`);
}
You will find this wrapper useful :)
var pool = mysql.createPool(config.db);

exports.connection = {
  query: function () {
    var queryArgs = Array.prototype.slice.call(arguments),
        events = [],
        eventNameIndex = {};

    pool.getConnection(function (err, conn) {
      if (err) {
        // forward the pool error to the registered 'error' handler, if any
        if (eventNameIndex.error) {
          eventNameIndex.error(err);
        }
        return;
      }
      var q = conn.query.apply(conn, queryArgs);
      q.on('end', function () {
        conn.release();
      });
      events.forEach(function (args) {
        q.on.apply(q, args);
      });
    });

    return {
      on: function (eventName, callback) {
        events.push(Array.prototype.slice.call(arguments));
        eventNameIndex[eventName] = callback;
        return this;
      }
    };
  }
};
Require it, use it like this:
db.connection.query("SELECT * FROM `table` WHERE `id` = ?", row_id)
  .on('result', function (row) {
    setData(row);
  })
  .on('error', function (err) {
    callback({error: true, err: err});
  });
I am using this base class for my MySQL connection:
"base.js"
var mysql = require("mysql");

// Config is assumed to be required elsewhere in this module.
var pool = mysql.createPool({
  connectionLimit: 10,
  host: Config.appSettings().database.host,
  user: Config.appSettings().database.username,
  password: Config.appSettings().database.password,
  database: Config.appSettings().database.database
});
var DB = (function () {
  function _query(query, params, callback) {
    pool.getConnection(function (err, connection) {
      if (err) {
        // no connection was handed out, so there is nothing to release
        callback(null, err);
        return;
      }
      connection.query(query, params, function (err, rows) {
        connection.release();
        if (!err) {
          callback(rows);
        } else {
          callback(null, err);
        }
      });
      connection.on('error', function (err) {
        connection.release();
        callback(null, err);
      });
    });
  }

  return {
    query: _query
  };
})();

module.exports = DB;
Just use it like this:
var DB = require('../dal/base.js');

DB.query("select * from tasks", null, function (data, error) {
  callback(data, error);
});
When you are done with a connection, just call connection.release() and the connection will return to the pool, ready to be used again by someone else.
var mysql = require('mysql');
var pool = mysql.createPool(...);

pool.getConnection(function(err, connection) {
  // Use the connection
  connection.query('SELECT something FROM sometable', function (error, results, fields) {
    // And done with the connection.
    connection.release();
    // Handle error after the release.
    if (error) throw error;
    // Don't use the connection here, it has been returned to the pool.
  });
});
If you would like to close the connection and remove it from the pool, use connection.destroy() instead. The pool will create a new connection the next time one is needed.
Source: https://github.com/mysqljs/mysql
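For completeness, a sketch of when you might prefer destroy() over release() (my own example, not from the README):

pool.getConnection(function (err, connection) {
  if (err) throw err;
  connection.query('SELECT 1', function (error) {
    if (error && error.fatal) {
      connection.destroy(); // discard the broken connection entirely
    } else {
      connection.release(); // return a healthy connection to the pool
    }
  });
});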
You can use this format, as I did:
const mysql = require('mysql');

const { HOST, USERNAME, PASSWORD, DBNAME, PORT } = process.env;

// createPool takes a single options object, so debug belongs inside it
const conn = mysql.createPool({
  host: HOST,
  user: USERNAME,
  password: PASSWORD,
  database: DBNAME,
  port: PORT,
  debug: true
});

conn.query('SELECT 1 + 1 AS solution', function (error, results, fields) {
  if (error) throw error;
  console.log('Db is connected - The solution is: ', results[0].solution);
});

module.exports = conn;
Using the standard mysql.createPool(), connections are lazily created by the pool. If you configure the pool to allow up to 100 connections but only ever use 5 simultaneously, only 5 connections will be made. However, if you configure it for 500 connections and use all 500, they will remain open for the duration of the process, even if they are idle!
This means if your MySQL server's max_connections is 510, your system will only have 10 MySQL connections available until your MySQL server closes them (depending on what you have set wait_timeout to) or your application closes them! The only way to free them up is to manually close the connections via the pool instance or to close the pool.
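For example, closing the whole pool gracefully looks like this (pending queries still run, then every connection is ended):

pool.end(function (err) {
  if (err) {
    console.error('Error while closing the pool:', err);
  }
});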
The mysql-connection-pool-manager module was created to fix this issue and automatically scale the number of connections depending on the load. Inactive connections are closed, and idle connection pools are eventually closed if there has not been any activity.
// Load modules
const PoolManager = require('mysql-connection-pool-manager');

// Options
const options = {
  ...example settings
};

// Initialising the instance
const mySQL = PoolManager(options);

// Accessing mySQL directly
var connection = mySQL.raw.createConnection({
  host     : 'localhost',
  user     : 'me',
  password : 'secret',
  database : 'my_db'
});

// Initialising connection
connection.connect();

// Performing query
connection.query('SELECT 1 + 1 AS solution', function (error, results, fields) {
  if (error) throw error;
  console.log('The solution is: ', results[0].solution);
});

// Ending connection
connection.end();
Ref: https://www.npmjs.com/package/mysql-connection-pool-manager
I always call connection.release() after I am done with the connection from pool.getConnection(), like:

pool.getConnection(function (err, connection) {
  if (!err) {
    console.log('*** Mysql Connection established with ', config.database, ' and connected as id ' + connection.threadId);

    // CHECKING USERNAME EXISTENCE
    email = receivedValues.email;
    connection.query('SELECT * FROM users WHERE email = ?', [email],
      function (err, rows) {
        // query finished: return the connection to the pool
        connection.release();
        if (!err) {
          if (rows.length == 1) {
            if (bcrypt.compareSync(req.body.password, rows[0].password)) {
              var alldata = rows;
              var token = jwt.sign(receivedValues, config.secret, {
                expiresIn: 1440 * 60 * 30 // expires in 1440 minutes
              });
              console.log("*** Authorised User");
              res.json({
                "code": 200,
                "status": "Success",
                "token": token,
                "userData": alldata,
                "message": "Authorised User!"
              });
              logger.info('url=', URL.url, 'Response=', 'User Signin, username', req.body.email, 'User Id=', rows[0].id);
              return;
            } else {
              console.log("*** Redirecting: Unauthorised User");
              res.json({"code": 200, "status": "Fail", "message": "Unauthorised User!"});
              logger.error('*** Redirecting: Unauthorised User');
              return;
            }
          } else {
            console.error("*** Redirecting: No User found with provided name");
            res.json({
              "code": 200,
              "status": "Error",
              "message": "No User found with provided name"
            });
            logger.error('url=', URL.url, 'No User found with provided name');
            return;
          }
        } else {
          console.log("*** Redirecting: Error for selecting user");
          res.json({"code": 200, "status": "Error", "message": "Error for selecting user"});
          logger.error('url=', URL.url, 'Error for selecting user', req.body.email);
          return;
        }
      });

    connection.on('error', function (err) {
      console.log('*** Redirecting: Error Creating User...');
      res.json({"code": 200, "status": "Error", "message": "Error Checking Username Duplicate"});
      return;
    });
  } else {
    Errors.Connection_Error(res);
  }
});
Related
I have a lambda function that connects to mysql and runs a set of queries, but I actually have a sequence of mysql queries that need to run one after another. I.e., the value of one query is used in the next query, etc.
Currently, I have a bunch of callbacks to achieve this, but this is leading to "callback hell". How would I rewrite this to use async / await?
My code is actually split into two files. The first file does an initial query, and then the value is passed into a function of the second file. Please note that the mysql node module is included but not shown here. The AWS API Gateway calls index.js:
// index.js
var mysql = require('mysql'); // from node_modules
var config = require('./config.json');
var dashboard = require('./dashboard.js');

var pool = mysql.createPool({
  host     : config.dbhost,
  user     : config.dbuser,
  password : config.dbpassword,
  database : config.dbname
});

exports.handler = (event, context, callback) => {
  context.callbackWaitsForEmptyEventLoop = false;
  pool.getConnection(function(err, connection) {
    // check for mysql connection error first
    if (err) {
      throw err;
    }
    let qry = "select id from some_table where some_field = ?";
    let someval = event.queryStringParameters.someval;
    connection.query(qry, [someval], function(error, result) {
      if (error) {
        throw error;
      } else {
        dashboard.processRequest(connection, callback, event, result[0].id);
      }
    });
  });
};
// dashboard.js
module.exports = {
  jsonResponse: function(results) {
    return {
      "statusCode": 200,
      "body": JSON.stringify({ results }),
      "isBase64Encoded": false,
      "headers": {
        "Access-Control-Allow-Origin": "*"
      }
    };
  },
  processRequest: function(connection, callback, event, val) {
    var self = this; // `this` is not the module inside the query callbacks below
    let qry = "update first_table set some_field = ?";
    connection.query(qry, [val], function(error, results) {
      // return to client if error
      if (error) {
        callback(null, self.jsonResponse(error));
      } else {
        // assume that this table must be updated AFTER the previous statement
        qry = "select id from second_table where some_field = ?";
        connection.query(qry, [val], function(error1, results1) {
          // return to client if error
          if (error1) {
            return callback(null, self.jsonResponse(error1));
          }
          qry = "update third_table set some_field = ? where id = ?";
          connection.query(qry, [val, results1[0].id], function(error2, results2) {
            // release connection when all queries are completed
            connection.release();
            if (error2) {
              callback(null, self.jsonResponse(error2));
            } else {
              callback(null, self.jsonResponse(results2));
            }
          });
        });
      }
    });
  }
};
It was suggested to me that something like the code below might work. Unfortunately, it does not. I was curious to know why using try...catch blocks in the way shown below does not work, and whether it is the same as what you've shown, just written differently.
// index.js
var mysql = require('mysql'); // from node_modules
var config = require('./config.json');
var dashboard = require('./dashboard.js');

var pool = mysql.createPool({
  host     : config.dbhost,
  user     : config.dbuser,
  password : config.dbpassword,
  database : config.dbname
});

exports.handler = (event, context, callback) => {
  context.callbackWaitsForEmptyEventLoop = false;
  pool.getConnection(function(err, connection) {
    // check for mysql connection error first
    if (err) {
      throw err;
    }
    let qry = "select id from users where username = ? limit 1;";
    let username = event.queryStringParameters.username;
    try {
      // note: `await` here is a syntax error because this callback is not
      // declared async, and connection.query() does not return a promise
      let res = await connection.query(qry, [event.queryStringParameters.username]);
      dashboard.processRequest(connection, callback, event, res[0].id);
    } catch (err) {
      console.log(err);
    }
  });
};
// dashboard.js
module.exports = {
  jsonResponse: function (results) {
    return {
      "statusCode": 200,
      "body": JSON.stringify({results}),
      "isBase64Encoded": false,
      "headers": {
        "Access-Control-Allow-Origin": "*"
      }
    };
  },
  processRequest: async function (connection, callback, event, val) {
    // connection.query() is callback-based, so these awaits do not wait
    let qry = "update first_table set some_field = ?";
    try {
      let results = await connection.query(qry, [val]);
      qry = "select id from second_table where some_field = ?";
      try {
        let results1 = await connection.query(qry, [val]);
        qry = "update third_table set some_field = ? where id = ?";
        try {
          let results2 = await connection.query(qry, [results1[0].id]);
          connection.release();
          callback(null, this.jsonResponse(results2));
        } catch (error2) {
          callback(null, this.jsonResponse(error2));
        }
      } catch (error1) {
        callback(null, this.jsonResponse(error1));
      }
    } catch (error) {
      callback(null, this.jsonResponse(error));
    }
  }
};
We need to use promises. (connection.query() is callback-based and does not return a promise, so awaiting it has no effect; await is also only valid inside an async function.)
Typically I follow this approach:
Create one async method, mainProcess, and have a bunch of methods called step by step within it, one after the other with await, or all at once.
Each async method called within mainProcess (getConnection and runQuery in this case) must return a Promise.
If any of these methods errors, i.e. a promise rejects, it goes to the catch block of mainProcess().
If there are no errors, all methods within mainProcess get executed and control goes to the then block of mainProcess().
Your code can be refactored like this (just written in an editor, untested):
var pool = mysql.createPool({
  host: config.dbhost,
  user: config.dbuser,
  password: config.dbpassword,
  database: config.dbname,
});

exports.handler = (event, context, callback) => {
  context.callbackWaitsForEmptyEventLoop = false;

  /**
   * Main Lambda Process
   */
  const mainProcess = async () => {
    // Get Connection
    let connection = await getConnection();

    // Run Step 1
    let qry1 = "select id from some_table1 where some_field = ?";
    const response1 = await runQuery(connection, qry1, [1222]);

    // Run Step 2
    let qry2 = "select id from some_table2 where some_field = ?";
    const response2 = await runQuery(connection, qry2, [1222]);

    return 'All Good';
  };

  mainProcess()
    .then(result => {
      // All lambda success messages are returned from here
      callback(null, result);
    })
    .catch(error => {
      // All lambda errors thrown from here
      callback(error);
    });
};

function getConnection() {
  return new Promise((resolve, reject) => {
    pool.getConnection(function (error, connection) {
      if (error) {
        // return to client if error
        reject(error);
      } else {
        // Return connection
        resolve(connection);
      }
    });
  });
}

/**
 * Runs a query, either resolves or rejects
 */
function runQuery(connection, qry, parms) {
  return new Promise((resolve, reject) => {
    connection.query(qry, parms, function (error, results) {
      if (error) {
        // return to client if error
        reject(error);
      } else {
        // Return response
        resolve(results);
      }
    });
  });
}
When you're dealing with a lambda function which performs an async task, you have two solutions:
you can use a non-async handler, in which case you need to invoke "callback" on promises as you did in your example
you can use an async handler, which does not require the "callback" input and allows you to write async/await code, like the following example:
const mysql = require('mysql2/promise');

exports.handler = async (event, context) => {
  // get path variable
  const { pathVar } = event.pathParameters;

  // get connection
  const connection = await mysql.createConnection({
    host     : process.env.RDS_HOSTNAME,
    user     : process.env.RDS_USERNAME,
    password : process.env.RDS_PASSWORD,
    database : process.env.RDS_DB_NAME
  });

  // run the query (a placeholder rather than string interpolation
  // avoids SQL injection); execute() resolves to [rows, fields]
  const [results] = await connection.execute(
    'SELECT field FROM entity WHERE attribute = ?',
    [pathVar]
  );

  await connection.end();

  return {
    "statusCode": 200,
    "body": JSON.stringify(results),
    "isBase64Encoded": false
  };
};
You can have a look at the AWS docs: https://docs.aws.amazon.com/lambda/latest/dg/nodejs-handler.html
I wanted to know if it is possible to use async/await when using transactions with mysql. I created the snippet below using the mysql documentation, but the problem is that if I have multiple queries and I want them to fire one after another, there is no way to specify that without creating callback hell. Has anyone been able to do it with async/await?
pool.getConnection(function(err, connection) {
  if (err) { throw err; }
  connection.beginTransaction(function(err) {
    if (err) { throw err; }
    connection.query('INSERT INTO posts SET title=?', title, function (error, results, fields) {
      if (error) {
        return connection.rollback(function() {
          throw error;
        });
      }
      var log = 'Post ' + results.insertId + ' added';
      connection.query('INSERT INTO log SET data=?', log, function (error, results, fields) {
        if (error) {
          return connection.rollback(function() {
            throw error;
          });
        }
        connection.commit(function(err) {
          if (err) {
            return connection.rollback(function() {
              throw err;
            });
          }
          //console.log('success!');
          connection.release();
        });
      });
    });
  });
});
I am using a connection pool, by the way, as you can see above. If you want to know how the pool is created, here is the code:
let pool = mysql.createPool({
  connectionLimit: 20,
  host: keys.connection.host,
  user: keys.connection.user,
  password: keys.connection.password,
  database: keys.connection.database,
  dateStrings: true
  // debug:true //Set this to true for verbose debugging. Leaving this to default for now cause it is creating too many messages at my console
});

// warm-up / sanity check of the pool
pool.getConnection((err, connection) => {
  if (err) {
    if (err.code === 'PROTOCOL_CONNECTION_LOST') {
      console.error('Database connection was closed.')
    }
    if (err.code === 'ER_CON_COUNT_ERROR') {
      console.error('Database has too many connections.')
    }
    if (err.code === 'ECONNREFUSED') {
      console.error('Database connection was refused.')
    }
  }
  // put the test connection straight back into the pool
  if (connection) connection.release()
  return
})

pool.query = util.promisify(pool.query)

module.exports = pool
If you don't need the returned value of each query/request, you can wrap every request in a promise, store the promises in an array, and do Promise.all at the end:
let requestArray = []
// push each request to requestArray
// then do
Promise.all(requestArray).then((values) => {
  console.log(values);
});
If you do need the values in sequence, you need something like async-waterfall: https://www.npmjs.com/package/async-waterfall
Hope that helps.
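For example, with pool.query promisified as in the question, the Promise.all approach could look like this (a sketch, assuming the inserts are independent of each other):

let requestArray = [
  pool.query('INSERT INTO posts SET title=?', ['first post']),
  pool.query('INSERT INTO posts SET title=?', ['second post'])
];

Promise.all(requestArray).then((values) => {
  console.log(values);
}).catch((err) => {
  console.error(err);
});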
I have a basic node.js app running as a service on a Raspberry Pi.
There are two important requirements:
Check the MySQL database every 3 seconds and get the status.
Never throw, even if the internet connection is lost or the MySQL server stops, etc.
This code runs well, but the app closes and throws an error if the MySQL server stops. It needs to keep running even if there is any kind of error. (No need to log or inspect the error type.)
And also: is this connect-every-3-seconds approach acceptable for performance?
var mysql = require('mysql');

var con = mysql.createConnection({
  host: "192.168.1.100",
  user: "test",
  password: "test",
  database: "test"
});

var Gpio = require('onoff').Gpio;
var LED = new Gpio(4, 'out'); //use GPIO pin 4

var getStatus = setInterval(checkStatus, 3000);

function checkStatus() {
  try {
    con.connect(function(err) {
      //if (err) throw err;
      con.query("SELECT device_status FROM Device_Status WHERE device_id='device01'", function (err, result, fields) {
        if (err) {
          //console.log(err);
        } else {
          //console.log(result[0].device_status);
          if (result[0].device_status == "1") {
            LED.writeSync(1);
          } else {
            LED.writeSync(0);
          }
        }
      });
    });
  }
  catch (err) {
    //console.log(err);
  }
}
Have you thought about pooling connections? Then you would get a connection from the pool for every query:
connect() {
  return new Promise((resolve, reject) => {
    pool = mysql.createPool({
      connectionLimit: 10,
      host     : this.host,
      user     : this.user,
      password : this.password,
      database : this.database
    });
    resolve(pool);
  });
}
Then you would search like this:
search(collection, searchStr) {
  return new Promise((resolve, reject) => {
    pool.getConnection((err, connection) => {
      if (err) {
        reject(err);
      } else {
        var sql = "SELECT * FROM " + collection;
        if (searchStr)
          sql += " WHERE " + searchStr;
        connection.query(sql, (err, result) => {
          connection.release();
          if (err) reject(err);
          else resolve(result);
        });
      }
    });
  });
}
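Back in the question's scenario, the 3-second loop could then use it like this (my sketch, reusing the table and the LED from the question; errors are caught so the loop never throws):

setInterval(function () {
  search('Device_Status', "device_id='device01'")
    .then(function (result) {
      LED.writeSync(result[0].device_status == "1" ? 1 : 0);
    })
    .catch(function (err) {
      // swallow the error so the interval keeps running
    });
}, 3000);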
You can check this code from my repo: https://github.com/MathewJohn1414/node-mysql-quickstart. It uses the MySQL connection pool, and connection errors are also handled.
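The general pattern, a shared pool plus centralized error handling, might look roughly like this (my own sketch, not copied from the repo):

const mysql = require('mysql');

const pool = mysql.createPool({
  connectionLimit: 10,
  host: process.env.DB_HOST,
  user: process.env.DB_USER,
  password: process.env.DB_PASSWORD,
  database: process.env.DB_NAME
});

// verify the pool once at startup and log connection errors
pool.getConnection(function (err, connection) {
  if (err) {
    console.error('MySQL connection error:', err.code);
    return;
  }
  connection.release();
});

module.exports = pool;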
I have a lot of urls; for every url I call the function load(url). This function parses the html, extracts the needed data and builds a bulk insert query, as you can see in my test.js code. The problem is, if I have too many urls (like 100+), I get Error: ER_LOCK_DEADLOCK from mysql. I tried to use async.queue, but this is somehow not working (I don't know why, maybe I am using it wrongly). How can I run many urls + queries one after another, avoiding the parallel execution which I think resulted in the deadlock? Even using async.queue results in a DEADLOCK (not always).
test.js
const request = require('request');
const async = require('async');
const pool = require('./database');

const urls = [
  'https://www.quora.com/What-is-the-best-way-to-have-delayed-job-queue-with-node-js',
  'https://de.wikipedia.org/wiki/Reinhardt-Zimmermann-L%C3%B6sung',
  'https://towardsdatascience.com/the-5-clustering-algorithms-data-scientists-need-to-know-a36d136ef68'
]

let load = function(url) {
  request({url: url}, function(error, response, html) {
    if (!error) {
      console.log(html);
      /**
       * 1. Parse HTML
       * 2. Create Array of Values
       * 3. Call pool.query(sql, [values], function(error) { ... })
       */
      let data = [{}];
      let sql = "INSERT IGNORE INTO tbl_test (title, content) VALUES ?";
      let values = [];
      data.forEach((item) => { values.push(item) });
      pool.query(sql, [values], function(error) {
        if (error) throw error;
      })
    } else {
      console.log("handle error...");
    }
  })
}

let jobs = []
/*urls.forEach((url) => {
  //jobs.push(load(url)); // --> Works but fails if the urls list is to big -> mysql deadlock error!
  jobs.push(function(callback) { callback(load(url)) });
})*/

// note: this worker only logs the task and calls callback() immediately;
// it never waits for load() to finish, so the queries still run in parallel
let q = async.queue(function(task, callback) {
  console.log("Task:", task.uri);
  callback();
})

q.drain = function() {
  console.log('all task completed');
  pool.end();
}

urls.forEach((url) => {
  q.push({uri: url}, function(err) {
    console.log('finished processing ...')
  });
});
database.js
require('dotenv').config();
const mysql = require('mysql');

let pool = mysql.createPool({
  connectionLimit: 10,
  host: process.env.DB_HOST,
  port: process.env.DB_PORT,
  user: process.env.DB_USER,
  password: process.env.DB_PASSWORD,
  database: process.env.DB_NAME
});

pool.getConnection((err, connection) => {
  if (err) {
    if (err.code === 'PROTOCOL_CONNECTION_LOST') {
      console.log('Database connection lost.')
    }
    if (err.code === 'ER_CON_COUNT_ERROR') {
      console.log('Database has too many connections.')
    }
    if (err.code === 'ECONNREFUSED') {
      console.log('Database connection refused.')
    }
    if (err.code === 'POOL_CLOSED') {
      console.log('Pool is closed.')
    }
  }
  if (connection) {
    connection.release()
  }
  return;
});

module.exports = pool;
I have changed the code to use async.series instead of async.queue, because the tasks would run in parallel in the queue (see: https://caolan.github.io/async/docs.html#queue).
test.js
...
let tasks = [];

context.forEach((ctx) => {
  tasks.push(function(callback) { load(ctx, callback) });
});

async.series(tasks, function(err) {
  if (err) return next(err);
});
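For this to work, load() has to accept and invoke the callback once its query finishes, rather than fire and forget. A sketch of that change, keeping the pool module from the question (the ctx.uri shape is assumed from the earlier queue code):

let load = function(ctx, callback) {
  request({url: ctx.uri}, function(error, response, html) {
    if (error) return callback(error);
    // ... parse html and build `values` as before ...
    let sql = "INSERT IGNORE INTO tbl_test (title, content) VALUES ?";
    let values = [];
    pool.query(sql, [values], function(error) {
      callback(error); // signal async.series that this task is done
    });
  });
};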
I need a persistent MySQL connection for my Node web app. The problem is that this happens a few times a day:
Error: Connection lost: The server closed the connection.
at Protocol.end (/var/www/n/node_modules/mysql/lib/protocol/Protocol.js:73:13)
at Socket.onend (stream.js:79:10)
at Socket.EventEmitter.emit (events.js:117:20)
at _stream_readable.js:895:16
at process._tickCallback (node.js:415:13)
error: Forever detected script exited with code: 8
error: Forever restarting script for 2 time
info: socket.io started
Here is my connection code:
// Yes I know multipleStatements can be dangerous in the wrong hands.
var sql = mysql.createConnection({
  host: 'localhost',
  user: 'my_username',
  password: 'my_password',
  database: 'my_database',
  multipleStatements: true
});

sql.connect();

function handleDisconnect(connection) {
  connection.on('error', function(err) {
    if (!err.fatal) {
      return;
    }
    if (err.code !== 'PROTOCOL_CONNECTION_LOST') {
      throw err;
    }
    console.log('Re-connecting lost connection: ' + err.stack);
    sql = mysql.createConnection(connection.config);
    handleDisconnect(sql);
    sql.connect();
  });
}

handleDisconnect(sql);
As you can see, the handleDisconnect code does not work.
Use the mysql connection pool. It will reconnect when a connection dies, and you get the added benefit of being able to make multiple SQL queries at the same time. If you don't use a pool, your app will block database requests while waiting for currently running requests to finish.
I usually define a database module where I keep my queries separate from my routes. It looks something like this...
var mysql = require('mysql');

var pool = mysql.createPool({
  host     : 'example.org',
  user     : 'bob',
  password : 'secret'
});

exports.getUsers = function(callback) {
  pool.getConnection(function(err, connection) {
    if (err) {
      console.log(err);
      callback(true);
      return;
    }
    var sql = "SELECT id,name FROM users";
    connection.query(sql, [], function(err, results) {
      connection.release(); // always put connection back in pool after last query
      if (err) {
        console.log(err);
        callback(true);
        return;
      }
      callback(false, results);
    });
  });
};
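A route can then call it like this (my usage sketch, assuming an Express handler and a hypothetical path to the module above):

var db = require('./db'); // wherever the module above lives

app.get('/users', function(req, res) {
  db.getUsers(function(err, users) {
    if (err) {
      return res.sendStatus(500);
    }
    res.json(users);
  });
});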
I know this is super delayed, but I've written a solution to this that I think might be a bit more generic and usable. I had written an app entirely dependent on connection.query() and switching to a pool broke those calls.
Here's my solution:
var mysql = require('mysql');

var pool = mysql.createPool({
  host     : 'localhost',
  user     : 'user',
  password : 'secret',
  database : 'test',
  port     : 3306
});

module.exports = {
  query: function() {
    var sql_args = [];
    var args = [];
    for (var i = 0; i < arguments.length; i++) {
      args.push(arguments[i]);
    }
    var callback = args[args.length - 1]; //last arg is callback
    pool.getConnection(function(err, connection) {
      if (err) {
        console.log(err);
        return callback(err);
      }
      if (args.length > 2) {
        sql_args = args[1];
      }
      connection.query(args[0], sql_args, function(err, results) {
        connection.release(); // always put connection back in pool after last query
        if (err) {
          console.log(err);
          return callback(err);
        }
        callback(null, results);
      });
    });
  }
};
This instantiates the pool once, then exports a method named query. Now, when connection.query() is called anywhere, it calls this method, which first grabs a connection from the pool, then passes the arguments to the connection. It also grabs the callback first, so it can pass back any errors that occur while getting a connection from the pool.
To use this, simply require it as a module in place of mysql. Example:
var connection = require('../middleware/db');
function get_active_sessions(){
connection.query('Select * from `sessions` where `Active`=1 and Expires>?;', [~~(new Date()/1000)], function(err, results){
if(err){
console.log(err);
}
else{
console.log(results);
}
});
}
This looks just like the normal query, but actually opens a pool and grabs a connection from the pool in the background.
In response to @gladsocc's question:
Is there a way to use pools without refactoring everything? I have
dozens of SQL queries in the app.
This is what I ended up building. It's a wrapper for the query function. It will grab the connection, do the query, then release the connection.
var pool = mysql.createPool(config.db);

exports.connection = {
  query: function () {
    var queryArgs = Array.prototype.slice.call(arguments),
        events = [],
        eventNameIndex = {};

    pool.getConnection(function (err, conn) {
      if (err) {
        // forward the pool error to the registered 'error' handler, if any
        if (eventNameIndex.error) {
          eventNameIndex.error(err);
        }
        return;
      }
      var q = conn.query.apply(conn, queryArgs);
      q.on('end', function () {
        conn.release();
      });
      events.forEach(function (args) {
        q.on.apply(q, args);
      });
    });

    return {
      on: function (eventName, callback) {
        events.push(Array.prototype.slice.call(arguments));
        eventNameIndex[eventName] = callback;
        return this;
      }
    };
  }
};
And I use it like I would normally.
db.connection.query("SELECT * FROM `table` WHERE `id` = ?", row_id)
  .on('result', function (row) {
    setData(row);
  })
  .on('error', function (err) {
    callback({error: true, err: err});
  });