NodeJS Mysql - Connection not being released

It seems that the MySQL pool isn't releasing connections from my NodeJS app. When looking at the server processes in MySQL, connections are established but never released.
The app is crashing with the following error: TypeError: Cannot read property 'query' of undefined. From my understanding, this is because I've exceeded the connection limit.
To note, I'm using the Heroku MySQL plugin and have a limit of 10 connections for testing. I've added connectionLimit to the end of my .env DB_URL to try to limit the connections in the pool.
In my .env file, the link is formatted as such:
DEV_DB_URL=mysql://user:pass@host:3306/db_name?connectionLimit=5
The db.js file:
var mysql = require('mysql');

var dbURL = process.env.PRODUCTION === 'true'
  ? process.env.LIVE_DB_URL
  : process.env.DEV_DB_URL;

var dbConnection = function dbConnection(sql, values, next) {
  // It means that values weren't passed
  if (arguments.length === 2) {
    next = values;
    values = null;
  }

  var pool = mysql.createPool(dbURL);

  pool.getConnection(function(err, connection) {
    connection.query(sql, values, function(err) {
      connection.release();
      if (err) {
        throw err;
      }
      // Execute the callback
      next.apply(this, arguments);
    });
  });
};

module.exports = dbConnection;
My controller that uses the db.js file looks like this:
var DB = require('../services/db');

exports.updateReporting = function(req, res) {
  var body = req.body;
  var sentStatus = body.edc;

  if (sentStatus === 'pass') {
    res.status(200)
      .send({
        message: 'EDC is all good.'
      });
  }

  if (sentStatus === 'fail') {
    var dateTime = require('node-datetime');
    var dt = dateTime.create();
    var curTimestamp = dt.format('Y-m-d H:M:S');
    var storeId = body.store_id
      .substring('Addr1='.length);
    var fileTimestamp = body.file_ts;

    var data = {
      status: sentStatus,
      store_id: storeId,
      file_ts: fileTimestamp,
      current_ts: curTimestamp
    };

    DB('INSERT INTO edc_reporting SET ?', data, function(err, row) {
      if (err) throw err;
    });

    res.status(200)
      .send({
        message: 'Message recorded.'
      });
  }
};
As I'm new to Node, I'm not sure whether my connection.release() is in the wrong place and isn't being executed because of the callback.
Any pointers would be appreciated.

This was a really silly mistake.
In the db.js file, I had var pool = mysql.createPool(dbURL); inside the exported function; this is wrong.
Every time there was a request, a brand new pool was created. To solve the issue, I moved the creation of the pool outside the dbConnection function, so only one pool is created and its connections are reused.
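For reference, a minimal sketch of the corrected db.js, with the pool created once at module load. It also checks the getConnection error, which is what produced the "Cannot read property 'query' of undefined" crash:
var mysql = require('mysql');

var dbURL = process.env.PRODUCTION === 'true'
  ? process.env.LIVE_DB_URL
  : process.env.DEV_DB_URL;

// Created once, when the module is first required.
var pool = mysql.createPool(dbURL);

var dbConnection = function dbConnection(sql, values, next) {
  if (arguments.length === 2) {
    next = values;
    values = null;
  }
  pool.getConnection(function(err, connection) {
    if (err) {
      // No connection could be acquired; don't touch `connection`.
      return next(err);
    }
    connection.query(sql, values, function(err) {
      connection.release();
      if (err) {
        return next(err);
      }
      next.apply(this, arguments);
    });
  });
};

module.exports = dbConnection;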

Despite declaring the pool outside the exported function and releasing the connection with connection.release(), it did not work for me. Using pool.query instead of connection.query worked perfectly fine; it releases the connection automatically every time.
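A minimal sketch of that variant, keeping the pool at module scope (pool.query is shorthand for getConnection + query + release):
var mysql = require('mysql');

var dbURL = process.env.PRODUCTION === 'true'
  ? process.env.LIVE_DB_URL
  : process.env.DEV_DB_URL;

var pool = mysql.createPool(dbURL); // still created once per process

module.exports = function dbConnection(sql, values, next) {
  if (arguments.length === 2) {
    next = values;
    values = null;
  }
  // Acquires a connection, runs the query and releases it automatically.
  pool.query(sql, values, next);
};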

Related

ECONNRESET/ETIMEDOUT error with mysql2 nodejs pooled connection

I have a node/express/mysql2 web app that accesses a MySQL DB through a connection pool object, and I often run into the following issue: I leave the app idle for a while, then when I come back and access pages that run queries I'll get
Error in foo: Error: connect ETIMEDOUT
Error in bar: Error: read ECONNRESET
I guess that on the other side MySQL sees idle connections and closes them; the client app doesn't know that, gets those stale connections from the pool, and then runs into those errors, fine. But I was under the impression that this was handled automatically by mysql2?
This is roughly how I organised the db code:
sqlConnectionPool.js
const dbParam = require('./dbParam.js');
const sqlPool = require('mysql2/promise').createPool(dbParam.connection.prod);
module.exports = sqlPool;
dummyQuery.js
const sqlPool = require('./sqlConnectionPool.js');
module.exports.updatefoo = async (ID, sqlConnection = undefined) => {
  let connection;
  try {
    connection = sqlConnection === undefined ? await sqlPool.getConnection() : await sqlConnection;
    const [updateResult] = await connection.query('update foo set barID=?', [ID]);
    if (updateResult.affectedRows !== 1) {
      throw (new Error(`error on ID ${ID}`));
    }
    return undefined;
  } catch (err) {
    console.error(`Error in updatefoo: ${err}`);
    return err;
  } finally {
    if (sqlConnection === undefined) {
      connection.release();
    }
  }
};
Is there something I'm missing to have those errors handled automatically, or to simply not run into them? I guess the mysql2 library needs to close the connection when it gets an ECONNRESET or ETIMEDOUT error and return it to the pool...
Thanks!
I think you should use something like setTimeout() to periodically ping the DB and keep it alive.
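If the idea is to keep idle connections from being closed by the server's wait_timeout, one way to read that suggestion is a periodic keep-alive ping. A sketch reusing the sqlPool export from sqlConnectionPool.js above (the file name and the one-minute interval are assumptions):
// keepAlive.js -- hypothetical helper
const sqlPool = require('./sqlConnectionPool.js');

// Run a trivial query periodically so pooled connections never sit idle
// long enough for the server to drop them.
setInterval(async () => {
  try {
    await sqlPool.query('SELECT 1');
  } catch (err) {
    console.error(`keep-alive ping failed: ${err}`);
  }
}, 60 * 1000);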

How does Node's require() resolve, such that persisted database pooling in Express works?

I feel that I'm lacking some basic understanding of how Node.js's architecture is put together such that the code below runs with no problems. I'm going to lay out a simple app. Can you help me with the questions at the end?
Notes:
I am using the mysql package https://github.com/mysqljs/mysql
node app.js is run from the command line.
Inside app.js is this:
const Express = require("express");
const Path = require("path");

const app = Express();

// Require controller modules
var book_controller = require('../controllers/bookController');
var author_controller = require('../controllers/authorController');

app.get('/book', book_controller.book_get);
app.get('/author', author_controller.authors_get);

app.listen(5000, function() {
  console.log("Server started on port 5000");
});
Inside bookController:
var getConnection = require('../config/mysql');

// Display list of all books
exports.book_get = function(req, res) {
  getConnection(function(err, con) {
    var query = 'SELECT * FROM books';
    con.query(query, function(err, result) {
      if (err) throw err;
      con.release();
      res.render('page/books', { result: result });
    });
  });
};
Inside authorController:
var getConnection = require('../config/mysql');

// Display list of all authors
exports.authors_get = function(req, res) {
  getConnection(function(err, con) {
    var query = 'SELECT * FROM authors';
    con.query(query, function(err, result) {
      if (err) throw err;
      con.release();
      res.render('page/authors', { result: result });
    });
  });
};
Inside mysql.js
var mysql = require('mysql');

var pool = mysql.createPool({
  connectionLimit: 100,
  connectTimeout: 5000,
  acquireTimeout: 5000,
  queueLimit: 30,
  host: 'localhost',
  user: '',
  password: '',
  database: '',
  multipleStatements: true,
});

var getConnection = function(callback) {
  pool.getConnection(function(err, connection) {
    if (err) return callback(err);
    callback(err, connection);
  });
};

pool.on('acquire', function(connection) {
  console.log('Connection %d acquired', connection.threadId);
});

module.exports = getConnection;
That's the layout. Here are the questions:
How do separate files requiring the same dependency interact? The Books and Author controllers both need to require and access the MySQL pool, presumably on behalf of different users. Is a new copy of the MySQL object instantiated?
How does the state from the pool persist to the next connection?
How do separately required files interact? The Books and Author controllers both need to require and access the MySQL pool, presumably on behalf of different users. Is a new copy of the MySQL object instantiated?
No, a new copy of the object will not be created on every call to require.
In Node, modules are loaded as needed and the resulting export object is cached for subsequent calls to require, so you'll get the exact same reference to getConnection every time you call require('../config/mysql'). The lines before module.exports = getConnection; run only the first time that the module is required.
How does the state from the pool persist to the next connection?
Because the exported getConnection is cached, that function will always refer to the same pool object, so both of your controllers are referring to the same pool.
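A quick way to convince yourself of this (a hypothetical cacheCheck.js, not part of the original app):
// cacheCheck.js
var getConnectionA = require('../config/mysql');
var getConnectionB = require('../config/mysql');

// Both calls return the same cached export, so every controller that
// requires '../config/mysql' goes through the single pool created in mysql.js.
console.log(getConnectionA === getConnectionB); // true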

NodeJs - How to share a MySQL pool across my models to avoid 'ER_CON_COUNT_ERROR'

I'm currently testing my Node app using ApacheBench. I ran into an issue with my database: ER_CON_COUNT_ERROR: Too many connections.
I'm using a short library on top of the MySQL node module, which you can see just below:
var mysql = require('mysql');
var config = require('path/to/config');
var message = require('./myMessageLib.js');

var pool = mysql.createPool({
  connectionLimit: 100,
  host: config.db.mysql.host,
  user: config.db.mysql.user,
  password: config.db.mysql.password,
  database: config.db.mysql.database
});

var query = function(query_str, values, next) {
  pool.getConnection((err, connection) => {
    if (err) {
      console.error("MySQL Fail to get a connection in pool : " + err);
      if (typeof connection !== "undefined")
        connection.release();
      next(err, null);
      return;
    }
    connection.query(query_str, values, function(error, data, fields) {
      connection.release();
      if (error) {
        if (config.app.env.dev)
          throw (error);
        else {
          next(error, null);
          return (message.error("MySQL query failed : " + query_str + " / err : " + error));
        }
      }
      if (data.length == 0)
        next(null);
      else
        next(data);
    });
  });
};

exports.query = query;
I use this library in my models by doing something like this:
var mysql = require('path/to/mysqllib');

/**
 * Class PlayerModel
 */
function PlayerModel() { };

PlayerModel.prototype.get = function(id, next) {
  mysql.query("SELECT ....", [id], function(player) {
    // stuff
  });
};

module.exports = PlayerModel;
The thing is, on my homepage I use different models like the one presented above, and each one launches a query to get some database information. When I run ApacheBench with only 50 concurrent requests I get ER_CON_COUNT_ERROR: Too many connections. So I have the feeling that the pool isn't set up well, because it doesn't seem to respect the connection limit of 100 written in the short MySQL lib.
I was thinking about creating and storing the pool in a global Node.js variable to share it correctly across my modules, but I'm not sure that's a good approach, and maybe I'm also doing something wrong in my pool implementation.
Do you have any ideas or improvements to suggest?
Thanks mates!
I figured out the issue.
My app was deployed in cluster mode. Two processes were running at the same time. Because of that, two pools of 100 connections could have been created, resulting in a total of 200 connections, which is higher than the MySQL default connection limit.
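If you keep the cluster, one way to stay under the server-side limit is to split the connection budget across the worker processes. A sketch, assuming the same config module as above, one worker per CPU, and a server cap of 100 connections (all of these are assumptions to adjust):
var os = require('os');
var mysql = require('mysql');
var config = require('path/to/config');

var workers = os.cpus().length;   // assumption: one worker per CPU
var serverLimit = 100;            // assumption: server-side max connections

var pool = mysql.createPool({
  // Each process only gets its share of the total budget.
  connectionLimit: Math.max(1, Math.floor(serverLimit / workers)),
  host: config.db.mysql.host,
  user: config.db.mysql.user,
  password: config.db.mysql.password,
  database: config.db.mysql.database
});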
Great that you found a solution; here's another one with less code.
Create a JS file, dbconnection.js for example:
var mysql = require("mysql");

var pool = mysql.createPool({
  connectionLimit: 10,
  host: '...',
  user: '...',
  password: '...',
  database: '...',
  dateStrings: true
});

exports.connection = {
  query: function () {
    var queryArgs = Array.prototype.slice.call(arguments),
        events = [],
        eventNameIndex = {};

    pool.getConnection(function (err, conn) {
      if (err) {
        if (eventNameIndex.error) {
          eventNameIndex.error();
        }
      }
      if (conn) {
        var q = conn.query.apply(conn, queryArgs);
        q.on('end', function () {
          conn.release();
        });
        events.forEach(function (args) {
          q.on.apply(q, args);
        });
      }
    });

    return {
      on: function (eventName, callback) {
        events.push(Array.prototype.slice.call(arguments));
        eventNameIndex[eventName] = callback;
        return this;
      }
    };
  }
};
In the other file where you want to use the connection:
var db = require('./dbconnection.js');
And instead of
connection.query
Use
db.connection.query
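For example (a sketch with a placeholder table name; the 'result' and 'error' events come from the mysql Query object that the wrapper forwards listeners to):
var db = require('./dbconnection.js');

db.connection.query('SELECT * FROM some_table')
  .on('result', function (row) {
    console.log(row);        // emitted once per row
  })
  .on('error', function (err) {
    console.error(err);      // emitted on query errors
  });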

NodeJS: Heroku ClearDB not closing connections

I deployed a NodeJS API on Heroku and tried to connect it to a MySQL DB, so I created a connection pool for handling connections on a ClearDB Ignite account (free), which allows a maximum of 10 connections.
Every time I execute a query to the database it just adds a new connection to the stack, until it reaches 10 connections and the app crashes.
My code is as follows:
connectionFactory:
var mysql = require('mysql');

function createDBConnection() {
  var conn = mysql.createPool({
    host: 'xxx',
    user: 'xxx',
    password: 'xxx',
    database: 'xxx'
  });
  return conn;
}

module.exports = function() {
  return createDBConnection;
}
And here's my select query:
function Dao(connection) {
  this._connection = connection;
}

Dao.prototype.findAll = function (callback) {
  this._connection.query('SELECT * FROM table',
    function(errors, results) {
      callback(errors, results);
    });
};

module.exports = function() {
  return Dao;
}
Finally here's the route I use to call it:
app.get('/products', function (req, res) {
  var connection = app.persistence.connectionFactory();
  var dao = new app.persistence.Dao(connection);
  dao.findAll(function (err, result) {
    res.format({
      json: function () {
        res.json(result);
      }
    });
  });
});
I tried changing createPool() to createConnection() and then calling the .end()/.destroy() functions right after each query, but it doesn't work at all.
Any hints?
In order to close a connection / return it to the pool, use connection.release():
var mysql = require('mysql');
var pool = mysql.createPool(...);

pool.getConnection(function(err, connection) {
  // Use the connection
  connection.query('SELECT something FROM sometable', function (error, results, fields) {
    // And done with the connection.
    connection.release();
    // Handle error after the release.
    if (error) throw error;
    // Don't use the connection here, it has been returned to the pool.
  });
});
mysql: Pooling connections documentation
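As in the accepted answer to the first question on this page, it also helps to create the pool once at module load instead of building a new one inside connectionFactory() on every request. A minimal sketch (credentials are placeholders):
// connectionFactory.js -- one pool, shared by every request
var mysql = require('mysql');

var pool = mysql.createPool({
  connectionLimit: 10,   // ClearDB Ignite allows at most 10 connections
  host: 'xxx',
  user: 'xxx',
  password: 'xxx',
  database: 'xxx'
});

module.exports = function() {
  return pool;           // always returns the same pool object
};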

Node.js http with mysql pooling quits unexpectedly on error

So I started trying Node.js this morning and was able to come up with an HTTP service that handles path requests and can connect to MySQL with pooling for multiple transactions.
I'm just having a problem: if I ever make the password wrong, etc., the Node process quits unexpectedly.
var http = require("http");
var url = require("url");
var mysql = require('mysql');
var pool = mysql.createPool({
host : 'localhost',
user : 'root',
password : 'root',
database : 'test'
});
...
var pathname = url.parse(request.url).pathname;
var url_parts = url.parse(request.url, true);
var query = url_parts.query;
...
var table = query.table;
var sql = "SELECT * FROM " + table + "";
...
pool.getConnection(function(err, connection) {
  console.log(err);
  connection.on('error', function(err) {
    console.log(err.code);
  });
  // Use the connection
  connection.query(sql, function(err, rows) {
    if (err) throw err;
    console.log(rows);
    response.writeHead(200, {
      "Content-Type" : "application/json"
    });
    response.write(JSON.stringify(rows, null, 0));
    connection.end();
    response.end();
  });
  console.log(connection.sql);
  console.log(connection.query);
});
I'd appreciate any help on how I can make it not QUIT! and just report the error.
Anyway, I used forever to keep this Node.js app from ever quitting on me in cases of errors.
You use throw err but don't catch it anywhere, causing an uncaughtException in Node, which closes the app.
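For example, a minimal sketch that handles the errors in the callbacks instead of throwing (same pool, sql and response variables as in the question; connection.release() is used rather than connection.end() because the connection belongs to a pool):
pool.getConnection(function(err, connection) {
  if (err) {
    // e.g. bad credentials: report the error instead of crashing the process
    response.writeHead(500, { "Content-Type": "application/json" });
    response.end(JSON.stringify({ error: err.code }));
    return;
  }
  connection.query(sql, function(err, rows) {
    connection.release();
    if (err) {
      response.writeHead(500, { "Content-Type": "application/json" });
      response.end(JSON.stringify({ error: err.code }));
      return;
    }
    response.writeHead(200, { "Content-Type": "application/json" });
    response.end(JSON.stringify(rows, null, 0));
  });
});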