I have an API server that runs various API calls against a MySQL database. This is the file that handles all the connections and queries:
const mysql = require('mysql')

const pool = mysql.createPool({
  host: sqlhost,
  port: sqlport,
  user: sqluser,
  password: sqlpsw,
  database: sqldb,
  charset: 'utf8mb4',
  connectionLimit: 10
})

module.exports = {
  get: function (sqlQuery, type) {
    return new Promise((resolve, reject) => {
      pool.getConnection((err, connection) => {
        if (err) reject(new Error(505))
        else {
          connection.query(sqlQuery, function (err, results) {
            connection.release()
            if (err !== null) reject(new Error(503))
            else resolve(results)
          })
        }
      })
    })
  }
}
After some time I get the Error: ER_CON_COUNT_ERROR: Too many connections.
I know I could have used pool.query instead of the pool.getConnection --> connection.query --> connection.release flow (as sketched below), but the result would be, I guess, the same.
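For reference, the pool.query shorthand I mean would look roughly like this (an untested sketch, keeping the same error codes):

module.exports = {
  get: function (sqlQuery, type) {
    return new Promise((resolve, reject) => {
      // pool.query grabs a pooled connection and releases it internally
      pool.query(sqlQuery, function (err, results) {
        if (err) reject(new Error(503))
        else resolve(results)
      })
    })
  }
}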
I have no idea what is causing the connection leak. Any ideas? Thanks.
I'm new to Node and learning how to query data from MySQL.
I'm trying to use a connection pool with Node.js and MySQL, but after a few queries I get this error message:
Error: ER_CON_COUNT_ERROR: Too many connections
{
  code: 'ER_CON_COUNT_ERROR',
  errno: 1040,
  sqlMessage: 'Too many connections',
  sqlState: undefined,
  fatal: true
}
My code:
const mysql = require('mysql')

class database {
  connect (database) {
    const pool = mysql.createPool({
      connectionLimit: 10, // important
      host: "localhost",
      password: "psw",
      user: "root",
      database: "database",
      multipleStatements: true,
      debug: false
    });
    return pool
  }

  select (database, data, where, cound) {
    const link = this
    return new Promise(function (resolve, reject) {
      const pool = link.connect(database[0])
      pool.getConnection(function (err, connection) {
        if (err) throw err; // not connected!
        let sql = "select " + data + " from " + link.table + " " + where + " " + cound + ";";
        connection.query(sql, function (error, results, fields) {
          if (error) throw error;
          connection.release();
          resolve(results)
        });
      });
    })
  }
}
I can't solve this problem. Thanks in advance for a simple explanation.
I'm trying to build an application with Node and Express. It worked well yesterday, but today I got this error:
{
  code: "PROTOCOL_ENQUEUE_AFTER_FATAL_ERROR",
  fatal: false
}
Google says I should use createPool rather than createConnection, but I'm not sure how to do it in my case. Here is my code:
const express = require('express');
const cors = require('cors');
const mysql = require('mysql');

const app = express();

const SELECT_ALL_USERS_QUERY = 'SELECT * FROM `mySchema`.`myTable`;';

const connection = mysql.createConnection({
  host: 'localhost',
  user: 'root',
  password: 'abcdefg',
  database: 'mySchema'
});

connection.connect(err => {
  if (err) {
    return err
  }
});

app.use(cors());

app.get('/', (req, res) => {
  res.send('go to /mySchema to see contents')
});

app.get('/myTable', (req, res) => {
  connection.query(SELECT_ALL_USERS_QUERY, (err, results) => {
    if (err) {
      return res.send(err)
    } else {
      return res.json({
        data: results
      })
    }
  })
});

app.listen(4000, () => {
  console.log('MySchema SQL server listening on PORT 4000');
});
var mysql = require('mysql');

var pool = mysql.createPool({
  connectionLimit: 10,
  host: 'localhost',
  user: 'root',
  password: 'abcdefg',
  database: 'mySchema'
});

// for an example.....
pool.getConnection(function (err, connection) {
  if (err) throw err; // not connected!

  app.get('/myTable', (req, res) => {
    // Use the connection
    connection.query('SELECT something FROM sometable', function (error, results, fields) {
      // When done with the connection, release it.
      connection.release();

      // Handle error after the release.
      if (error) throw error;

      // Don't use the connection here, it has been returned to the pool.
    });
  });
});
If you would like to close the connection and remove it from the pool, use connection.destroy() instead. The pool will create a new connection the next time one is needed.
Connections are lazily created by the pool. If you configure the pool to allow up to 100 connections, but only ever use 5 simultaneously, only 5 connections will be made. Connections are also cycled round-robin style, with connections being taken from the top of the pool and returning to the bottom.
When a previous connection is retrieved from the pool, a ping packet is sent to the server to check if the connection is still good.
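A minimal sketch of the release/destroy difference described above (assuming the same pool as in the example):

pool.getConnection(function (err, connection) {
  if (err) throw err;
  connection.query('SELECT 1', function (error) {
    if (error && error.fatal) {
      // Broken connection: remove it from the pool entirely.
      connection.destroy();
    } else {
      // Healthy connection: return it to the pool for reuse.
      connection.release();
    }
  });
});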
You can follow this link: https://github.com/mysqljs/mysql#pooling-connections
To help other people, I thought I'd post the complete solution that worked for me:
const express = require('express');
const cors = require('cors');
const mysql = require('mysql');

const app = express();

app.use(cors());

app.get('/', (req, res) => {
  res.send('go to /mySchema to see contents')
});

const SELECT_ALL_USERS_QUERY = 'SELECT * FROM `mySchema`.`myTable`;';

const pool = mysql.createPool({
  connectionLimit: 10,
  host: 'localhost',
  user: 'root',
  password: 'abcdefg',
  database: 'mySchema',
  debug: false
});

pool.getConnection((err, connection) => {
  if (err) throw err;

  app.get('/myTable', (req, res) => {
    connection.query(SELECT_ALL_USERS_QUERY, (err, results) => {
      if (err) {
        return res.send(err)
      } else {
        return res.json({
          data: results
        })
      }
    });
    console.log(connection);
  });
});

app.listen(4000, () => {
  console.log('MySchema SQL server listening on PORT 4000');
});
I'm new to Node and I'm trying to use a MySQL pool cluster, but I'm not exactly sure how to export it.
At the moment I have the following in /libs/mysql.js:
const mysql = require('mysql');

const poolCluster = mysql.createPoolCluster();

poolCluster.add('db1', {
  host: config.databases.hostname,
  user: config.databases.db1.username,
  password: config.databases.db1.password,
  database: config.databases.db1.database,
  connectionLimit: config.databases.connectionLimit
});

poolCluster.add('db2', {
  host: config.databases.hostname,
  user: config.databases.db2.username,
  password: config.databases.db2.password,
  database: config.databases.db2.database,
  connectionLimit: config.databases.connectionLimit
});

module.exports = {
  getConnection: (callback) => {
    return poolCluster.getConnection(callback);
  }
};
I'm trying to use it in models/monitor.js as below:
let poolCluster = require('../libs/mysql');
let moment = require('moment');

exports.select = function (sql, values, callback) {
  poolCluster.getConnection('db1', (err, connection) => {
    if (err) {
      callback(err);
    } else {
      connection.query(sql, values, (err, result) => {
        connection.release();
        if (err) {
          console.log(err);
          callback(err);
        } else {
          callback(null, result)
        }
      })
    }
  })
};
The issue now is that I'm getting an error stating cb is not a function.
Is this the correct way to export a mysql pool cluster in node?
You are exporting getConnection: (callback) => {}, but you are calling it as getConnection(string, callback).
According to the doc:
You can call getConnection like:
// Target Group : ALL(anonymous, MASTER, SLAVE1-2), Selector : round-robin(default)
poolCluster.getConnection(function (err, connection) {});
// Target Group : MASTER, Selector : round-robin
poolCluster.getConnection('MASTER', function (err, connection) {});
Basically, you need to pass the arguments you receive in YOUR getConnection function through to mysql's getConnection. This should do the trick:
module.exports = {
  getConnection: (...args) => {
    return poolCluster.getConnection(...args);
  }
};
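Alternatively, since the wrapper only forwards the call, you could export the cluster itself (a sketch of the same idea):

module.exports = poolCluster;

Then models/monitor.js can keep calling poolCluster.getConnection('db1', ...) unchanged.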
I have been searching Google for 5 days hoping to find a solution. I know it doesn't work because it is asynchronous, but I need the program (a Discord bot) to reply with data that I fetch from a DB. I have tried Promises and callbacks, but maybe because I'm a novice with asynchronous code, nothing works for me.
const con = mysql.createConnection({
  host: datos.host,
  user: datos.user,
  password: datos.password,
  database: datos.database
});

function leerPromesa() {
  var promise = new Promise(function (resolve, reject) {
    con.query('SELECT * from ranking;', function (err, rows, fields) {
      if (err) {
        reject(err);
        return
      }
      resolve(rows);
      rows.forEach(element => console.log(element));
    })
  });
  return promise;
};

var promesa = leerPromesa();
promesa.then(
  function (rows) {
    rows.forEach(element => msg.reply(element));
  },
  function (err) {
    msg.reply(err);
  }
);

con.end();
What the bot does is respond with blank text.
First, you're not actually connecting to the database.
If you refer to the docs at https://github.com/mysqljs/mysql:
var connection = mysql.createConnection({
  host: 'localhost',
  user: 'me',
  password: 'secret',
  database: 'my_db'
});

// then connect method
connection.connect();
So your code will never work.
Second, you are closing the connection before any query executes:
con.end();
The correct approach is to close the connection after leerPromesa has finished.
Finally, the code could look something like this:
const con = mysql.createConnection({
  host: datos.host,
  user: datos.user,
  password: datos.password,
  database: datos.database
});

con.connect();

function leerPromesa() {
  return new Promise(function (resolve, reject) {
    con.query("SELECT * from ranking;", function (err, rows, fields) {
      if (err) {
        return reject(err);
      }
      return resolve(rows);
    });
  });
}

leerPromesa()
  .then(
    function (rows) {
      rows.forEach(element => msg.reply(element));
    },
    function (err) {
      msg.reply(err);
    }
  )
  .finally(function () {
    con.end();
  });
I used the finally method on the Promise to close the connection in every situation: https://developer.mozilla.org/ru/docs/Web/JavaScript/Reference/Global_Objects/Promise/finally
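For reference, the same flow could also be written with async/await via util.promisify (just a sketch, assuming Node 8+ and the same con and msg objects; the responder function name is only illustrative):

const util = require('util');
const query = util.promisify(con.query).bind(con);

async function responder() {
  try {
    const rows = await query('SELECT * from ranking;');
    rows.forEach(element => msg.reply(element));
  } catch (err) {
    msg.reply(err);
  } finally {
    // Close the connection whether the query succeeded or failed.
    con.end();
  }
}

responder();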
My very simple Node.js code's connection pool doesn't seem to work the way it's supposed to. The _connectionQueue of the Pool object just grows and grows until the app dies. It does create a pool, and the connections are pre-made, but either they aren't being reused or the insert requests are arriving too fast; I'm not sure.
I've tried raising the connectionLimit, like the following:
import mysql from 'mysql'

let state = { pool: null }

export const connect = () => {
  state.pool = mysql.createPool({
    connectionLimit: 200,
    host: process.env.DATABASE_HOST || 'localhost',
    user: process.env.DATABASE_USER || 'root',
    password: process.env.DATABASE_PASSWORD || 'password',
    database: process.env.DATABASE_NAME || 'database'
  })
}

export const get = () => state.pool
The main job of this server is subscription and insertion: it subscribes to several MQTT topics and inserts the messages into an RDB. About 100 messages arrive every second, and the code looks like this:
mqttClient.on('message', function (topic, message) {
  if (topic.includes('sensor')) {
    try {
      const data = JSON.parse(message.toString())
      if (validate(data.uuid)) {
        const params = [data.a, data.b, data.c, ...]
        sensor.setStatus(params)
      }
    } catch (err) {
      console.error(err)
    }
  }
})
export const setStatus = (params) => {
  const SQL = `INSERT INTO ...`
  db.get().query(SQL, params, (err, result) => {
    if (err) console.error(err)
  })
}
Then I see this through chrome-devtools:
Object
  pool: Pool
    config: PoolConfig {acquireTimeout: 10000, connectionConfig: ConnectionConfig, waitForConnections: true, connectionLimit: 200, queueLimit: 0}
    domain: null
    _acquiringConnections: []
    _allConnections: (200) [PoolConnection, PoolConnection, …]
    _closed: false
    _connectionQueue: (11561) [ƒ, ƒ, ƒ, ƒ, …]
    _events: {}
    _eventsCount: 0
    _freeConnections: []
    _maxListeners: undefined
    __proto__: EventEmitter
  __proto__: Object
I've put a console.log into setStatus like the following:
export const setStatus = (params) => {
  const SQL = `INSERT INTO ...`
  console.log(`allConnections=${db.get()._allConnections.length}, connectionQueue=${db.get()._connectionQueue.length}`)
  db.get().query(SQL, params, (err, result) => {
    if (err) console.error(err)
  })
}
and got this output:
allConnections=200, connectionQueue=29
allConnections=200, connectionQueue=30
allConnections=200, connectionQueue=31
allConnections=200, connectionQueue=32
allConnections=200, connectionQueue=33
allConnections=200, connectionQueue=34
...
It seems like the server created the connection pool just fine, but it isn't using those connections. Instead, it keeps trying to create new connections, and those requests just get stuck in _connectionQueue.
It appears you are creating a new pool every time you'd like to make a query. The common model is to create a pool once when the application starts, then use connections from that pool as needed (one pool, many connections).
Also, if you're using a simple DB model, you can simplify access to the pool by making it global. Below is an alternative to your code you might try:
app.js
const mysql = require('mysql');

const connection = mysql.createPool({
  host: process.env.DB_HOST || '127.0.0.1',
  user: process.env.DB_USER || 'local_user',
  password: process.env.DB_PASSWORD || 'local_password',
  database: process.env.DB_NAME || 'local_database'
});

global.db = connection;
modules.js
export const setStatus = (params) => {
  let SQL = `INSERT INTO ...`
  db.query(SQL, params, (err, result) => {
    if (err) console.error(err)
    console.log(result)
  })
}
Documentation for further reference: https://github.com/mysqljs/mysql#pooling-connections
Edit 1 - Log pool events
db.on('acquire', function (connection) {
  console.log('Connection %d acquired', connection.threadId);
});

db.on('connection', function (connection) {
  console.log('Pool id %d connected', connection.threadId);
});

db.on('enqueue', function () {
  console.log('Waiting for available connection slot');
});

db.on('release', function (connection) {
  console.log('Connection %d released', connection.threadId);
});
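If "Waiting for available connection slot" shows up repeatedly while matching "released" lines never appear, connections are being acquired but not returned, which would explain the ever-growing _connectionQueue in the question.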