In my Node script I use MySQL, and to handle multiple connections I use a connection pool.
Today I forgot to release a connection back to the pool. It took me a long time to figure out what the problem was, because no error was shown anywhere.
My code:
const mysql = require('mysql');
const pool = mysql.createPool({
host : 'x',
user : 'x',
password : '#x',
database : 'x',
connectionLimit: 2
});
function executeQuery(){
pool.getConnection((err, connection) => {
if(err) console.log(err);
let query = mysql.format("SELECT * FROM users WHERE id = ?", 1);
connection.query(query, (err, rows) => {
if(err) console.log(err);
console.log(rows);
});
});
}
executeQuery(); // outputs the user as expected
executeQuery(); // outputs the user as expected
executeQuery(); // there is no output in the console, it just looks like nothing happened
My question: how can I find out whether there are still connections available and, when none are available anymore, show an error or handle it in some other way?
You forgot to release your connection:
function executeQuery(){
pool.getConnection((err, connection) => {
if(err) console.log(err);
connection.query("SELECT * FROM users WHERE id = ?", [ 1 ], (err, rows) => {
connection.release(); // Give it back or else it gets lost
if(err) console.log(err);
console.log(rows);
});
});
}
There's also no reason to grab a connection manually like that; you can just use the pool directly:
function executeQuery() {
pool.query("SELECT * FROM users WHERE id = ?", [ 1 ], (err, connection) => {
if(err) console.log(err);
console.log(rows);
});
}
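As for the second part of the question (how to find out when no connections are available): the mysqljs pool does not have to queue requests silently. A hedged sketch using the documented waitForConnections option and the pool's enqueue event, reusing the pool settings from the question:
const mysql = require('mysql');
const pool = mysql.createPool({
    host              : 'x',
    user              : 'x',
    password          : '#x',
    database          : 'x',
    connectionLimit   : 2,
    waitForConnections: false // call back with an error instead of waiting when the pool is exhausted
});
// Documented pool event: fires when a request has to wait for a free connection
// (only relevant while waitForConnections is left at its default of true).
pool.on('enqueue', () => console.log('waiting for an available connection slot'));
function executeQuery() {
    pool.getConnection((err, connection) => {
        if (err) return console.error('no connection available:', err.message);
        connection.query('SELECT * FROM users WHERE id = ?', [1], (err, rows) => {
            connection.release(); // always give the connection back
            if (err) return console.error(err);
            console.log(rows);
        });
    });
}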
Related
I have a Discord.js bot with MySQL as a database. The problem I'm having is that the SQL stops querying after a random number of queries; the only way I could fix this was by restarting the Node.js app.
My bot involves a lot of SQL querying inside of another SQL query, similar to:
sql.query(`SELECT xxxxx`, (err, res) => {
    sql.query(`SELECT xxxxx`, (err, result) => {});
});
And my SQL pool code is :
const mysql = require('mysql');
const pool = mysql.createPool({
host : "localhost",
port : 3306,
user : "x",
password: "x",
database: 'x'
});
let sql = {};
sql.query = function(query, params, callback) {
pool.getConnection(function(err, connection) {
if(err) {
if (callback) callback(err, null, null);
return;
}
connection.query(query, params, function(error, results, fields) {
connection.release();
if(error) {
if (callback) callback(error, null, null);
return;
}
if (callback) callback(false, results, fields);
});
});
};
module.exports = sql;
My VPS is running fine, and my SQL server is running fine as well; I'm not sure what's causing the problem.
My current workaround is a cronjob that restarts the application every 30 minutes, but I'm not sure whether that is good practice.
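Before restarting on a schedule, it can help to see what the pool is actually doing. A small diagnostic sketch, assuming the pool created above, using the pool events documented in the mysqljs README (these listeners only log; they do not change behaviour):
pool.on('acquire', (connection) => console.log('connection %d acquired', connection.threadId));
pool.on('release', (connection) => console.log('connection %d released', connection.threadId));
pool.on('enqueue', () => console.log('waiting for an available connection slot'));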
I am making a Discord level bot; it inserts a random amount of XP each time a user types a message in the chat. To see a user's level I have a !level command, like this:
sql.query(`SELECT * FROM WMembers where DiscordID = ${message.author.id}`, (err, rows) => {
if(err) console.log(err)
if(!rows[0]) return message.channel.send("The user has no XP!")
let xp = rows[0].XP
let level = rows[0].Level
let nextLevel = level * 40
message.channel.send(`**Level: **${level - 1}\n**Points: **${xp} / ${nextLevel}`)
})
However, when I call the command more than 2-3 times, the queries start executing extremely slowly, taking 5 minutes to finally return the value.
Here is my SQL code:
const mysql = require('mysql');
const pool = mysql.createPool({
host : keys.dbHost,
port : 3306,
user : keys.dbUser,
password: keys.dbPass,
database: keys.dbName
});
let sql = {};
sql.query = function(query, params, callback) {
pool.getConnection(function(err, connection) {
if(err) {
if (callback) callback(err, null, null);
return;
}
connection.query(query, params, function(error, results, fields) {
connection.release();
if(error) {
if (callback) callback(error, null, null);
return;
}
if (callback) callback(false, results, fields);
});
});
};
If someone can help me, I will greatly appreciate it. Thank you.
You clearly don't have enough memory for your database: https://dev.mysql.com/doc/mysql-monitor/4.0/en/system-prereqs-reference.html
MySQL's minimum requirements dictate 2GB of memory; you won't get far with 128MB. On that note, as already advised by @ExploitFate, limiting the number of connections your application can make to the database will also save you some memory.
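For reference, a hedged sketch of that advice applied to the pool from the question (connectionLimit defaults to 10 in mysqljs/mysql; the values below are assumptions and should be sized to the memory actually available):
const mysql = require('mysql');
const pool = mysql.createPool({
    host           : 'localhost',
    port           : 3306,
    user           : 'x',
    password       : 'x',
    database       : 'x',
    connectionLimit: 5,  // assumption: keep the pool small on a low-memory server
    queueLimit     : 50  // optional: error out instead of queueing requests without bound
});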
In my project I access a MySQL database and can run queries against it from the program. However, after a while the queries stop working or are not executed at all.
An example run of the queries:
When I investigated the problem, I found a suggestion that I needed to increase the maximum number of connections. However, even after applying it, nothing changed.
I tried this code: SET GLOBAL max_connections = 150; from this source.
The connection part via Node.js:
const connection = mysql.createPool({
host : '192.168.1.101',
user : 'db_manager',
password : '...',
database : 'venue_recommendation'
});
const app = express();
app.get('/venues', function (req, res) {
let sQuery = "SELECT * FROM mekanlar WHERE mekanlar.mahalle_no=\""+req.query.neig+"\" AND mekanlar.puan >="+req.query.star+" AND mekanlar.fiyat <="+req.query.price+";"
console.log(">>>>>",sQuery)
connection.getConnection(function (err, connection) {
if(err) throw err;
connection.query(sQuery, function (error, results, fields) {
if (error) throw error;
res.send(results);
});
});
});
app.get('/Cuisines', function (req, res) {
let sQuery = "SELECT * FROM mutfaklar;"
console.log(">>>>>",sQuery)
connection.getConnection(function (err, connection) {
if(err) throw err;
connection.query(sQuery, function (error, results, fields) {
if (error) throw error;
res.send(results);
});
});
});
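One likely cause, given the answers above, is that these handlers call connection.getConnection but never release() the connection they get, so once every pooled connection is checked out, later requests simply wait. A minimal sketch of the /venues route that lets the pool manage the connection itself and uses placeholders instead of string concatenation (keeping the names from the question; `connection` here is the pool created above):
app.get('/venues', function (req, res) {
    // pool.query checks a connection out, runs the query, and releases it automatically
    let sQuery = "SELECT * FROM mekanlar WHERE mahalle_no = ? AND puan >= ? AND fiyat <= ?";
    connection.query(sQuery, [req.query.neig, req.query.star, req.query.price], function (error, results) {
        if (error) return res.status(500).send(error);
        res.send(results);
    });
});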
This is my first project using MySQL and Node.js; I am used to Mongo, so I might be doing something stupid here. Locally everything works fine (using MySQL), but when I deploy the following code to my hosting (which uses MariaDB), only the parent query inserts into its table (leads); the other table stays empty. Another issue is that I don't have access to the Node.js logs when it is deployed, but as far as I can tell the nested queries never get called.
var mysql = require('mysql');
global.db = mysql.createPool({
host : 'localhost',
user : 'client',
password : '******',
database : 'db'
});
router.post('/', function(req, res){
const d = req.body
let subscribe = (d.subscribe ? 1 : 0)
global.db.getConnection((err, conn) => {
if (err) {
res.end(JSON.stringify(err));
} else {
let lead = [null, d.voornaam, d.achternaam, d.email, d.postcode, d.opmerkingen, d.soort, subscribe]
let sql = 'INSERT INTO leads VALUES ?';
conn.query(sql, [[lead]], (err, results) => {
if (err) {
res.end(JSON.stringify(err));
conn.release();
} else {
const lead_id = results.insertId
d.types.forEach(w => {
let wens = [null, lead_id, w.woningType, w.slaapkamers, w.prijs, w.oplevering]
let sql = 'INSERT INTO wensen VALUES ?';
conn.query(sql, [[wens]], (err, results) => {
if(err) {
res.end(JSON.stringify(err));
conn.release();
}
})
})
res.end('True');
conn.release();
}
})
}
})
});
Check syntax. Note parens:
'INSERT INTO leads VALUES (?)'
Did this fail to tell you that?
if (err) { res.end(JSON.stringify(err)); ... }
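For illustration, a hedged sketch of that parenthesized form (in mysqljs, an array value expands to a comma-separated list, so a whole row can be passed as a single placeholder); `conn`, `lead`, and `res` are the variables from the route above:
let sql = 'INSERT INTO leads VALUES (?)';
conn.query(sql, [lead], (err, results) => {
    if (err) return res.end(JSON.stringify(err)); // surface the real MariaDB error
    console.log('inserted lead', results.insertId);
});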
My current MySQL isolation level is tx_isolation = REPEATABLE-READ for each session.
So when I run the code below in two different terminals, the transactions execute serially: the second one does not start before the first one commits.
START TRANSACTION;
SELECT *
FROM test
WHERE id = 4 FOR UPDATE;
UPDATE test
SET parent = 98
WHERE id = 4;
So if I implement this in Node.js, which of the following would give the same result as running it in the two terminals?
var mysql = require('mysql');
var connection = mysql.createConnection({
host : 'localhost',
user : 'me',
password : 'secret',
database : 'my_db'
});
connection.connect();
let query = `
    START TRANSACTION;
    SELECT *
    FROM test
    WHERE id = 4 FOR UPDATE;
    UPDATE test
    SET parent = 98
    WHERE id = 4;
`;
connection.query(query, function (error, results, fields) {
if (error) throw error;
console.log('The solution is: ', results[0].solution);
});
connection.query(query, function (error, results, fields) {
if (error) throw error;
console.log('The solution is: ', results[0].solution);
});
connection.end();
Or using pools:
var mysql = require('mysql');
var pool = mysql.createPool({
connectionLimit : 10,
host : 'example.org',
user : 'bob',
password : 'secret',
database : 'my_db'
});
let query = `
    START TRANSACTION;
    SELECT *
    FROM test
    WHERE id = 4 FOR UPDATE;
    UPDATE test
    SET parent = 98
    WHERE id = 4;
`;
pool.query(query, function (error, results, fields) {
if (error) throw error;
console.log('The solution is: ', results[0].solution);
connection.release();
});
pool.query(query, function (error, results, fields) {
if (error) throw error;
console.log('The solution is: ', results[0].solution);
connection.release();
});
My first guess was that the pool would create separate connections, and that sending queries over the same connection would be the same as typing the queries into the same terminal. However, the documentation, https://github.com/mysqljs/mysql#pooling-connections (introduction section), says that
Every method you invoke on a connection is queued and executed in sequence.
and I am not exactly sure what that means.
Also, if I use connection pooling, can I be 100% sure that concurrently running queries are handled by different sessions? For example, if the connection is not released by the first query, would the second query ALWAYS be executed by another session?
I have done a few tests and realized that connection pooling gives the expected outcome.
When I do the following with just a connection:
let connection = mysql.createConnection({
connectionLimit:10,
host: 'localhost',
user: 'root',
password: 'thflqkek12!',
database: 'donationether'
});
connection.beginTransaction(function (err) {
console.log('first transaction has started');
if (err) {
console.log(err);
return;
}
connection.query(`INSERT INTO users VALUES (null, 0, 'username', 'token')`, function (err, results, fields) {
if (err) {
console.log(err);
return;
}
setTimeout(function () {
connection.commit(function (err) {
if (err) {
console.log(err);
return;
}
console.log('first query done');
connection.release();
})
}, 2000)
});
});
connection.beginTransaction(function (err) {
console.log('second transaction has started');
if(err) {
console.log(err);
return;
}
connection.query(`UPDATE users SET username = 'c_username' WHERE username = 'username'`,function (err, results, fields) {
if(err) {
console.log(err);
return;
}
connection.commit(function (err) {
if(err) {
console.log(err);
return;
}
console.log('second query done');
connection.release();
})
});
});
It leads to the following output:
first transaction has started
second transaction has started
second query done
first query done
Meaning that the transaction opened first is effectively ignored and the second transaction finishes before it. However, when I use connection pooling with the following code,
let pool = mysql.createPool({
connectionLimit:10,
host: 'localhost',
user: 'root',
password: 'thflqkek12!',
database: 'donationether'
});
pool.getConnection(function (err, connection) {
connection.beginTransaction(function (err) {
console.log('first transaction has started');
if (err) {
console.log(err);
return;
}
connection.query(`INSERT INTO users VALUES (null, 0, 'username', 'token')`, function (err, results, fields) {
console.log('first query has started');
if (err) {
console.log(err);
return;
}
setTimeout(function () {
connection.commit(function (err) {
if (err) {
console.log(err);
return;
}
console.log('first query done');
connection.release();
});
}, 2000)
});
});
});
pool.getConnection(function (err, connection) {
connection.beginTransaction(function (err) {
console.log('second transaction has started');
if(err) {
console.log(err);
return;
}
connection.query(`UPDATE users SET username = 'c_username' WHERE username = 'username'`,function (err, results, fields) {
console.log('second query has started');
if(err) {
console.log(err);
return;
}
connection.commit(function (err) {
if(err) {
console.log(err);
return;
}
console.log('second query done');
connection.release();
})
});
});
});
The output is as follows:
first transaction has started
second transaction has started
first query has started
//2seconds delay
second query has started
first query done
second query done
Meaning that the first transaction blocks the second transaction from executing.
So when the documentation said
Every method you invoke on a connection is queued and executed in sequence
It means that the calls are delivered to the database in sequence over that single connection, but the surrounding code is still asynchronous, so the two transactions end up interleaved on the same session. Connection pooling, however, creates multiple connections, and a transaction run on its own pooled connection behaves as expected.
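Tying this back to the original question, a minimal sketch (assuming the mysqljs driver and the `test` table from the question) of running the two FOR UPDATE transactions on separate pooled connections, so the row lock serializes them just like two terminals would:
const mysql = require('mysql');
const pool = mysql.createPool({
    connectionLimit: 10,
    host    : 'example.org',
    user    : 'bob',
    password: 'secret',
    database: 'my_db'
});
// Each call checks out its own pooled connection, i.e. its own session, so the
// second SELECT ... FOR UPDATE blocks until the first transaction commits.
function updateParent(label, parent) {
    pool.getConnection((err, connection) => {
        if (err) return console.error(label, err);
        connection.beginTransaction((err) => {
            if (err) { connection.release(); return console.error(label, err); }
            connection.query('SELECT * FROM test WHERE id = ? FOR UPDATE', [4], (err) => {
                if (err) return connection.rollback(() => connection.release());
                connection.query('UPDATE test SET parent = ? WHERE id = ?', [parent, 4], (err) => {
                    if (err) return connection.rollback(() => connection.release());
                    connection.commit((err) => {
                        connection.release();
                        console.log(label, err || 'committed');
                    });
                });
            });
        });
    });
}
updateParent('first', 98);
updateParent('second', 99);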