I'm using node with mysql and I have a route that does:
const mysql = require("./mysql");
router.post("/register_user", (req, res) => {
mysql.register(req.body).then((result) => {
// stuff
});
});
mysql.js:
const mysql = require("mysql");
const connection = mysql.createConnection("mysql://...");
exports.register = (req) => {
const user = { name: req.name };
return new Promise((resolve, reject) => {
// make sure user doesn't exist already
connection.query('...', [user], (err, data) => {
...
if (isNewUser) {
connection.query('INSERT INTO USER...', user, (insertErr, rows) => {
...
resolve(rows);
connection.end();
});
}
});
});
}
This works perfectly when I register the first user in my app. But immediately after, if I log out (on the web app), then register a new user, I get an error saying:
Error: Cannot enqueue Query after invoking quit.
Why doesn't this create a new connection?
I assume you are using the mysql NPM module.
If that is the case, could you simply use MySQL connection pooling?
Rather than creating and managing connections one-by-one, the module provides built-in connection pooling via mysql.createPool(config).
So instead of calling connection.end(), you would call connection.release() to return the connection to the pool of open connections.
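As a rough sketch (reusing the connection options, the '...' queries and the isNewUser check from your question as placeholders), register() could look like this with a pool:
const mysql = require("mysql");
const pool = mysql.createPool({ /* same connection options as before */ });

exports.register = (req) => {
  const user = { name: req.name };
  return new Promise((resolve, reject) => {
    pool.getConnection((err, connection) => {
      if (err) return reject(err);
      // make sure user doesn't exist already
      connection.query('...', [user], (err, data) => {
        if (err) { connection.release(); return reject(err); }
        if (isNewUser) {
          connection.query('INSERT INTO USER...', user, (insertErr, rows) => {
            connection.release(); // return the connection to the pool instead of ending it
            if (insertErr) return reject(insertErr);
            resolve(rows);
          });
        } else {
          connection.release();
          resolve(data);
        }
      });
    });
  });
};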
Related
I am trying to fix a bug in my node.js application, which is preventing a program I have written from ending successfully.
The script is returning the required results, but something is still running in the background which appears to be preventing it from terminating.
Through process of elimination, I have been able to narrow down the culprit to this getPlayers() function; but I am not sure why the problem is being caused.
Firstly, I am exporting the database pool from a module, like so:
require('dotenv').config()
const mysql = require("mysql"),
pool = mysql.createPool({
host: process.env.dbHost,
user: process.env.dbUser,
password: process.env.dbPassword,
database: process.env.dbDatabase
})
pool.connectionLimit = 15
module.exports = pool
And this is the query that appears to be causing all of the problems:
const getPlayers = () => {
return new Promise((resolve, reject) => {
db.query('SELECT * FROM cc_players', (err, res) => {
if(err)
console.log(err)
resolve(res)
})
})
}
If I comment out the db.query() function, and simply resolve an empty array, the script terminates as expected. Is there something about the database query that could be causing the script to continue running in the background?
Because you're creating a pool, previously opened SQL connections will not be closed, and are instead kept for later use. This is why Node.js never exits.
To fix this, the mysql package provides a pool.end function to close all connections in the pool. You should call it when your script is ready to exit, perhaps like so:
function onExit() {
// ...
// Assuming ``db`` is the pool you created
db.end()
// ...
}
Beware that no further SQL operations can be performed on the pool after pool.end is called.
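For example (a sketch based on the getPlayers() function from your question), you could call it once the last query has resolved:
getPlayers().then(players => {
  // ... work with the players ...
  onExit(); // closes the pool so Node.js can exit
});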
You created a pool, which will keep connections open for re-use. As long as there are open sockets, Node.js will not exit.
An alternative approach: creating a pool keeps connections open in the background, so you can use the createConnection function instead of createPool and then close the connection manually.
let mysql = require('mysql');
let connection = mysql.createConnection({
host: process.env.dbHost,
user: process.env.dbUser,
password: process.env.dbPassword,
database: process.env.dbDatabase
});
const getPlayers = () => {
return new Promise((resolve, reject) => {
connection.query('SELECT * FROM cc_players', (err, res) => {
if(err)
console.log(err)
resolve(res)
})
})
}
And once you get the callback, you can close the connection:
getPlayers().then(res => {
  // ...
connection.end();
})
Our stack is Node.js with MySQL, and we're using MySQL connection pooling. Our MySQL database is managed on AWS Aurora.
In the case of an auto-failover, the master DB changes; the hostname stays the same, but the connections inside the pool stay connected to the wrong DB.
The only way we've found to reset the connections is to roll our servers.
Here is a demonstration of a solution I think could solve this issue,
but I would prefer a solution without the setInterval:
const mysql = require('mysql');
class MysqlAdapter {
constructor() {
this.connectionType = 'MASTER';
this.waitingForAutoFaileOverSwitch = false;
this.poolCluster = mysql.createPoolCluster();
this.poolCluster.add(this.connectionType, {
host: 'localhost',
user: 'root',
password: 'root',
database: 'app'
});
this.intervalID = setInterval(() => {
if(this.waitingForAutoFaileOverSwitch) return;
this.excute('SHOW VARIABLES LIKE \'read_only\';').then(res => {
// if MASTER is read-only, a failover has occurred and we need to switch all connections in the pool to the secondary database
if (res[0].Value === 'ON') {
this.waitingForAutoFaileOverSwitch = true
this.poolCluster.end(() => {
this.waitingForAutoFaileOverSwitch = false
});
};
});
}, 5000);
}
async excute(query) {
// delay all incoming requests until the pool has closed all connections to the read-only database
if (this.waitingForAutoFaileOverSwitch) {
return new Promise((resolve, reject) => {
setTimeout(() => {
this.excute(query).then(res => {
resolve(res);
});
}, 1000);
});
}
return new Promise((resolve, reject) => {
this.poolCluster.getConnection(this.connectionType, (err, connection) => {
if (err) {
return reject(err);
}
connection.query(query, (err, rows) => {
connection.release();
if (err) {
return reject(err);
}
resolve(rows);
});
});
});
}
}
const adapter = new MysqlAdapter();
Is there any other programmatic way to reset the connections inside the pool?
Is there any notification we can listen to in case of auto-failover?
Instead of manually monitoring the DB health, as you have also hinted, ideally we subscribe to failover events published by AWS RDS Aurora.
There are multiple failover events listed here for the DB cluster: Amazon RDS event categories and event messages
You can test to see which of them is the most reliable trigger for poolCluster.end() in your use case.
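As a rough, untested sketch (the endpoint path and event payload field names are assumptions and should be checked against the RDS/SNS documentation), you could point an HTTPS subscription of the SNS topic used by the RDS event subscription at your app, and end the pool cluster when a failover notification arrives:
const express = require('express');
const app = express();
// SNS posts JSON with a text/plain content type, so parse the raw body as text
app.use(express.text({ type: '*/*' }));

app.post('/rds-events', (req, res) => {
  // (SNS SubscriptionConfirmation handling omitted for brevity)
  const notification = JSON.parse(req.body);
  // The RDS event arrives as a JSON string in the SNS "Message" field
  const event = JSON.parse(notification.Message);
  if (/failover/i.test(event['Event Message'] || '')) {
    adapter.poolCluster.end(() => {
      // recreate the pool cluster here so new connections reach the new master
    });
  }
  res.sendStatus(200);
});
app.listen(3000);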
I'm fairly new to how database connections work using nodejs, and I'm having issues with database connections that aren't being closed properly. I've asked a few questions on here before about it, and it seems like everyone is telling me to use pool instead of the way I have been doing it. The only problem is that when I search online about using pool from promise-mysql, everyone seems to use a very simple and generic approach, but I'm using it within a complex application using sockets. So I'm wondering how I can switch my old approach using createConnection() to using pool instead, in hopes of clearing up these connection issues.
Each time I call a socket it makes a connection to the database and then releases it after it is complete, or so it seems. It sounds like this is not a very scalable approach, and that using pool will help run multiple queries in parallel.
db.js:
import mysql from 'promise-mysql';
import env from '../../../env.config.json';
const db = async (sql, descriptor, serializedParameters = []) => {
return new Promise( async (resolve, reject) => {
try {
const connection = await mysql.createConnection({
host: env.DB.HOST,
user: env.DB.USER,
password: env.DB.PASSWORD,
database: env.DB.NAME,
port: env.DB.PORT
})
if (connection && env.ENV === "development") {
//console.log(/*"There is a connection to the db for: ", descriptor*/);
}
let result;
if(serializedParameters.length > 0) {
result = await connection.query(sql, serializedParameters)
} else result = await connection.query(sql);
connection.end();
resolve(result);
} catch (e) {
console.log("ERROR pool.db: " + e);
reject(e);
};
});
}
export default db;
This is an example of how I would create a connection to query the db
inventory.js:
import db from '../API/db';
export const selectAllFromBuildItems = () => {
return new Promise( async (resolve, reject) => {
try {
const getAllBuildItems = "SELECT * FROM mydb.build_items;"
const response = await db(getAllBuildItems, "AllBuildItems");
resolve(response);
} catch (e) {
console.log("ERROR inventory.selectAllFromBuildItems: " + e);
reject(e);
}
});
};
How can I change my code so that I use a pool instead. I have a lot of different queries that can be called from our application so I'm not quite sure what the right approach for this would be. I saw some people say that I should create the pool once and then use it throughout the application, but I don't know where that would go. If anyone has any suggestions on how I can make this switch, that would help me out a lot. Thanks!
Create the pool; it's best to create it once, when your application starts.
If it is in a different file, export it there and import it wherever it's needed.
var pool = mysql.createPool({
host: env.DB.HOST,
user: env.DB.USER,
password: env.DB.PASSWORD,
database: env.DB.NAME,
connectionLimit: 10
});
I had to add this function because the library had a bug where closing a connection was not returning it to the pool:
pool.releaseConnection = function releaseConnection(connection) {
return this.pool.releaseConnection(connection.connection);
};
Function for getting a connection from the pool created earlier.
If you want, you can call pool.getConnection() directly in all your query functions.
function connect() {
return pool.getConnection().then(function(connection) {
return connection
}).catch(function(e) {
console.log("Error Creating Connection");
throw e;
});
}
Now this is your query function to get data from the db.
function selectAllFromBuildItems() {
var sql_query = `SELECT * FROM mydb.build_items`;
return connect().then(function(conn) {
return conn.query(sql_query).then(function(rows) {
pool.releaseConnection(conn);
return rows;
});
}).catch(function(e) {
console.log("ERROR inventory.selectAllFromBuildItems: " + e);
throw e;
});
}
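The caller then stays essentially the same as in your inventory.js, for example:
selectAllFromBuildItems().then(function(rows) {
  console.log(rows);
}).catch(function(e) {
  console.log("ERROR: " + e);
});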
Update: descriptions have been added. Hope this helps.
I need to know how to disconnect from my MySQL database after lots of individual callbacks have finished. I have a node.js cron script running on AWS EC2 which accesses s3 buckets and MySQL databases on AWS RDS. The cron script looks something like this:
const mysql = require("mysql2"),
AWS = require("aws-sdk"),
s3 = new AWS.S3(),
connection = mysql.createConnection({...});
connection.connect();
connection.query(`SELECT ... LIMIT 100`, (error, results) => {
if (error) throw new Error(error);
for (let idx in results) {
const row = results[idx],
Key = `my/key/${row.id}`;
s3.getObject({Bucket, Key}, (error, object) => {
// do more things, with more callbacks
});
}
});
setTimeout(() => connection.end(), 10000); // disconnect database in 10 seconds
The script doesn't exit until I disconnect from the database using connection.end(). I can't disconnect as normal e.g. after the for loop, because the various callbacks are still running. I need to know when they're all finished. Currently I just disconnect after 10 seconds because everything should have completed by then. If I don't do that then I end up with lots of never-ending processes running.
Do I need to set flags & counts of each thing, and then use setInterval or something until they're all finished and it's safe to disconnect? OK to do but is that the right approach when using callbacks, promises & thens?
You can do it with counters or flags as you said, or with Promise.all:
const mysql = require("mysql2"),
AWS = require("aws-sdk"),
s3 = new AWS.S3(),
connection = mysql.createConnection({...});
function doQuery(){
connection.connect();
return new Promise((resolve, reject)=>{
connection.query(`SELECT ... LIMIT 100`, (error, results) => {
if (error) { return reject(new Error(error)); }
resolve(results)
});
})
}
doQuery()
.then(results => {
const jobs = results.map(row => {
const Key = `my/key/${row.id}`;
return new Promise((resolve, reject) => {
s3.getObject({Bucket, Key}, (error, object) => {
// do more things, with more callbacks
resolve('ok')
});
})
})
return Promise.all(jobs)
})
.finally(()=>{
connection.end()
})
I just wanted to add that Promise.all() is definitely a great way to go; however, it's not the only approach.
In this day & age, where the cost of connecting to & disconnecting from your database can be very cheap, I find it simpler to just connect on every query and disconnect after:
const dbOneQuery = (sql, bindVars, callback) => {
const dbConnection = getConnection(); // mysql2.createConnection etc
dbConnection.query(sql, bindVars, (error, result) => {
dbConnection.end();
if (callback) callback(error, result);
});
};
and that way there aren't any connections held open to be closed.
If in future I move to persistent connections again, I can just change what getConnection() does and use something that overrides .end() etc..
For me this approach has been simpler overall compared to managing a single shared connection to the database, with no real downsides.
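For reference, a minimal sketch of what getConnection() could look like here (the config values mirror the earlier examples and are just placeholders):
const mysql = require("mysql2");
const getConnection = () => mysql.createConnection({
  host: process.env.dbHost,
  user: process.env.dbUser,
  password: process.env.dbPassword,
  database: process.env.dbDatabase
});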
I am working on a discord.js bot, and I'm storing a bunch of information on various servers in a database. The problem is, that the code doesn't wait for the database to return the results. In the current situation, I'm trying to check if the server specific prefix checks out.
I tried using async and await at various places, but those didn't work. If I could, I'd rather not use .then(), because I don't really want to put all the commands inside a .then().
const { Client, Attachment, RichEmbed } = require('discord.js');
const client = new Client();
const mysql = require("mysql");
const config = require("./config.json")
var con = mysql.createConnection({
host: 'localhost',
user: 'root',
password: '',
database: 'botdb'
})
client.on("ready", () => {
console.log("I'm ready")
})
client.on("message", message => {
if (message.author.bot) return;
if (message.channel.type === 'dm') return;
let msg = message.content.split(" ");
let command = msg[0];
let prefix;
con.query(`SELECT * FROM serversettings WHERE ServerID = ${message.guild.id}`, (err, rows) => {
if (err) throw err;
prefix = rows[0].Prefix;
console.log(prefix)
})
console.log(`Prefix: ${prefix}, Command: ${command}`)
if (command === `${prefix}examplecommand`) {
//Do something
}
//Other code that uses prefix and command
})
It should log the prefix first, and then the Prefix: ${prefix}, Command: ${command} part, but it does it the other way around, so the examplecommand doesn't work.
Your result is caused by the fact that what's outside your query callback is executed immediately after the call. Keep in mind the mysql module is callback-based.
Possible Solutions
Place the code inside the callback so it's executed when the query is completed.
Wrap the query in a promise and await it.
function getGuild(guildID) {
return new Promise((resolve, reject) => {
con.query(`SELECT * FROM serversettings WHERE ServerID = '${guildID}'`, (err, rows) => {
if (err) return reject(err);
resolve(rows);
});
});
}
const [guild] = await getGuild(message.guild.id) // destructuring 'rows' array
.catch(console.error);
console.log(guild.Prefix);
Use a Promise-based version of a MySQL wrapper, like promise-mysql. You could use it the same way as the code above, without worrying about coding your own Promises.
const [guild] = await con.query(`SELECT * FROM serversettings WHERE serverID = '${message.guild.id}'`)
.catch(console.error);
console.log(guild.Prefix);
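Note that await only works inside an async function, so with either of the last two options the message handler itself needs to be marked async, for example:
client.on("message", async message => {
  if (message.author.bot) return;
  if (message.channel.type === 'dm') return;
  const [guild] = await getGuild(message.guild.id).catch(console.error);
  const prefix = guild.Prefix;
  let msg = message.content.split(" ");
  let command = msg[0];
  if (command === `${prefix}examplecommand`) {
    // Do something
  }
});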