I'm fairly new to how database connections work in Node.js, and I'm having issues with database connections that aren't being closed properly. I've asked a few questions on here about it before, and it seems like everyone is telling me to use a pool instead of the way I've been doing it. The only problem is that when I search online for how to use a pool with promise-mysql, the examples are all very simple and generic, while I'm using it inside a fairly complex application with sockets. So I'm wondering how I can switch my old createConnection() approach over to a pool, in the hope of clearing up these connection issues.
Each time a socket handler runs, it makes a connection to the database and then releases it when it's done, or so it seems. It sounds like this is not a very scalable approach, and that using a pool would let multiple queries run in parallel.
db.js:
import mysql from 'promise-mysql';
import env from '../../../env.config.json';
const db = async (sql, descriptor, serializedParameters = []) => {
    return new Promise( async (resolve, reject) => {
        try {
            const connection = await mysql.createConnection({
                host: env.DB.HOST,
                user: env.DB.USER,
                password: env.DB.PASSWORD,
                database: env.DB.NAME,
                port: env.DB.PORT
            })
            if (connection && env.ENV === "development") {
                //console.log(/*"There is a connection to the db for: ", descriptor*/);
            }
            let result;
            if (serializedParameters.length > 0) {
                result = await connection.query(sql, serializedParameters)
            } else result = await connection.query(sql);
            connection.end();
            resolve(result);
        } catch (e) {
            console.log("ERROR pool.db: " + e);
            reject(e);
        };
    });
}
export default db;
This is an example of how I would create a connection to query the db
inventory.js:
import db from '../API/db';
export const selectAllFromBuildItems = () => {
    return new Promise( async (resolve, reject) => {
        try {
            const getAllBuildItems = "SELECT * FROM mydb.build_items;"
            const response = await db(getAllBuildItems, "AllBuildItems");
            resolve(response);
        } catch (e) {
            console.log("ERROR inventory.selectAllFromBuildItems: " + e);
            reject(e);
        }
    });
};
How can I change my code so that it uses a pool instead? There are a lot of different queries that can be called from our application, so I'm not quite sure what the right approach would be. I saw some people say that I should create the pool once and then use it throughout the application, but I don't know where that would go. If anyone has any suggestions on how I can make this switch, that would help me out a lot. Thanks!
Create the pool, ideally only once, when your application starts.
If it lives in a different file, export it there and import it in the files that need it (see the sketch after the pool code below).
var pool = mysql.createPool({
    host: env.DB.HOST,
    user: env.DB.USER,
    password: env.DB.PASSWORD,
    database: env.DB.NAME,
    connectionLimit: 10
});
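For instance, a minimal sketch of that split (file names and the usage comments are placeholders; depending on the promise-mysql version, createPool may return a promise that resolves to the pool instead of the pool itself):
// pool.js -- create the pool once and export it
import mysql from 'promise-mysql';
import env from '../../../env.config.json';

const pool = mysql.createPool({
    host: env.DB.HOST,
    user: env.DB.USER,
    password: env.DB.PASSWORD,
    database: env.DB.NAME,
    port: env.DB.PORT,
    connectionLimit: 10
});

export default pool;

// any other module -- reuse the same pool everywhere
// import pool from './pool';
// const rows = await pool.query("SELECT * FROM mydb.build_items;");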
I had to add this function because the library had a bug where closing a connection did not return it to the pool.
pool.releaseConnection = function releaseConnection(connection) {
    // hand the underlying mysql connection back to the wrapped pool
    return this.pool.releaseConnection(connection.connection);
};
A function for getting a connection from the pool created earlier.
If you prefer, you can call pool.getConnection() directly in each of your query functions instead.
function connect() {
    return pool.getConnection().then(function(connection) {
        return connection;
    }).catch(function(e) {
        console.log("Error Creating Connection");
        throw e;
    });
}
Now this is your query function for getting data from the db.
function selectAllFromBuildItems() {
    var sql_query = `SELECT * FROM mydb.build_items`;
    return connect().then(function(conn) {
        return conn.query(sql_query).then(function(rows) {
            pool.releaseConnection(conn);
            return rows;
        }).catch(function(e) {
            // release the connection even if the query fails
            pool.releaseConnection(conn);
            throw e;
        });
    }).catch(function(e) {
        console.log("ERROR inventory.selectAllFromBuildItems: " + e);
        throw e;
    });
}
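Calling it from the rest of the app stays the same as before, for example (a hypothetical caller):
selectAllFromBuildItems().then(function(rows) {
    console.log(rows.length + " build items");
}).catch(function(e) {
    // the error has already been logged inside selectAllFromBuildItems
});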
Related
I am trying to connect to an external (not AWS) MySQL server from an AWS Lambda function written in Node.js (nodejs14.x runtime), but the connect() callback is never called.
I have been struggling with this problem for days; there are a lot of references to similar issues, and I have really tried all possible permutations of the solutions I found.
I am deploying with SAM and testing both on my local machine and on real AWS.
Here is the sample code of the Lambda handler:
const mysql = require('mysql');

exports.helloFromLambdaHandler = async () => {
    const message = 'Hello from Lambda!';
    console.info(`${message}`);

    var sql = "SELECT 1+? AS sum";
    var values = [1];

    console.log("Doing createConnection");
    const connection = mysql.createConnection({
        /* my connection data */
    });

    console.log("Doing connect");
    connection.connect( (err) => {
        console.log("Inside connection callback");
        console.log('connected as id ' + connection.threadId);
        if (!err) {
            console.log("DB connected, thread id is " + connection.threadId);
            console.log("Doing query");
            connection.query(sql, values, (err, result, values) => {
                console.log("Inside query callback");
                if (!err) {
                    console.log("Query ok!");
                    console.log(result);
                    connection.end();
                } else {
                    console.log("Error executing query: " + err.message);
                }
            });
        } else {
            console.log("Error connecting db: " + err.message);
        }
    });

    console.log("Returning...");
    return message;
}
The log is
Hello from Lambda!
Doing createConnection
Doing connect
Returning...
The expected behaviour is that after "Returning..." I should see "Inside connection callback", then "Inside query callback", and then "Query ok!".
Instead, the connect() callback appears never to be invoked.
I know that I can call query() directly and skip connect(), but doing so I run into the same issue.
Any clue?
Thank you!
SOLUTION
As suggested by the accepted answer, returning a promise is what lets Node finish everything still in the event queue. As far as I understand, it is not possible to safely complete the Lambda and leave work running in the background.
I am investigating alternative solutions such as:
the mysql2 library, which supports promises natively (a rough sketch of this route follows the list)
the serverless-mysql npm package, which handles shared db connections
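For reference, a rough sketch of the mysql2/promise route (not part of the original demo; the connection data is a placeholder):
const mysql = require('mysql2/promise');

exports.helloFromLambdaHandler = async () => {
    // mysql2/promise returns promises directly, so the async handler can await them
    const connection = await mysql.createConnection({ /* my connection data */ });
    try {
        const [rows] = await connection.execute('SELECT 1+? AS sum', [1]);
        console.log(rows);
        return rows;
    } finally {
        await connection.end();
    }
};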
Below is the working demo code for the promise-based solution:
const mysql = require('mysql');

exports.helloFromLambdaHandler = async (event, context) => {
    const message = 'Hello from Lambda!';
    console.info(`${message}`);

    var sql = "SELECT 1+? AS sum";
    var values = [1];

    console.log("Doing createConnection");
    const connection = mysql.createConnection({
        /* my connection data */
    });

    console.log("Doing query");
    const promise = new Promise( (resolve, reject) => {
        connection.query(sql, values, (err, result, values) => {
            console.log("Inside query callback");
            if (!err) {
                console.log("Query ok!");
                console.log(result);
                connection.end();
                resolve(message);
            } else {
                console.log("Error executing query: " + err.message);
                reject(err);
            }
        });
    });

    console.log("Returning...");
    return promise;
}
You are using an async handler, so your function probably completes before connect() has a chance to execute.
To overcome the issue, you can return a Promise, as shown in the AWS docs.
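As a minimal sketch of that idea, using util.promisify from Node's standard library (handler name reused from the question, connection data is a placeholder):
const mysql = require('mysql');
const util = require('util');

exports.helloFromLambdaHandler = async () => {
    const connection = mysql.createConnection({ /* my connection data */ });

    // promisify the callback APIs so the async handler can await them
    const connect = util.promisify(connection.connect).bind(connection);
    const query = util.promisify(connection.query).bind(connection);

    await connect();                              // resolves once the connection is established
    const rows = await query("SELECT 1+? AS sum", [1]);
    connection.end();
    return rows;
};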
I have multiple modules that require my custom PromisifiedMySQL.js module. I am trying to use a connection pool from the Node.js mysql library.
I tried setting a variable called connectionPool to hold the initialized pool. However, it seems that every module that imports my PromisifiedMySQL.js file initializes a new pool instance.
I want ONLY one pool instance to be created, contained inside PromisifiedMySQL.js.
Why is my connectionPool not keeping its state properly?
const mysql = require('mysql');

let connectionPool = null;

const initializePool = () => {
    if (isLiveEnv()) {
        connectionPool = createLivePool();
    } else if (isDevEnv()) {
        connectionPool = createDevPool();
    }
}

if (connectionPool === null) {
    console.log(connectionPool === null);
    console.log('init pool!');
    initializePool();
}

let query = (sql, args) => {
    return new Promise((resolve, reject) => {
        getConnection().then(con => {
            con.query(sql, args, (err, result) => {
                if (err) {
                    con.release();
                    reject(err);
                } else {
                    con.release();
                    resolve(result);
                }
            })
        })
        .catch(err => {
            reject(err);
        })
    })
} // end query.
My export looks like this:
module.exports = {
    query: query
}
query gets a connection from the pool, runs the query, and releases the connection.
If module1.js and module2.js both require('./Promisified.js'), it actually makes two pools, even though after the first one is created connectionPool is set to be non-null.
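For reference, a minimal sketch of the intended pattern, leaning on Node's require cache so the module body (and therefore the pool creation) only runs once; isLiveEnv/createLivePool/createDevPool stand in for the question's own helpers:
// PromisifiedMySQL.js -- sketch only
const mysql = require('mysql');

// this top-level code runs once; every later require() gets the same cached exports
const connectionPool = isLiveEnv() ? createLivePool() : createDevPool();

const query = (sql, args) =>
    new Promise((resolve, reject) => {
        connectionPool.getConnection((err, con) => {
            if (err) return reject(err);
            con.query(sql, args, (queryErr, result) => {
                con.release();                    // always hand the connection back
                if (queryErr) {
                    reject(queryErr);
                } else {
                    resolve(result);
                }
            });
        });
    });

module.exports = { query: query };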
I am working on a discord.js bot, and I'm storing a bunch of information about various servers in a database. The problem is that the code doesn't wait for the database to return the results. In the current situation, I'm trying to check whether the server-specific prefix matches.
I tried using async and await in various places, but they didn't work. If I can avoid it, I'd rather not use .then(), because I don't really want to put all the commands inside a .then().
const { Client, Attachment, RichEmbed } = require('discord.js');
const client = new Client();
const mysql = require("mysql");
const config = require("./config.json")

var con = mysql.createConnection({
    host: 'localhost',
    user: 'root',
    password: '',
    database: 'botdb'
})

client.on("ready", () => {
    console.log("I'm ready")
})

client.on("message", message => {
    if (message.author.bot) return;
    if (message.channel.type === 'dm') return;

    let msg = message.content.split(" ");
    let command = msg[0];
    let prefix;

    con.query(`SELECT * FROM serversettings WHERE ServerID = ${message.guild.id}`, (err, rows) => {
        if (err) throw err;
        prefix = rows[0].Prefix;
        console.log(prefix)
    })

    console.log(`Prefix: ${prefix}, Command: ${command}`)

    if (command === `${prefix}examplecommand`) {
        //Do something
    }
    //Other code that uses prefix and command
})
It should log the prefix first, and then the Prefix: ${prefix}, Command: ${command} part, but it does it the other way around, so the examplecommand doesn't work.
Your result is caused by the fact that what's outside your query callback is executed immediately after the call. Keep in mind the mysql module is callback-based.
Possible Solutions
Place the code inside the callback so it's executed once the query has completed, for example:
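Here is a sketch of that option, reusing the question's own query and column name:
con.query(`SELECT * FROM serversettings WHERE ServerID = ${message.guild.id}`, (err, rows) => {
    if (err) throw err;
    const prefix = rows[0].Prefix;
    console.log(`Prefix: ${prefix}, Command: ${command}`);

    if (command === `${prefix}examplecommand`) {
        // Do something
    }
    // ...other code that uses prefix and command
});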
Wrap the query in a promise and await it.
function getGuild(guildID) {
    return new Promise((resolve, reject) => {
        con.query(`SELECT * FROM serversettings WHERE ServerID = '${guildID}'`, (err, rows) => {
            if (err) return reject(err);
            resolve(rows);
        });
    });
}

const [guild] = await getGuild(message.guild.id) // destructuring the 'rows' array
    .catch(console.error);

console.log(guild.Prefix);
Use a Promise-based MySQL wrapper, like promise-mysql. You can use it the same way as the code above, without having to write your own Promises.
const [guild] = await con.query(`SELECT * FROM serversettings WHERE ServerID = '${message.guild.id}'`)
    .catch(console.error);

console.log(guild.Prefix);
I'm using node with mysql and I have a route that does:
const mysql = require("./mysql");
router.post("/register_user", (req, res) => {
mysql.register(req.body).then((result) => {
// stuff
});
});
mysql.js:
const mysql = require("mysql");
const connection = mysql.createConnection("mysql://...");
exports.register = (req) => {
const user = { name: req.name };
return new Promise((resolve, reject) => {
// make sure user doesn't exist already
connection.query('...', [user], (err, data) => {
...
if (isNewUser) {
connection.query('INSERT INTO USER...', user, (insertErr, rows) => {
...
resolve(rows);
connection.end();
}
}
});
});
}
This works perfectly when I register the first user in my app. But immediately after, if I log out (on the web app), then register a new user, I get an error saying:
Error: Cannot enqueue Query after invoking quit.
Why doesn't this create a new connection?
I assume you are using the mysql NPM module.
If that is the case, could you simply use MySQL pooling connections?
Rather than creating and managing connections one by one, this module provides built-in connection pooling via mysql.createPool(config).
So instead of calling connection.end(), you would call connection.release() to return the connection to the pool of open connections.
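A rough sketch of what mysql.js could look like with a pool, keeping the question's elided connection string, query, and existence check as placeholders:
const mysql = require("mysql");
// create the pool once when the module is first required
const pool = mysql.createPool("mysql://...");

exports.register = (req) => {
    const user = { name: req.name };
    return new Promise((resolve, reject) => {
        pool.getConnection((err, connection) => {
            if (err) return reject(err);
            // make sure user doesn't exist already, then insert, as in the original code
            connection.query('INSERT INTO USER...', user, (insertErr, rows) => {
                connection.release();             // return the connection to the pool instead of ending it
                if (insertErr) return reject(insertErr);
                resolve(rows);
            });
        });
    });
}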
I'm currently testing my node app with ApacheBench and I ran into an issue with my database: ER_CON_COUNT_ERROR: Too many connections.
I'm using a short library on top of the MySQL node module, which you can see just below.
var mysql = require('mysql');
var config = require('path/to/config');
var message = require('./myMessageLib.js');

var pool = mysql.createPool({
    connectionLimit: 100,
    host: config.db.mysql.host,
    user: config.db.mysql.user,
    password: config.db.mysql.password,
    database: config.db.mysql.database
});

var query = function(query_str, values, next) {
    pool.getConnection((err, connection) => {
        if (err) {
            console.error("MySQL Fail to get a connection in pool : " + err);
            if (typeof connection !== "undefined")
                connection.release();
            next(err, null);
            return;
        }
        connection.query(query_str, values, function(error, data, fields) {
            connection.release();
            if (error)
                if (config.app.env.dev)
                    throw (error);
                else {
                    next(error, null);
                    return (message.error("MySQL query failed : " + query_str + " / err : " + error));
                }
            if (data.length == 0)
                next(null);
            else
                next(data);
        })
    })
}

exports.query = query;
I use this library in my model by doing something like this
var mysql = require('path/to/mysqllib');
/**
 * Class PlayerModel
 */
function PlayerModel() { };

PlayerModel.prototype.get = function(id, next) {
    mysql.query("SELECT ....", [id], function(player) {
        // stuff
    })
}
module.exports = PlayerModel;
The thing is, on my homepage I use different models like the one above, and each one runs a query to fetch some database information. When I launch ApacheBench with only 50 concurrent requests, I get ER_CON_COUNT_ERROR: Too many connections. So I have the feeling the pool isn't set up correctly, because it doesn't seem to respect the connection limit of 100 written in the short MySQL lib.
I was thinking about creating and storing the pool in a global Node.js variable to share it correctly across my modules, but I'm not sure that's a good approach, and maybe I'm also doing something wrong in my pool implementation.
Do you have any ideas or improvements to suggest?
Thanks mates!
I figured out the issue.
My app was deployed in cluster mode: two processes were running at the same time. Because of that, two pools of 100 connections each could be created, for a total of 200 connections, which is higher than MySQL's default connection limit.
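As a rough illustration of the numbers involved (the worker count here comes from the scenario above; MySQL's default max_connections is 151 and can be checked with SHOW VARIABLES LIKE 'max_connections'):
// hypothetical sizing sketch: keep workers x connectionLimit below max_connections
var WORKERS = 2;                        // cluster processes running the app
var MYSQL_MAX_CONNECTIONS = 151;        // server-side limit (MySQL default)

var pool = mysql.createPool({
    connectionLimit: Math.floor((MYSQL_MAX_CONNECTIONS - 1) / WORKERS), // ~75 per worker
    host: config.db.mysql.host,
    user: config.db.mysql.user,
    password: config.db.mysql.password,
    database: config.db.mysql.database
});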
Great that you found a solution; here's another one with less code.
Create a js file, dbconnection.js for example:
var mysql = require("mysql");
var pool = mysql.createPool({
connectionLimit: 10,
host: '...',
user: '...',
password: '...',
database: '...',
dateStrings: true
});
exports.connection = {
query: function () {
var queryArgs = Array.prototype.slice.call(arguments),
events = [],
eventNameIndex = {};
pool.getConnection(function (err, conn) {
if (err) {
if (eventNameIndex.error) {
eventNameIndex.error();
}
}
if (conn) {
var q = conn.query.apply(conn, queryArgs);
q.on('end', function () {
conn.release();
});
events.forEach(function (args) {
q.on.apply(q, args);
});
}
});
return {
on: function (eventName, callback) {
events.push(Array.prototype.slice.call(arguments));
eventNameIndex[eventName] = callback;
return this;
}
};
}
};
In the other file where you want to use the connection:
var db = require('./dbconnection.js');
And instead of
connection.query
Use
db.connection.query
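For example, a small usage sketch (the table and parameters here are made up):
var db = require('./dbconnection.js');

db.connection.query('SELECT * FROM users WHERE id = ?', [1])
    .on('result', function (row) {
        console.log(row);              // emitted once per row
    })
    .on('error', function (err) {
        console.error(err);            // emitted if the query fails (also called when no connection could be obtained)
    });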