How to make multiple mysql queries in Node with promises - mysql

G'day all,
I'm trying to convert some old PHP code over to Node, and part of the journey has been trying to figure out the best way to perform SQL queries against my database (I'm using MySQL so I can port the existing database over).
I've got them working, but have encountered the "Pyramid of Doom" problem and its accompanying scope issues (i.e. the returned values not being available to subsequent "then"s).
An example of the sort of code I have here is: (dbPool.queryOPromise returns a query wrapped in a promise)
dbPool.queryOPromise(query)
.then(function(result){
    console.log(result);
    var query = {
        sql: "INSERT INTO newusers (newuserid, ipaddress, email) VALUES (?,?,?)",
        values: [newuserid, ipAddress, email]
    };
    dbPool.queryOPromise(query)
    .then(function(value){
        console.log(value);
        if (value.code == 200) {
            res.status(200).json({code: 200, status: "New User Created"});
        } else {
            res.status(400).json({code: value.code, status: "Error creating new user: " + value.status});
        }
    })
})
Does anyone have a view on the best way to attack this situation?
Thanks!

You're supposed to return the subsequent promises instead of calling .then on them
dbPool.queryOPromise(query)
.then(function(result) {
    console.log(result);
    var query = {
        sql: "INSERT INTO newusers (newuserid, ipaddress, email) VALUES (?,?,?)",
        values: [newuserid, ipAddress, email]
    };
    // RETURN the second promise
    return dbPool.queryOPromise(query);
})
.then(function(value) {
    console.log(value);
    if (value.code == 200) {
        res.status(200).json({code: 200, status: "New User Created"});
    } else {
        res.status(400).json({code: value.code, status: "Error creating new user: " + value.status});
    }
})
.catch(console.error); // and always catch the errors at the end.
It's the #1 rookie mistake when using promises. Check out this wonderfully written article addressing issues exactly like this one.
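For completeness, a queryOPromise-style helper doesn't need to be anything fancy. The real one isn't shown in the question, so this is only a hypothetical sketch of what such a wrapper might look like on top of the callback-based mysql pool, resolving with the {code, status} shape the handlers above inspect:
var mysql = require('mysql');
var pool = mysql.createPool({ /* your connection settings */ });

var dbPool = {
    // Hypothetical implementation: never rejects, always resolves with an
    // object carrying a code (plus a status/result), matching the usage above.
    queryOPromise: function (query) {
        return new Promise(function (resolve) {
            pool.query(query.sql, query.values, function (err, result) {
                if (err) {
                    resolve({code: 500, status: err.message});
                } else {
                    resolve({code: 200, status: "OK", result: result});
                }
            });
        });
    }
};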

For Node.js versions > 8.x only.
Sharing my working example:
I use this promisified MySQL middleware for Node.js.
Read the article Create a MySQL Database Middleware with Node.js 8 and Async/Await.
Here is my database.js:
var mysql = require('mysql');
// node -v must be > 8.x
var util = require('util');

// !!!!! for node version < 8.x only !!!!!
// npm install util.promisify
// require('util.promisify').shim();
// -v < 8.x has problems with async/await, so upgrade -v to v9.6.1 for this to work.

// connection pool https://github.com/mysqljs/mysql [1]
var pool = mysql.createPool({
    connectionLimit : process.env.mysql_connection_pool_Limit, // default: 10
    host            : process.env.mysql_host,
    user            : process.env.mysql_user,
    password        : process.env.mysql_password,
    database        : process.env.mysql_database
})

// Ping database to check for common exception errors.
pool.getConnection((err, connection) => {
    if (err) {
        if (err.code === 'PROTOCOL_CONNECTION_LOST') {
            console.error('Database connection was closed.')
        }
        if (err.code === 'ER_CON_COUNT_ERROR') {
            console.error('Database has too many connections.')
        }
        if (err.code === 'ECONNREFUSED') {
            console.error('Database connection was refused.')
        }
    }
    if (connection) connection.release()
    return
})

// Promisify for Node.js async/await.
pool.query = util.promisify(pool.query)

module.exports = pool
You must upgrade to Node.js > 8.x, and you must use an async function to be able to use await.
Example:
var pool = require('./database')

// node -v must be > 8.x --> async / await
router.get('/:template', async function(req, res, next) {
    ...
    try {
        // use a placeholder instead of string concatenation to avoid SQL injection
        var _sql_rest_url = 'SELECT * FROM arcgis_viewer.rest_url WHERE id = ?'
        var rows = await pool.query(_sql_rest_url, [_url_id])

        _url = rows[0].rest_url // first record, property name is 'rest_url'
        if (_center_lat == null)  { _center_lat  = rows[0].center_lat }
        if (_center_long == null) { _center_long = rows[0].center_long }
        if (_center_zoom == null) { _center_zoom = rows[0].center_zoom }
        _place = rows[0].place
    } catch(err) {
        throw new Error(err)
    }
    ...
})

Related

How to switch from using mysql.createConnection to mysql.createPool

I'm fairly new to how database connections work in Node.js, and I'm having issues with database connections that aren't being closed properly. I've asked a few questions on here before about it, and it seems like everyone is telling me to use a pool instead of the way I have been doing it. The only problem is that when I search online about using a pool from promise-mysql, everyone seems to use a very simple and generic approach, but I'm using it within a complex application that uses sockets. So I'm wondering how I can switch my old approach using createConnection() to using a pool instead, in hopes of clearing up these connection issues.
Each time I call a socket it makes a connection to the database and then releases it after it is complete, or so it seems. It sounds like this is not a very scalable approach, and that using a pool will help run multiple queries in parallel.
db.js:
import mysql from 'promise-mysql';
import env from '../../../env.config.json';

const db = async (sql, descriptor, serializedParameters = []) => {
    return new Promise( async (resolve, reject) => {
        try {
            const connection = await mysql.createConnection({
                host: env.DB.HOST,
                user: env.DB.USER,
                password: env.DB.PASSWORD,
                database: env.DB.NAME,
                port: env.DB.PORT
            })
            if (connection && env.ENV === "development") {
                //console.log(/*"There is a connection to the db for: ", descriptor*/);
            }
            let result;
            if (serializedParameters.length > 0) {
                result = await connection.query(sql, serializedParameters)
            } else result = await connection.query(sql);
            connection.end();
            resolve(result);
        } catch (e) {
            console.log("ERROR pool.db: " + e);
            reject(e);
        }
    });
}
export default db;
This is an example of how I would create a connection to query the db
inventory.js:
import db from '../API/db';

export const selectAllFromBuildItems = () => {
    return new Promise( async (resolve, reject) => {
        try {
            const getAllBuildItems = "SELECT * FROM mydb.build_items;"
            const response = await db(getAllBuildItems, "AllBuildItems");
            resolve(response);
        } catch (e) {
            console.log("ERROR inventory.selectAllFromBuildItems: " + e);
            reject(e);
        }
    });
};
How can I change my code so that I use a pool instead? I have a lot of different queries that can be called from our application, so I'm not quite sure what the right approach would be. I saw some people say that I should create the pool once and then use it throughout the application, but I don't know where that would go. If anyone has any suggestions on how I can make this switch, that would help me out a lot. Thanks!
Create the pool. It's better if you create it once, when you run your application.
If it is in a different file, then export it there and import it in the files that need it (see the sketch after the snippet below).
var pool = mysql.createPool({
    host: env.DB.HOST,
    user: env.DB.USER,
    password: env.DB.PASSWORD,
    database: env.DB.NAME,
    connectionLimit: 10
});
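For example, putting the pool in its own module could look roughly like this (a sketch only; the file name pool.js is just an illustration, and depending on your promise-mysql version createPool may itself return a promise, so check the library docs):
// pool.js -- create the pool once, at application startup, and export it
import mysql from 'promise-mysql';
import env from '../../../env.config.json';

const pool = mysql.createPool({
    host: env.DB.HOST,
    user: env.DB.USER,
    password: env.DB.PASSWORD,
    database: env.DB.NAME,
    connectionLimit: 10
});

export default pool;

// inventory.js (or any other file that needs the database)
// import pool from './pool';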
I had to create this prototype function because the library had a bug where closing a connection was not returning it to the pool.
pool.prototype.releaseConnection = function releaseConnection(connection) {
    return this.pool.releaseConnection(connection.connection);
};
Function for getting a connection from the pool that was created earlier.
If you want, you can call pool.getConnection() directly in all your query functions instead.
function connect() {
    return pool.getConnection().then(function(connection) {
        return connection;
    }).catch(function(e) {
        console.log("Error Creating Connection");
        throw e;
    });
}
Now this is your query function to get data from the DB.
function selectAllFromBuildItems() {
    var sql_query = `SELECT * FROM mydb.build_items`;
    return connect().then(function(conn) {
        return conn.query(sql_query).then(function(rows) {
            pool.releaseConnection(conn);
            return rows;
        });
    }).catch(function(e) {
        console.log("ERROR inventory.selectAllFromBuildItems: " + e);
        throw e;
    });
}
Update: Descriptions are added. Hope this helps you.

Save Query result into Variable Alexa Skills Json

I needed a DB for an Alexa app, so I set one up, and it INSERTs nicely. But when I try to SELECT and save the result to a variable, the value saved to the variable is [object Object] instead of the wanted value. I know it could be an async problem or a parsing problem, but I just can't fix the code. Some help would be cool.
canHandle(handlerInput) {
    return Alexa.getRequestType(handlerInput.requestEnvelope) === 'IntentRequest'
        && Alexa.getIntentName(handlerInput.requestEnvelope) === 'buscaIntent';
},
handle(handlerInput) {
    const mysql = require('mysql');
    const connection = mysql.createConnection({
        host: 'remotemysql.com',
        user: 'RBb34534sd',
        password: 'xxxxxxxxx',
        database: 'RBsdfewrg'
    });
    var stat = connection.query('SELECT `spe` FROM `prueba` WHERE `nombre` LIKE "raichu" limit 1', function (err, result, fields) {
        if (err) throw err;
        console.log(result);
        return result[0];
    });
    connection.end();
    return handlerInput.responseBuilder
        .speak("Busc " + stat)
        .reprompt("reprompt buscar")
        .getResponse();
}
};
The issue is that you're not waiting for your database query to complete before sending your response to the Alexa service. Requests in node.js are non-blocking, meaning you either need to nest the request with a callback, or leverage Promises / async-await patterns so that the SQL query is processed before the function is fully executed.
You can read more on converting the built-in library for SQL connections to support Promises here, or use a library like this that already has a wrapper in place.
In either scenario, the end result would be refactored to something like this:
canHandle(handlerInput) {
    return Alexa.getRequestType(handlerInput.requestEnvelope) === 'IntentRequest'
        && Alexa.getIntentName(handlerInput.requestEnvelope) === 'buscaIntent';
},
async handle(handlerInput) {
    const mysql = require('mysql2/promise');
    const connection = await mysql.createConnection({
        host: 'remotemysql.com',
        user: 'RBb34534sd',
        password: 'xxxxxxxxx',
        database: 'RBsdfewrg'
    });
    // execute() returns a promise resolving to [rows, fields] -- no callback needed
    const [rows] = await connection.execute('SELECT `spe` FROM `prueba` WHERE `nombre` LIKE "raichu" limit 1');
    console.log(rows);
    const stat = rows[0]; // first row of the result set
    await connection.end();
    return handlerInput.responseBuilder
        .speak("Busc " + stat.spe) // speak the field, not the whole row object
        .reprompt("reprompt buscar")
        .getResponse();
}
Another article describing async calls for Alexa requests here.
I think the query is returning an object, and you can't put the whole object into speech. Check what's inside the object, and if there is a field you want inside that object, then access it with stat.YourField.
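For example (a small sketch; spe is the column selected in the question's query):
console.log(stat);                  // inspect what the result object actually contains
var speech = "Busc " + stat.spe;    // read just the field you want, not the whole object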

discord.js/node.js make code wait until sql query returns result

I am working on a discord.js bot, and I'm storing a bunch of information on various servers in a database. The problem is that the code doesn't wait for the database to return the results. In the current situation, I'm trying to check if the server-specific prefix checks out.
I tried using async and await at various places, but those didn't work. If I could, I'd rather not use .then(), because I don't really want to put all the commands inside a .then().
const { Client, Attachment, RichEmbed } = require('discord.js');
const client = new Client();
const mysql = require("mysql");
const config = require("./config.json");

var con = mysql.createConnection({
    host: 'localhost',
    user: 'root',
    password: '',
    database: 'botdb'
})

client.on("ready", () => {
    console.log("I'm ready")
})

client.on("message", message => {
    if (message.author.bot) return;
    if (message.channel.type === 'dm') return;
    let msg = message.content.split(" ");
    let command = msg[0];
    let prefix;
    con.query(`SELECT * FROM serversettings WHERE ServerID = ${message.guild.id}`, (err, rows) => {
        if (err) throw err;
        prefix = rows[0].Prefix;
        console.log(prefix)
    })
    console.log(`Prefix: ${prefix}, Command: ${command}`)
    if (command === `${prefix}examplecommand`) {
        //Do something
    }
    //Other code that uses prefix and command
})
It should log the prefix first, and then the Prefix: ${prefix}, Command: ${command} part, but it does it the other way around, so the examplecommand doesn't work.
Your result is caused by the fact that what's outside your query callback is executed immediately after the call. Keep in mind the mysql module is callback-based.
Possible Solutions
Place the code inside the callback so it's executed when the query is completed (see the sketch after this list of solutions).
Wrap the query in a promise and await it.
function getGuild(guildID) {
    return new Promise((resolve, reject) => {
        con.query(`SELECT * FROM serversettings WHERE ServerID = '${guildID}'`, (err, rows) => {
            if (err) return reject(err);
            resolve(rows);
        });
    });
}

const [guild] = await getGuild(message.guild.id) // destructuring 'rows' array
    .catch(console.error);
console.log(guild.Prefix);
Use a Promise-based version of a MySQL wrapper, like promise-mysql. You could use it the same way as the code above, without worrying about coding your own Promises.
const [guild] = await con.query(`SELECT * FROM serversettings WHERE ServerID = '${message.guild.id}'`)
    .catch(console.error);
console.log(guild.Prefix);
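And for the first option, here is a sketch of the callback approach applied to the question's own handler (only the body of the client.on("message", ...) listener is shown):
con.query(`SELECT * FROM serversettings WHERE ServerID = ${message.guild.id}`, (err, rows) => {
    if (err) throw err;
    const prefix = rows[0].Prefix;
    // everything that depends on the prefix lives inside this callback
    console.log(`Prefix: ${prefix}, Command: ${command}`);
    if (command === `${prefix}examplecommand`) {
        // Do something
    }
    // Other code that uses prefix and command
});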

NodeJs - How to share MySQL pool across my models to avoid 'ER_CON_COUNT_ERROR'

I'm currently testing my Node app using ApacheBench. I run into an issue with my database, which is ER_CON_COUNT_ERROR: Too many connections.
I'm using a small library on top of the MySQL Node module, which you can see just below.
var mysql = require('mysql');
var config = require('path/to/config');
var message = require('./myMessageLib.js');

var pool = mysql.createPool({
    connectionLimit: 100,
    host: config.db.mysql.host,
    user: config.db.mysql.user,
    password: config.db.mysql.password,
    database: config.db.mysql.database
});

var query = function(query_str, values, next) {
    pool.getConnection((err, connection) => {
        if (err) {
            console.error("MySQL failed to get a connection from the pool: " + err);
            if (typeof connection !== "undefined")
                connection.release();
            next(err, null);
            return;
        }
        connection.query(query_str, values, function(error, data, fields) {
            connection.release();
            if (error) {
                if (config.app.env.dev)
                    throw (error);
                else {
                    next(error, null);
                    return (message.error("MySQL query failed : " + query_str + " / err : " + error));
                }
            }
            if (data.length == 0)
                next(null);
            else
                next(data);
        })
    })
}

exports.query = query;
I use this library in my model by doing something like this
var mysql = require('path/to/mysqllib');

/**
 * Class PlayerModel
 */
function PlayerModel() { };

PlayerModel.prototype.get = function(id, next) {
    mysql.query("SELECT ....", [id], function(player) {
        // stuff
    })
}

module.exports = PlayerModel;
The thing is, on my homepage I use different models like the one presented above, and each one launches a query to get some database information. When I run ApacheBench with a concurrency level of only 50, I get ER_CON_COUNT_ERROR: Too many connections. So I've got the feeling that the pool isn't set up well, because it doesn't seem to respect the connection limit of 100 written in the short MySQL lib.
I was thinking about creating the pool and storing it in a global Node.js variable to be able to share it correctly across my modules, but I'm not sure that's a good approach, and maybe I'm also doing something wrong in my pool implementation.
Do you have any ideas or improvements to suggest?
Thanks mates!
I figured out the issue.
My app was deployed in cluster mode: two processes were running at the same time. Because of that, two pools of 100 connections could have been created, resulting in a total of 200 connections, which is higher than MySQL's default connection limit (max_connections defaults to 151).
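If you do want to keep running in cluster mode, one option (a sketch, not from the original answer; WEB_CONCURRENCY is just a hypothetical env var for the number of worker processes) is to size each worker's pool so the total stays under the server's max_connections:
var os = require('os');
var mysql = require('mysql');
var config = require('path/to/config');

// split a global connection budget across however many processes you run
var TOTAL_DB_CONNECTION_BUDGET = 100;
var workers = parseInt(process.env.WEB_CONCURRENCY, 10) || os.cpus().length;

var pool = mysql.createPool({
    connectionLimit: Math.max(1, Math.floor(TOTAL_DB_CONNECTION_BUDGET / workers)),
    host: config.db.mysql.host,
    user: config.db.mysql.user,
    password: config.db.mysql.password,
    database: config.db.mysql.database
});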
Great that you found a solution; here's another one with less code.
Create a JS file, dbconnection.js for example:
var mysql = require("mysql");
var pool = mysql.createPool({
connectionLimit: 10,
host: '...',
user: '...',
password: '...',
database: '...',
dateStrings: true
});
exports.connection = {
query: function () {
var queryArgs = Array.prototype.slice.call(arguments),
events = [],
eventNameIndex = {};
pool.getConnection(function (err, conn) {
if (err) {
if (eventNameIndex.error) {
eventNameIndex.error();
}
}
if (conn) {
var q = conn.query.apply(conn, queryArgs);
q.on('end', function () {
conn.release();
});
events.forEach(function (args) {
q.on.apply(q, args);
});
}
});
return {
on: function (eventName, callback) {
events.push(Array.prototype.slice.call(arguments));
eventNameIndex[eventName] = callback;
return this;
}
};
}
};
In the other file where you want to use the connection
var db = require('./dbconnection.js');
And instead of
connection.query
Use
db.connection.query
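Note that this wrapper is event-based rather than callback-based, so usage looks something like this (a sketch; the table, column, and row_id are placeholders):
db.connection.query("SELECT * FROM `table` WHERE `id` = ?", row_id)
    .on('result', function (row) {
        console.log(row);     // one 'result' event per returned row
    })
    .on('error', function (err) {
        console.error(err);
    });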

Node.js MySQL Needing Persistent Connection

I need a persistent MySQL connection for my Node web app. The problem is that this happens a few times a day:
Error: Connection lost: The server closed the connection.
at Protocol.end (/var/www/n/node_modules/mysql/lib/protocol/Protocol.js:73:13)
at Socket.onend (stream.js:79:10)
at Socket.EventEmitter.emit (events.js:117:20)
at _stream_readable.js:895:16
at process._tickCallback (node.js:415:13)
error: Forever detected script exited with code: 8
error: Forever restarting script for 2 time
info: socket.io started
Here is my connection code:
// Yes I know multipleStatements can be dangerous in the wrong hands.
var sql = mysql.createConnection({
    host: 'localhost',
    user: 'my_username',
    password: 'my_password',
    database: 'my_database',
    multipleStatements: true
});

sql.connect();

function handleDisconnect(connection) {
    connection.on('error', function(err) {
        if (!err.fatal) {
            return;
        }
        if (err.code !== 'PROTOCOL_CONNECTION_LOST') {
            throw err;
        }
        console.log('Re-connecting lost connection: ' + err.stack);
        sql = mysql.createConnection(connection.config);
        handleDisconnect(sql);
        sql.connect();
    });
}

handleDisconnect(sql);
As you can see, the handleDisconnect code does not work.
Use the mysql connection pool. It will reconnect when a connection dies, and you get the added benefit of being able to make multiple SQL queries at the same time. If you don't use a pool, your app will block database requests while waiting for currently running database requests to finish.
I usually define a database module where I keep my queries separate from my routes. It looks something like this...
var mysql = require('mysql');

var pool = mysql.createPool({
    host     : 'example.org',
    user     : 'bob',
    password : 'secret'
});

exports.getUsers = function(callback) {
    pool.getConnection(function(err, connection) {
        if (err) {
            console.log(err);
            callback(true);
            return;
        }
        var sql = "SELECT id, name FROM users";
        connection.query(sql, [], function(err, results) {
            connection.release(); // always put connection back in pool after last query
            if (err) {
                console.log(err);
                callback(true);
                return;
            }
            callback(false, results);
        });
    });
};
I know this is super delayed, but I've written a solution to this that I think might be a bit more generic and usable. I had written an app entirely dependent on connection.query() and switching to a pool broke those calls.
Here's my solution:
var mysql = require('mysql');

var pool = mysql.createPool({
    host     : 'localhost',
    user     : 'user',
    password : 'secret',
    database : 'test',
    port     : 3306
});

module.exports = {
    query: function() {
        var sql_args = [];
        var args = [];
        for (var i = 0; i < arguments.length; i++) {
            args.push(arguments[i]);
        }
        var callback = args[args.length - 1]; // last arg is callback
        pool.getConnection(function(err, connection) {
            if (err) {
                console.log(err);
                return callback(err);
            }
            if (args.length > 2) {
                sql_args = args[1];
            }
            connection.query(args[0], sql_args, function(err, results) {
                connection.release(); // always put connection back in pool after last query
                if (err) {
                    console.log(err);
                    return callback(err);
                }
                callback(null, results);
            });
        });
    }
};
This instantiates the pool once, then exports a method named query. Now, when connection.query() is called anywhere, it calls this method, which first grabs a connection from the pool, then passes the arguments to the connection. It has the added effect of grabbing the callback first, so it can pass back any errors encountered while grabbing a connection from the pool.
To use this, simply require it as module in place of mysql. Example:
var connection = require('../middleware/db');

function get_active_sessions() {
    connection.query('Select * from `sessions` where `Active`=1 and Expires>?;', [~~(new Date()/1000)], function(err, results) {
        if (err) {
            console.log(err);
        } else {
            console.log(results);
        }
    });
}
This looks just like the normal query, but actually opens a pool and grabs a connection from the pool in the background.
In response to #gladsocc's question:
Is there a way to use pools without refactoring everything? I have
dozens of SQL queries in the app.
This is what I ended up building. It's a wrapper for the query function. It will grab the connection, do the query, then release the connection.
var pool = mysql.createPool(config.db);

exports.connection = {
    query: function () {
        var queryArgs = Array.prototype.slice.call(arguments),
            events = [],
            eventNameIndex = {};

        pool.getConnection(function (err, conn) {
            if (err) {
                if (eventNameIndex.error) {
                    eventNameIndex.error();
                }
            }
            if (conn) {
                var q = conn.query.apply(conn, queryArgs);
                q.on('end', function () {
                    conn.release();
                });
                events.forEach(function (args) {
                    q.on.apply(q, args);
                });
            }
        });

        return {
            on: function (eventName, callback) {
                events.push(Array.prototype.slice.call(arguments));
                eventNameIndex[eventName] = callback;
                return this;
            }
        };
    }
};
And I use it like I would normally.
db.connection.query("SELECT * FROM `table` WHERE `id` = ? ", row_id)
.on('result', function (row) {
setData(row);
})
.on('error', function (err) {
callback({error: true, err: err});
});