Which database pool middleware approach is more efficient? - mysql

I'd like to know how these two approaches work; reading the documentation has only confused me about what actually happens.
1. Pool module, required where needed
Does a pool get created each time we require the pool? If the pool module is required in 3 places, will 3 separate pools exist?
pool.js
const pool = mysql.createPool(db);
pool.query = util.promisify(pool.query);
module.exports = pool;
api.js
const pool = require("../pool");
await pool.query(query);
2. Route middleware, with a passed in request property
This way there is only one pool (I think), and only that one is used. Does it work as I think it does?
app.js
const middleware = require("./middleware.js");
let app = express();
let pool = mysql.createPool(db);
app.all("/api/*", middleware(pool));
middleware.js
module.exports = (pool) => {
  return (req, res, next) => {
    pool.getConnection((err, conn) => {
      if (err) return next(err); // otherwise conn is undefined when the pool errors
      const asyncQuery = util.promisify(conn.query).bind(conn);
      req.asyncQuery = asyncQuery; // <--------- pass the connection down the chain
      req.connection = conn;
      next();
    });
  };
};
api.js
router.get("/api/route", async (req, res, next) => {
try {
await req.asyncQuery(query); // <-------- use the passed in pool
res.status(200).send(rows);
} catch (err) {
next(err);
} finally {
req.connection.release();
}
});
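Note: Node caches a module by its resolved filename, so a module like pool.js above is normally evaluated only once per process no matter how many files require it. A minimal way to check this yourself (the log line and the placeholder config are added purely for illustration):

const mysql = require("mysql");
const util = require("util");

console.log("pool.js evaluated"); // printed once per process, not once per require

const db = { host: "localhost", user: "root", password: "", database: "test" }; // placeholder config
const pool = mysql.createPool(db);
pool.query = util.promisify(pool.query);
module.exports = pool;

Requiring this file from three different places should print the message only once, i.e. there is a single shared pool.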

Related

nodejs mysql connection pool keeps re-initializing after every require statement

I have multiple modules which require my custom PromisifiedMySQL.js module. I am trying to use a connection pool from the NodeJS mysql library.
I tried setting a variable called connectionPool to hold the initialized pool. However it seems that for every module that imports my PromisifiedMySQL.js file it attempts to initialize a new pool instance.
I want ONLY one pool instance to be created which is contained inside PromisifiedMySQL.js
Why is my connectionPool not keeping its state properly?
const mysql = require('mysql');

let connectionPool = null;

const initializePool = () => {
  if (isLiveEnv()) {
    connectionPool = createLivePool();
  } else if (isDevEnv()) {
    connectionPool = createDevPool();
  }
};

if (connectionPool === null) {
  console.log(connectionPool === null);
  console.log('init pool!');
  initializePool();
}
let query = (sql, args) => {
  return new Promise((resolve, reject) => {
    // getConnection() (not shown here) is assumed to promisify connectionPool.getConnection
    getConnection().then(con => {
      con.query(sql, args, (err, result) => {
        if (err) {
          con.release();
          reject(err);
        } else {
          con.release();
          resolve(result);
        }
      });
    })
    .catch(err => {
      reject(err);
    });
  });
}; // end query.
My export looks like this:
module.exports = {
  query: query
};
Where query gets a connection from the pool, runs the query and releases the connection.
If I have module1.js and module2.js both require('./Promisified.js'), it actually makes two pools, even though after the first one is created I set connectionPool to be non-null.
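For comparison, a common way to guarantee a single pool is to expose an accessor that creates the pool lazily on first call and returns the cached instance afterwards. A minimal sketch along those lines (the function name and connection settings below are hypothetical, not taken from the code above):

const mysql = require('mysql');

let connectionPool = null;

// Creates the pool on first call and returns the same cached instance afterwards.
const getPool = () => {
  if (connectionPool === null) {
    console.log('init pool!'); // should appear only once per process
    connectionPool = mysql.createPool({
      connectionLimit: 10,     // placeholder settings
      host: 'localhost',
      user: 'root',
      password: '',
      database: 'test'
    });
  }
  return connectionPool;
};

const query = (sql, args) => {
  return new Promise((resolve, reject) => {
    getPool().getConnection((err, con) => {
      if (err) return reject(err);
      con.query(sql, args, (queryErr, result) => {
        con.release();
        return queryErr ? reject(queryErr) : resolve(result);
      });
    });
  });
};

module.exports = { query };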

Angular HTTPClient (HTTP) requests pending forever

I have recently started working with MySQL as the database for my Angular/NodeJS project (I had been using MongoDB all along). Nonetheless, I'm encountering issues when handling HTTP requests. So far I have experimented with GET and POST requests: GET stays pending forever until it fails, and POST likewise never reaches the backend or the database. I really hadn't changed the backend configuration from the one I used with the MongoDB database, except for the queries, of course.
I have tried debugging the backend to check whether the server is actually running, and everything was okay. It's just that requests to the specified endpoints are always pending. I also tried to log to the console whenever a request reached a certain endpoint, but nothing was logged, unfortunately.
server.js
const app = require("./backend/app");
const debug = require("debug")("node-angular");
const http = require("http");
const normalisePort = setPort => {
const port = parseInt(setPort, 10);
if (isNaN(port)) return setPort;
if (port >= 0) return port;
return false;
};
const port = normalisePort(process.env.PORT || "8000");
const server = http.createServer(app);
const error = error => {
if (error.syscall !== "listen") {
throw error;
}
const bind = typeof port === "string" ? "pipe " + port : "port " + port;
switch (error.code) {
case "EACCES":
console.error(bind + " requires elevated privileges");
process.exit(1);
break;
case "EADDRINUSE":
console.error(bind + " is already in use");
process.exit(1);
break;
default:
throw error;
}
};
const listening = () => {
const address = server.address();
const bind = typeof port === "string" ? "pipe " + address : "port " + port;
debug.enabled = true;
debug("Listening on " + bind);
};
app.set("port", port);
server.on("error", error);
server.on("listening", listening);
server.listen(port, "localhost");
app.js
const express = require("express");
const bodyParser = require("body-parser");
const cors = require("cors");
const users = require("./routes/users");
const app = express();
app.use(cors);
app.use(bodyParser.json());
app.use(
bodyParser.urlencoded({
extended: false
})
);
app.use((req, res, next) => {
res.setHeader("Access-Control-Allow-Origin", "*");
res.setHeader(
"Access-Control-Allow-Headers",
"Origin, X-Requested-With, Authorization, Content-Type, Accept"
);
res.setHeader(
"Access-Control-Allow-Methods",
"GET, POST, PATCH, DELETE, OPTIONS"
);
next();
});
app.get("/api/users", users);
module.exports = app;
users.js
const express = require("express");
const router = express.Router();
const db = require("../sql-connection");
router.get("", (req, res, next) => {
db.query("select * from users;", (error, results, fields) => {
if (results.length > 0) {
return res.status(200).send(results);
} else {
return res.status(404).send();
}
});
});
module.exports = router;
sql-connection.js
const mysql = require("mysql");
const sqlConnection = mysql.createConnection({
host: "localhost",
user: "root",
password: "",
database: "payroll"
});
sqlConnection.connect(error => {
if (error) throw error;
console.log("connected to database");
});
module.exports = sqlConnection;
auth.service.ts
export class AuthService {
private _BASE_URL: string = "http://localhost:8000/api";
constructor(private http: HttpClient) {}
public get users(): Observable<any> {
return this.http.get(this._BASE_URL + "/users");
}
}
signup.component.ts
export class SignUpComponent {
constructor(private _authService: AuthService) {}
public onSignUp(): void {
this._authService
.users()
.subscribe(data => (data ? console.log(data) : console.log("no data")));
}
}
When the users observable is subscribed to, data from the backend should be logged to the console if present; otherwise 'no data' is logged. Unfortunately, this request takes forever (pending). However, if I don't subscribe to users, no request is sent or seen under the network tab in dev tools.
I've been using a MySQL database, and I would recommend using mysql2 over mysql:
mysql2 provides promise-based syntax on top of the conventional callback methods.
Here's the documentation for mysql2 for Node.js.
Coming to the problem, I guess it might be because Node.js is asynchronous while you're using a synchronous approach to setting up the API.
Also, when you're working with asynchronous programming, you should use try/catch/finally rather than plain if/else statements to handle and log errors.
So you can use async (req, res, next) => { /* your code here */ } rather than just (req, res, next) => { /* your code here */ }.
You also have to await the SQL query, i.e.
await db.query
or, with mysql2, it is easier to use const [data] = await pool.execute(query, [params]).
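A minimal sketch of that mysql2/promise approach applied to the users route above; the db.js filename, its require path, and the pool settings are assumptions for illustration:

// db.js
const mysql = require("mysql2/promise");

const pool = mysql.createPool({
  host: "localhost",       // placeholder connection settings
  user: "root",
  password: "",
  database: "payroll",
  connectionLimit: 10
});

module.exports = pool;

// users.js
const express = require("express");
const router = express.Router();
const pool = require("../db");

router.get("", async (req, res, next) => {
  try {
    const [rows] = await pool.execute("SELECT * FROM users");
    if (rows.length > 0) {
      return res.status(200).send(rows);
    }
    return res.status(404).send();
  } catch (err) {
    return next(err); // with async/await, errors surface here instead of in a callback's error argument
  }
});

module.exports = router;

With the promise pool there is no callback nesting, and a failed query rejects instead of leaving the response hanging.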

Cannot read property 'findAll' of undefined sequelize

I'm new to Express.js and Sequelize. I successfully migrated a table in my database, so I guess the connection is fine.
Here is my code.
https://github.com/Picks42/express-test
Please review this file
https://github.com/Picks42/express-test/blob/master/models/user.js
Then review this one
https://github.com/Picks42/express-test/blob/master/controller/test.js
Let me know what's the issue.
// all the models, loaded via your index.js loader
const models = require('../models');
// the User model (note the capital U)
const M_Bank = models.User;

exports.getTest = function (req, res) {
  return M_Bank
    .findAll()
    // don't use M_Bank here, since you are getting an array of instances of the model
    .then(users => res.status(200).send(users))
    .catch((error) => {
      console.log(error.toString());
      res.status(400).send(error);
    });
  /* this will never execute because it is after the return
  exports.index = function (request, response, next) {
    response.json((M_Bank.findAll()));
  };
  */
};
If you have the option of using async/await it makes for more readable code.
const models = require('../models');
const M_Bank = models.User;

exports.getTest = async function (req, res) {
  try {
    const users = await M_Bank.findAll();
    return res.status(200).send(users);
  } catch (err) {
    console.log(err.toString());
    return res.status(400).send(err);
  }
};
You should get rid of the .User field on the third line, because you've exported User itself from the models/user file.
Also, I recommend being careful with variable names; M_Bank doesn't describe what it holds.
const M_Bank = require('../models/user');

exports.getTest = function (req, res) {
  return M_Bank
    .findAll()
    .then(M_Bank => res.status(200).send(M_Bank))
    .catch((error) => {
      console.log(error.toString());
      res.status(400).send(error);
    });

  // note: this assignment is unreachable, since it comes after the return above
  exports.index = function (request, response, next) {
    response.json(M_Bank.findAll());
  };
};

Add transaction to sequelize create module

I'm starting with sequelize to create an API and I'm facing an issue with transactions.
My sequelize database configuration looks like this:
var fs = require('fs');
var path = require('path');
var Sequelize = require('sequelize');
var sequelize = new Sequelize(CONFIG.database, env.user,
env.password, {
host: env.host,
dialect: env.dialect,
port: env.port,
operatorsAliases: false
});
var db = {};
fs.readdirSync(__dirname).filter(function (file) {
return (file.indexOf('.') !== 0) && (file !== 'index.js');
}).forEach(function (file) {
var model = sequelize.import(path.join(__dirname, file));
db[model.name] = model;
});
Object.keys(db).forEach(function (modelName) {
if ('associate' in db[modelName]) {
db[modelName].associate(db);
}
});
db.sequelize = sequelize;
db.Sequelize = Sequelize;
module.exports = db;
Then I have a stockcontroller with functions to save in database like this:
var exports = module.exports = {}
let Stock = require('../models').Stock;
let StockVariant = require('../models').StockVariant;
exports.create = function (req, res) {
const body = req.body;
console.log(body);
Stock.create(body).then(function (stock, created) {})...}
I want to save into the stockvariant and stock tables in one single transaction, with the option to roll back on error.
The sequelize documentation doesn't look easy for me to understand, as I don't see how to apply this:
return sequelize.transaction(function (t) { return User.create({}) })
because t is of course not defined anywhere and my stockcontroller doesn't import sequelize.
So in the end I don't understand the basic concept of how to define that transaction function to create a new stock line.
Thanks for your help!
The sequelize instance needs to be imported to use transactions. It is already exported in your database configuration file by the line db.sequelize = sequelize.
All you need to do is add it to the current imports:
var exports = module.exports = {}
const Stock = require('../models').Stock; // Prefer const usage to avoid overwriting imports
const StockVariant = require('../models').StockVariant;
const sequelize = require('../models').sequelize;
This could also be done in one line using destructuring:
const { Stock, StockVariant, sequelize } = require('../models');
Now let's come to the transaction. As stated in the documentation, you have two ways of handling them: managed or unmanaged.
Managed transaction
This is done by chaining your asynchronous operations inside the sequelize transaction callback. In this case, if the operations linked to the transaction succeed, it commits automatically; otherwise it rolls back.
exports.create = function (req, res) {
  const body = req.body;
  console.log(body);
  return sequelize.transaction(function (t) {
    return Stock.create(body, {transaction: t}) // We pass the transaction as a parameter here!
      .then(function (stock) {
        return StockVariant.create(..., {transaction: t}); // The transaction, again here
      });
  })
  .then(function (result) {
    // Transaction has been committed
  })
  .catch(function (err) {
    // Transaction has been rolled back; handle your error here.
    // Note: the catch belongs on the outer promise. A catch inside the callback
    // would swallow the error and the transaction would commit anyway.
  });
};
Unmanaged transaction
If you want more transparency and/or control over your transaction, you can use an unmanaged transaction. In this case, you must call commit and rollback manually.
exports.create = function (req, res) {
  const body = req.body;
  console.log(body);
  return sequelize.transaction() // Called without a callback -> unmanaged transaction
    .then(function (t) {
      return Stock.create(body, {transaction: t}) // We pass the transaction as a parameter here!
        .then(function (stock) {
          return StockVariant.create(..., {transaction: t}); // The transaction, again here
        })
        .then(function () {
          return t.commit();
        })
        .catch(function (err) {
          return t.rollback();
        });
    });
};
This could also be done with async/await syntax, which may be more pleasant to read:
exports.create = async function (req, res) { // must be async to use await below
  const body = req.body;
  console.log(body);
  let t; // The variable in which the transaction object will be stored
  try {
    t = await sequelize.transaction();
    const stock = await Stock.create(body, {transaction: t});
    await StockVariant.create(..., {transaction: t}); // Whatever parameters you need to pass here
    await t.commit();
  } catch (err) {
    await t.rollback();
  }
};

node.js mysql pool connection with async/ await

Is there a way to use pool.getConnection() from the mysqljs/mysql lib with the async/await syntax?
The idea is to have a method that returns a single connection, which can be passed around among write queries with various foreign key constraints (sequential queries) before being released, while at the same time further connections can potentially be taken from the pool for various read queries (parallel).
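For context, a rough sketch of that pattern using util.promisify over the callback API; the pool settings, table names, and queries below are made up for illustration:

const mysql = require("mysql");
const util = require("util");

const pool = mysql.createPool({ host: "localhost", user: "root", password: "", database: "test" }); // placeholder config

// Resolves with one dedicated connection whose query() has been promisified.
function getConnection() {
  const getConnectionAsync = util.promisify(pool.getConnection).bind(pool);
  return getConnectionAsync().then((conn) => {
    conn.queryAsync = util.promisify(conn.query).bind(conn);
    return conn;
  });
}

async function writeSequentially() {
  const conn = await getConnection();   // one connection for the whole write sequence
  try {
    await conn.queryAsync("INSERT INTO parents SET ?", { name: "a" });
    await conn.queryAsync("INSERT INTO children SET ?", { parent_name: "a" }); // depends on the row above
  } finally {
    conn.release();                     // hand the connection back to the pool
  }
}

// Independent reads can still take their own connections from the pool in parallel,
// e.g. via pool.query(), while the write sequence above holds its connection.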
Sharing my working example:
I use this promisified MySQL middleware for Node.js.
Read the article Create a MySQL Database Middleware with Node.js 8 and Async/Await.
Here is my database.js:
var mysql = require('mysql');
// node -v must > 8.x
var util = require('util');
// !!!!! for node version < 8.x only !!!!!
// npm install util.promisify
//require('util.promisify').shim();
// -v < 8.x has problem with async await so upgrade -v to v9.6.1 for this to work.
// connection pool: https://github.com/mysqljs/mysql
var pool = mysql.createPool({
connectionLimit : process.env.mysql_connection_pool_Limit, // default:10
host : process.env.mysql_host,
user : process.env.mysql_user,
password : process.env.mysql_password,
database : process.env.mysql_database
})
// Ping database to check for common exception errors.
pool.getConnection((err, connection) => {
if (err) {
if (err.code === 'PROTOCOL_CONNECTION_LOST') {
console.error('Database connection was closed.')
}
if (err.code === 'ER_CON_COUNT_ERROR') {
console.error('Database has too many connections.')
}
if (err.code === 'ECONNREFUSED') {
console.error('Database connection was refused.')
}
}
if (connection) connection.release()
return
})
// Promisify for Node.js async/await.
pool.query = util.promisify(pool.query)
module.exports = pool
You must upgrade Node to a version > 8.x, and you must use an async function to be able to use await.
Example:
var pool = require('./database')
// node -v must > 8.x, --> async / await
router.get('/:template', async function(req, res, next)
{
...
try {
var _sql_rest_url = 'SELECT * FROM arcgis_viewer.rest_url WHERE id='+ _url_id;
var rows = await pool.query(_sql_rest_url)
_url = rows[0].rest_url // first record, property name is 'rest_url'
if (_center_lat == null) {_center_lat = rows[0].center_lat }
if (_center_long == null) {_center_long= rows[0].center_long }
if (_center_zoom == null) {_center_zoom= rows[0].center_zoom }
_place = rows[0].place
} catch(err) {
throw new Error(err)
}
Mates, I don't know why, but I had tried all day long and couldn't get it to work. With the help of your comments I tried again, and of course it does work.
db.js:
const mysql = require('mysql');
const pool = mysql.createPool(config); // config: your connection settings (host, user, password, database, ...)

exports.getConnection = () => {
  return new Promise((resolve, reject) => {
    pool.getConnection(function (err, connection) {
      if (err) {
        return reject(err);
      }
      resolve(connection);
    });
  });
};
someWhereElse.js:
const db = require('./db');
const wrappingFunction = async () => {
const connection = await db.getConnection();
console.log(connection);
};
wrappingFunction();
Seems like implementing promises manually is a better option.
Just sharing what I have used in my code -
const mysql = require('mysql');
const config = require('config');
const pool = mysql.createPool(config.get('db.mysql'));
module.exports = {
checkConnection: () => {
return new Promise((resolve, reject) => {
pool.getConnection((err, conn) => {
if (err) {
return reject(err);
}
resolve(conn.release());
});
});
},
pool,
closeConnection: () => pool.end(),
};
The previous answers (with util.promisify) did not work for me; only implementing the Promise manually worked:
Function:
async function removeItem (id) {
  return new Promise((resolve) => {
    // use a placeholder instead of string concatenation to avoid SQL injection
    pool.query('DELETE FROM table_name WHERE id = ?', [id], (error) => {
      resolve({result: !error});
    });
  });
}
Usage:
const app = express();
const mysql = require('mysql');
const pool = mysql.createPool({
connectionLimit: 10,
host: 'localhost',
user: 'login',
password: 'pass',
database: 'dbname'
});
app.post("/:id", async (req, res) => {
const answer = await itemRemove(id);
res.send(answer);
});
Sure, you would have to promisify it first, which you can do natively since Node 8.0.0:
const util = require('util');

async function doSomething() {
  // bind to the pool, otherwise promisify loses the `this` context
  const getConnectionAsync = util.promisify(pool.getConnection).bind(pool);
  try {
    // note: passing 'MASTER' only applies to a PoolCluster;
    // a plain Pool's getConnection takes no arguments besides the callback
    const result = await getConnectionAsync('MASTER');
  } catch (err) {
    console.log('Oh no');
  }
}
If for some reason you can't use node 8 or above, there are other ways to promisify it, like http://bluebirdjs.com/docs/api/promise.promisify.html
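For example, with Bluebird something along these lines should work; the connection settings are placeholders, and the context option is what keeps this bound to the pool:

const Promise = require('bluebird');
const mysql = require('mysql');

const pool = mysql.createPool({ host: 'localhost', user: 'root', password: '', database: 'test' }); // placeholder config

// The context option keeps `this` bound to the pool inside the promisified call.
const getConnectionAsync = Promise.promisify(pool.getConnection, { context: pool });

getConnectionAsync()
  .then(function (connection) {
    const queryAsync = Promise.promisify(connection.query, { context: connection });
    return queryAsync('SELECT 1')
      .then(function (rows) {
        console.log(rows);
      })
      .finally(function () {
        connection.release(); // always hand the connection back
      });
  })
  .catch(function (err) {
    console.error(err);
  });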
Just sharing what I've always used in my code:
//Filename: MySQL.js
const Mysql = require('mysql');
const config = require('./configfile'); // or just put the connection details here

module.exports = {
  connect: function ()
  {
    return new Promise((resolve, reject) => {
      let pool = Mysql.createPool({
        connectionLimit: config.mysql.connectionLimit,
        host: config.mysql.host,
        user: config.mysql.user,
        password: config.mysql.password,
        database: config.mysql.database
      });
      pool.getConnection((err, connection) =>
      {
        if (err)
        {
          // the callback error has to be checked here; a try/catch will not see it
          return reject({"status":"failed", "error":`MySQL error. ${err}`});
        }
        if (connection)
        {
          connection.release();
          return resolve({"status":"success", "data":"MySQL connected.", "con":pool});
        }
        reject({"status":"failed", "error":"Error connecting to MySQL."});
      });
    });
  }
};
Then whenever you need to call the connection to MySQL
//Filename: somefile.js
const useMySQL = require('./path/to/MySQL');

module.exports = {
  getSomething: function () {
    return new Promise(async (resolve) => {
      try
      {
        let connection = await useMySQL.connect();
        const con = connection.con;
        //Do some query here, then
        resolve(`Send some result/handle error`);
      }
      catch (err)
      {
        //Handle error if any, log, etc, and eventually
        resolve(err);
      }
    });
  }
};
Hope this helps.