I am trying to recreate the mysql Node.js transaction example using async/await syntax. This is what the documentation shows without async/await:
connection.beginTransaction(function(err) {
if (err) { throw err; }
connection.query('INSERT INTO posts SET title=?', title, function (error, results, fields) {
if (error) {
return connection.rollback(function() {
throw error;
});
}
var log = 'Post ' + results.insertId + ' added';
connection.query('INSERT INTO log SET data=?', log, function (error, results, fields) {
if (error) {
return connection.rollback(function() {
throw error;
});
}
connection.commit(function(err) {
if (err) {
return connection.rollback(function() {
throw err;
});
}
console.log('success!');
});
});
});
});
I have created a promisified pool that I can use with async/await:
let mysql = require('mysql')
let keys = require('../config/keys')
let util = require('util')
let pool = mysql.createPool({
connectionLimit: 20,
host: keys.connection.host,
user: keys.connection.user,
password: keys.connection.password,
database: keys.connection.database,
dateStrings: true
// debug:true //Set this to true for verbose debugging. Leaving this to default for now cause it is creating too many messages at my console
})
pool.getConnection((err, connection) => {
if (err) {
if (err.code === 'PROTOCOL_CONNECTION_LOST') {
console.error('Database connection was closed.')
}
if (err.code === 'ER_CON_COUNT_ERROR') {
console.error('Database has too many connections.')
}
if (err.code === 'ECONNREFUSED') {
console.error('Database connection was refused.')
}
}
if (connection) connection.release()
return
})
pool.query = util.promisify(pool.query)
module.exports = pool
To convert the documentation example to async/await I tried:
const connection = await pool.getConnection();
await connection.beginTransaction();
but I am getting:
TypeError: Cannot read property 'beginTransaction' of undefined
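Only pool.query was promisified above; pool.getConnection still expects a callback, so awaiting it yields undefined. Below is a minimal sketch of one possible fix, assuming util.promisify is also applied to getConnection and to the per-connection methods (this is an assumption, not part of the module above):
// sketch: getConnection is still callback-based, so it must be promisified too
pool.getConnection = util.promisify(pool.getConnection)

async function example() {
    const connection = await pool.getConnection()   // now resolves to a pooled connection
    const beginTransaction = util.promisify(connection.beginTransaction).bind(connection)
    await beginTransaction()
    // ... queries, commit or rollback would go here ...
    connection.release()
}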
I am using Node.js MySQL and tried to create a database-level transaction so that I can execute a bunch of statements in a batch and roll back if there is an error in any step. I tried to follow this tutorial.
My database module is:
let mysql = require('mysql')
let keys = require('../config/keys')
let util = require('util')
let pool = mysql.createPool({
connectionLimit: 20,
host: keys.connection.host,
user: keys.connection.user,
password: keys.connection.password,
database: keys.connection.database,
dateStrings: true
// debug:true //Set this to true for verbose debugging. Leaving this to default for now cause it is creating too many messages at my console
})
pool.getConnection((err, connection) => {
if (err) {
if (err.code === 'PROTOCOL_CONNECTION_LOST') {
console.error('Database connection was closed.')
}
if (err.code === 'ER_CON_COUNT_ERROR') {
console.error('Database has too many connections.')
}
if (err.code === 'ECONNREFUSED') {
console.error('Database connection was refused.')
}
}
if (connection) connection.release()
return
})
pool.query = util.promisify(pool.query)
const connection = () => {
return new Promise((resolve, reject) => {
pool.getConnection((err, connection) => {
if (err) return reject(err);
console.log("MySQL pool connected: threadId " + connection.threadId);
const query = (sql, binding) => {
return new Promise((resolve, reject) => {
connection.query(sql, binding, (err, result) => {
if (err) reject(err);
resolve(result);
});
});
};
const release = () => {
return new Promise((resolve, reject) => {
if (err) reject(err);
console.log("MySQL pool released: threadId " + connection.threadId);
resolve(connection.release());
});
};
resolve({
query,
release
});
});
});
};
// const query = (sql, binding) => {
// return new Promise((resolve, reject) => {
// pool.query(sql, binding, (err, result, fields) => {
// if (err) reject(err);
// resolve(result);
// });
// });
// };
module.exports = {
pool,
connection
}
In my route, I am trying to use the connection which should allow transaction:
const mysql = require('../../middleware/database')
async function buildCoreSchemas(){
const connection = await mysql.connection();
try{
await connection.query("START TRANSACTION");
await connection.query(`CREATE TABLE adjustreason (
AdjustID int NOT NULL AUTO_INCREMENT,
AdjustReason varchar(100) NOT NULL,
PRIMARY KEY (AdjustID)
)`)
await connection.query(`insert into adjustreason(AdjustReason) values('sdsds')`)
await connection.query(`insert into adjustreason(FAKECOLUMN) values('sdsds')`)
await connection.query("COMMIT");
}
catch(err){
await connection.query("ROLLBACK");
console.log(err)
return false
}
finally {
await connection.release();
}
}
As you can see, my second insert statement is wrong as there is no column called FAKECOLUMN. So the error gets caught and I get the error message in my console:
Unknown column 'FAKECOLUMN' in 'field list'
But when I go and look at my database, the transaction has not been rolled back, because I can see that the first record is still there. What am I doing wrong?
Ciao, try to modify the code in this way:
connection.beginTransaction(function(err) {
if (err) { throw err; }
connection.query(`CREATE TABLE adjustreason (
AdjustID int NOT NULL AUTO_INCREMENT,
AdjustReason varchar(100) NOT NULL,
PRIMARY KEY (AdjustID)
)`, function (error, results, fields) {
if (error) {
return connection.rollback(function() {
throw error;
});
}
connection.query(`insert into adjustreason(AdjustReason) values('sdsds')`, function (error, results, fields) {
if (error) {
return connection.rollback(function() {
throw error;
});
}
connection.query(`insert into adjustreason(FAKECOLUMN) values('sdsds')`, function (error, results, fields) {
if (error) {
return connection.rollback(function() {
throw error;
});
}
connection.commit(function(err) {
if (err) {
return connection.rollback(function() {
throw err;
});
}
console.log('success!');
});
});
});
});
});
So you call connection.query inside connection.beginTransaction, and if one of those queries fails, you call connection.rollback; otherwise connection.commit.
I have a Node.js MySQL pool that I am exporting like this:
let pool = mysql.createPool({
connectionLimit: 20,
host: keys.connection.host,
user: keys.connection.user,
password: keys.connection.password,
database: keys.connection.database,
dateStrings: true
// debug:true //Set this to true for verbose debugging. Leaving this to default for now cause it is creating too many messages at my console
})
pool.getConnection((err, connection) => {
if (err) {
if (err.code === 'PROTOCOL_CONNECTION_LOST') {
console.error('Database connection was closed.')
}
if (err.code === 'ER_CON_COUNT_ERROR') {
console.error('Database has too many connections.')
}
if (err.code === 'ECONNREFUSED') {
console.error('Database connection was refused.')
}
}
if (connection) connection.release()
return
})
pool.query = util.promisify(pool.query)
module.exports = pool
I promisified my pool so that I can now use async/await syntax.
Now I am trying to create a transaction on a Node.js MySQL connection pool. I came upon this code on Stack Overflow:
pool.getConnection(function(err, connection) {
connection.beginTransaction(function(err) {
if (err) { //Transaction Error (Rollback and release connection)
connection.rollback(function() {
connection.release();
//Failure
});
} else {
connection.query('INSERT INTO X SET ?', [X], function(err, results) {
if (err) { //Query Error (Rollback and release connection)
connection.rollback(function() {
connection.release();
//Failure
});
} else {
connection.commit(function(err) {
if (err) {
connection.rollback(function() {
connection.release();
//Failure
});
} else {
connection.release();
//Success
}
});
}
});
}
});
});
The problem is that I might have 5 or 6 queries that I want to run in a transaction, which is going to create callback hell. Is there a way to use async/await to do the same thing as the callbacks shown above?
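For reference, here is a rough sketch of the kind of thing I am after, promisifying the per-connection methods with util.promisify and wrapping the whole transaction in try/catch/finally (the withTransaction helper name and the bind calls are illustrative assumptions, not code I already have):
const util = require('util')

// illustrative helper: run several queries in one transaction on a single pooled connection
async function withTransaction(work) {
    const getConnection = util.promisify(pool.getConnection).bind(pool)
    const connection = await getConnection()
    const query = util.promisify(connection.query).bind(connection)
    const beginTransaction = util.promisify(connection.beginTransaction).bind(connection)
    const commit = util.promisify(connection.commit).bind(connection)
    const rollback = util.promisify(connection.rollback).bind(connection)
    try {
        await beginTransaction()
        await work(query)          // run the 5 or 6 queries one after another here
        await commit()
    } catch (err) {
        await rollback()
        throw err
    } finally {
        connection.release()
    }
}

// usage sketch
// await withTransaction(async (query) => {
//     const result = await query('INSERT INTO X SET ?', [X])
//     // ... more queries, e.g. using result.insertId ...
// })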
I wanted to know if it is possible to use async/await when using transactions with mysql. I created the snippet below using the mysql documentation, but the problem is that if I have multiple queries and I want them to fire one after another, there is no way to specify that without creating callback hell. Has anyone been able to do it with async/await?
pool.getConnection(function(err, connection) {
connection.beginTransaction(function(err) {
if (err) { throw err; }
connection.query('INSERT INTO posts SET title=?', title, function (error, results, fields) {
if (error) {
return connection.rollback(function() {
throw error;
});
}
var log = 'Post ' + results.insertId + ' added';
connection.query('INSERT INTO log SET data=?', log, function (error, results, fields) {
if (error) {
return connection.rollback(function() {
throw error;
});
}
connection.commit(function(err) {
if (err) {
return connection.rollback(function() {
throw err;
});
}
//console.log('success!');
connection.release();
});
});
});
});
})
I am using a connection pool, by the way, as you can see above. If you want to know how the pool is created, here is the code:
let pool = mysql.createPool({
connectionLimit: 20,
host: keys.connection.host,
user: keys.connection.user,
password: keys.connection.password,
database: keys.connection.database,
dateStrings: true
// debug:true //Set this to true for verbose debugging. Leaving this to default for now cause it is creating too many messages at my console
})
pool.getConnection((err, connection) => {
if (err) {
if (err.code === 'PROTOCOL_CONNECTION_LOST') {
console.error('Database connection was closed.')
}
if (err.code === 'ER_CON_COUNT_ERROR') {
console.error('Database has too many connections.')
}
if (err.code === 'ECONNREFUSED') {
console.error('Database connection was refused.')
}
}
// if (connection) connection.release()
return
})
pool.query = util.promisify(pool.query)
module.exports = pool
If you don't need the returned value of each query/request, you can wrap every request in a promise, store them in an array, and do a Promise.all at the end:
let requestArray = []
// push each request to requestArray
// then do
Promise.all(requestArray).then((values) => {
console.log(values);
});
If you do, you need something like async-waterfall: npmjs.com/package/async-waterfall
Hope that helps.
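For what it's worth, a small usage sketch of the Promise.all suggestion, using the promisified pool.query from the question (the tables and values are just examples, and note that Promise.all fires the queries concurrently rather than one after another):
async function runAll() {
    const requestArray = [
        pool.query('INSERT INTO posts SET title=?', ['first post']),
        pool.query('INSERT INTO log SET data=?', ['something happened'])
    ]
    // resolves once every query has finished; rejects on the first failure
    const values = await Promise.all(requestArray)
    console.log(values)
}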
I have a Node.js MySQL connection that used to work properly, but since traffic started coming in I am getting a strange error:
Error: Connection lost: The server closed the connection.
This is the class that I'm using:
const mysql = require('mysql');
class Database {
constructor() {
this.connection = mysql.createConnection({
host: process.env.DB_HOST,
user: process.env.DB_USERNAME,
password: process.env.DB_PASSWORD,
database: process.env.DB_NAME,
port: 3306,
debug: false,
multipleStatements: false
});
}
query(sql, args) {
return new Promise((resolve, reject) => {
this.connection.query(sql, args, (err, rows) => {
if (err)
return reject(err);
resolve(rows);
});
});
}
close() {
return new Promise((resolve, reject) => {
this.connection.end(err => {
if (err)
return reject(err);
resolve();
});
});
}
}
module.exports = Database;
Can someone help me understand why this is happening?
Edit: this is how I call the code:
const database = new Database();
database.query(`select * from users...
`, [req.user.id, parseInt(req.body.after)])
.then(rows => {
appData[".."] = rows['ddd']
res.status(200).json(appData);
database.close()
}, err => {
return database.close().then(() => { throw err; })
})
.catch(err => {
console.log(err);
res.status(500).json("Database Error");
})
First, create a file, e.g. database.js:
var mysql = require('mysql');
var pool = mysql.createPool({
connectionLimit: 10,
host: conf_core_sys.dbConfig.host,
user: conf_core_sys.dbConfig.user,
dateStrings: true,
password: conf_core_sys.dbConfig.pass,
database: conf_core_sys.dbConfig.dbName,
port:conf_core_sys.dbConfig.port,
debug: false
});
module.exports = pool;
// attach the helper to the exported pool object (module.exports was reassigned above)
pool.executeQuery = function (query, callback) {
pool.getConnection(function (err, connection) {
if (err) {
if (connection) connection.release();
return callback(err);
}
connection.query(query, function (err, rows) {
connection.release();
if (err) {
return callback(err);
}
callback(null, {
rows: rows
});
});
connection.on('error', function (err) {
throw err;
});
});
}
Second step:
let database = require("./database")
let sql = "SELECT * from users";
database.query(sql, function (error, results, fields) {
if (error) {
callback(error)
} else {
callback(null, results)
}
})
Some time ago I had the same problem, but since then the problem has not happened. Maybe this solution will help you:
var mysql = require('mysql');
var pool = mysql.createPool({
connectionLimit: 10,
host: process.env.DB_HOST,
user: process.env.DB_USERNAME,
password: process.env.DB_PASSWORD,
database: process.env.DB_NAME,
port: 3306,
debug: false,
multipleStatements: false
});
module.exports = pool;
// attach the helper to the exported pool object (module.exports was reassigned above)
pool.executeQuery = function (query, callback) {
pool.getConnection(function (err, connection) {
if (err) {
if (connection) connection.release();
return callback(err);
}
connection.query(query, function (err, rows) {
connection.release();
if (err) {
return callback(err);
}
callback(null, {
rows: rows
});
});
connection.on('error', function (err) {
throw err;
});
});
}
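A small usage sketch for the executeQuery helper above, assuming it is attached to the exported pool as shown (the require path and table name are just examples):
const database = require('./database')   // example path

database.executeQuery('SELECT * FROM users', function (err, result) {
    if (err) {
        console.error(err)
        return
    }
    console.log(result.rows)   // the helper wraps the rows in { rows: ... }
})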
I have a lot of urls, and for every url I call the function load(url). This function parses the html, extracts the needed data, and builds a bulk insert query, as you can see in my test.js code. The problem is that if I have too many urls (like 100+), I get Error: ER_LOCK_DEADLOCK from mysql. I tried to use async.queue but it is somehow not working (I don't know why, maybe I am using it wrongly). How can I run many urls + queries one after another, avoiding the parallel execution which I think resulted in a deadlock? Even using async.queue results in a DEADLOCK (not always).
test.js
const request = require('request');
const async = require('async');
const pool = require('./database');
const urls = [
'https://www.quora.com/What-is-the-best-way-to-have-delayed-job-queue-with-node-js',
'https://de.wikipedia.org/wiki/Reinhardt-Zimmermann-L%C3%B6sung',
'https://towardsdatascience.com/the-5-clustering-algorithms-data-scientists-need-to-know-a36d136ef68'
]
let load = function(url) {
request({url: url}, function(error, response, html) {
if(!error) {
console.log(html);
/**
* 1. Parse HTML
* 2. Create Array of Values
* 3. Call pool.query(sql, [values], function(error) { ... })
*/
let data = [{}];
let sql = "INSERT IGNORE INTO tbl_test (title, content) VALUES ?";
let values = [];
data.forEach((item) => { values.push(item) });
pool.query(sql, [values], function(error) {
if(error) throw error;
})
} else {
console.log("handle error...");
}
})
}
let jobs = []
/*urls.forEach((url) => {
//jobs.push(load(url)); // --> Works but fails if the urls list is to big -> mysql deadlock error!
jobs.push(function(callback) { callback(load(url)) });
})*/
let q = async.queue(function(task, callback) {
console.log("Task:", task.uri);
callback();
})
q.drain = function() {
console.log('all task completed');
pool.end();
}
urls.forEach((url) => {
q.push({uri: url}, function(err) {
console.log('finished processing ...')
});
});
database.js
require('dotenv').config();
const mysql = require('mysql');
let pool = mysql.createPool(
{
connectionLimit: 10,
host: process.env.DB_HOST,
port: process.env.DB_PORT,
user: process.env.DB_USER,
password: process.env.DB_PASSWORD,
database: process.env.DB_NAME
}
);
pool.getConnection((err, connection) => {
if(err) {
if(err.code === 'PROTOCOL_CONNECTION_LOST') {
console.log('Database connection lost.')
}
if(err.code === 'ER_CON_COUNT_ERROR') {
console.log('Database has too many connections.')
}
if(err.code === 'ECONNREFUSED') {
console.log('Database connection refused.')
}
if(err.code === 'POOL_CLOSED') {
console.log('Pool is closed.')
}
}
if(connection) {
connection.release()
}
return;
});
module.exports = pool;
I have changed the code to use async.series instead of async.queue, because the tasks would run in parallel in a queue (see: https://caolan.github.io/async/docs.html#queue).
test.js
...
let tasks = [];
context.forEach((ctx) => {
tasks.push(function(callback) { load(ctx, callback) });
});
async.series(tasks, function(err) {
if(err) return next(err);
});
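One detail worth spelling out: for async.series to actually wait for each url, load has to accept a callback and invoke it only after the insert has finished. A rough sketch under that assumption (the parsing step is elided, as in the original test.js):
// sketch: load() signals completion so async.series runs the urls strictly one after another
let load = function(url, callback) {
    request({ url: url }, function(error, response, html) {
        if (error) return callback(error);
        // 1. Parse HTML and build the array of values here (elided, as in the original)
        let values = [];
        let sql = "INSERT IGNORE INTO tbl_test (title, content) VALUES ?";
        pool.query(sql, [values], function(error) {
            callback(error);   // only now is this task finished, so the next url can start
        });
    });
};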