MySQL Node.js - Proper way to get rows.each to work

When I look for simple examples, everybody's style seems quite different. I've tried 2 different styles and got 2 different issues. In the code below, I have identified the source of the code and the error it gets in comments. I comment out or uncomment each section and run them separately, but each one has its own errors. The console.log(rows); statement is showing the data, so the query itself is running and working.
// get the client
const mysql = require('mysql2');
const dashline = '---------------------------------';
console.log (dashline);
console.log ("Starting test");
// create the connection to database
const connection = mysql.createConnection({
host: 'myhost',
user: 'myuser',
password: 'mypass',
database: 'myDatabase'
});
console.log ("Got the database connection");
query = "select ID, user_nicename, user_email from wp_users where user_login = 'admin' limit 3 ";
console.log ("Starting query");
// Attempt 1
/*
connection.query(query, function(err, rows, fields){
if (err) throw err;
// from: https://html5hive.org/node-js-quickies-working-with-mysql/
// error: SyntaxError: Unexpected token { (on the rows.each line below)
rows.each(element, index) {
console.log(element.ID+ " " + element.user_nicename);
}
console.log (dashline);
console.log ("Query End");
process.exit(); // Else Node hangs and must hit cntl-break to exit
});
*/
// Attempt 2
connection.query(query, function(err, rows, fields){
if (err) throw err;
console.log(rows);
// Roughly based on example on this page:
// https://datatables.net/reference/api/each()
// TypeError: rows.each is not a function
rows.each( function(element, index) {
console.log(element.ID + " " + element.user_nicename);
});
console.log (dashline);
console.log ("The end");
process.exit(); // Else Node hangs and must hit cntl-break to exit
});

The method .each doesn't exist for Arrays; you should be using .forEach(function (element, index) {...}) instead.

Use the following:
rows.forEach( function(element, index) {
console.log(element.ID + " " + element.user_nicename);
});
They are certainly similar, but there are differences. For example, "forEach" is an array method, but "$.each" can be used on any type of collection. And "forEach" is a built-in, whereas "$.each" requires loading the jQuery library.
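Putting it together, here is a minimal sketch of Attempt 2 rewritten with .forEach, assuming the same mysql2 callback API used above; connection.end() is used instead of process.exit() so Node can exit cleanly once the connection closes:
const mysql = require('mysql2');
const connection = mysql.createConnection({
  host: 'myhost', user: 'myuser', password: 'mypass', database: 'myDatabase'
});
const dashline = '---------------------------------';
const query = "select ID, user_nicename, user_email from wp_users where user_login = 'admin' limit 3";
connection.query(query, function (err, rows, fields) {
  if (err) throw err;
  // rows is a plain array of row objects, so Array.prototype.forEach works
  rows.forEach(function (element, index) {
    console.log(element.ID + " " + element.user_nicename);
  });
  console.log(dashline);
  console.log("The end");
  connection.end(); // closes the connection so Node exits without process.exit()
});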

Got an answer here: https://github.com/sidorares/node-mysql2/issues/999. The problem with .forEach, .each or .map is that you are inside another function which is not an async function, meaning you cannot use await to call another async routine.
for (let r=0; r < rows.length; ++r) {
console.log(rows[r].ID + " " + rows[r].user_nicename);
await UpdatePassword(connection, rows[r].ID);
}
He also provided this alternative:
One "functional" way to iterate sequentially similar to map ( imo slightly less readable then for loop ):
await rows.reduce( async (previousPromise, row) => {
await previousPromise;
return UpdatePassword(row.ID);
}, Promise.resolve());
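For reference, a hedged sketch of the same flow using the mysql2/promise API, so the whole body lives inside an async function and await works directly; UpdatePassword is the hypothetical helper from the snippet above:
const mysql = require('mysql2/promise');

async function run() {
  const connection = await mysql.createConnection({
    host: 'myhost', user: 'myuser', password: 'mypass', database: 'myDatabase'
  });
  // execute() resolves to [rows, fields]
  const [rows] = await connection.execute(
    "select ID, user_nicename, user_email from wp_users where user_login = 'admin' limit 3"
  );
  for (const row of rows) {
    console.log(row.ID + " " + row.user_nicename);
    await UpdatePassword(connection, row.ID); // sequential: each update finishes before the next row
  }
  await connection.end();
}

run().catch(console.error);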

Related

Connecting to MySql database from AWS Lambda function using Node.js, no connect callback

I am trying to connect to an external (not AWS) MySQL server from an AWS Lambda function written in Node.js using the nodejs14.x environment, but the connect() callback is never called.
I have been struggling with this problem for days; there are a lot of references to similar issues, but I have really tried all possible permutations of the solutions I found.
I am deploying with SAM and testing both on local machine and on real AWS.
Here is the sample code of the lambda helper
const mysql = require('mysql');
exports.helloFromLambdaHandler = async () => {
const message = 'Hello from Lambda!';
console.info(`${message}`);
var sql = "SELECT 1+? AS sum";
var values = [1];
console.log("Doing createConnection");
const connection = mysql.createConnection({
/* my connection data */
});
console.log("Doing connect");
connection.connect( (err) => {
console.log("Inside connection callback");
console.log('connected as id ' + connection.threadId);
if(!err) {
console.log("DB connected, thread id is " + connection.threadId);
console.log("Doing query");
connection.query(sql, values, (err, result, values) => {
console.log("Inside query callback");
if(!err) {
console.log("Query ok!");
console.log(result);
connection.end();
} else {
console.log("Error executing query: " + err.message);
}
});
} else {
console.log("Error connecting db: "+ err.message);
}
});
console.log ("Returning...");
return message;
}
The log is
Hello from Lambda!
Doing createConnection
Doing connect
Returning...
The expected behaviour is that after "Returning..." I should see the log "Inside connection callback", then "Inside query callback" and then "Query ok!".
Instead, the connect() callback appears not to be invoked.
I know that I can call query() directly, skipping connect(), but doing so I encounter the same issue.
Any clue?
Thank you!
SOLUTION
As suggested by the accepted answer, returning a promise is the solution, since it lets Node drain the event queue before the handler completes. Unfortunately, as far as I understand, it is not possible to complete the Lambda and safely leave work running in the background.
I am investigating alternative solutions such as:
- the mysql2 library, which supports promises natively
- the serverless-mysql npm package, which handles shared db connections
Below the running demo code
const mysql = require('mysql');
exports.helloFromLambdaHandler = async (event, context) => {
const message = 'Hello from Lambda!';
console.info(`${message}`);
var sql = "SELECT 1+? AS sum";
var values = [1];
console.log("Doing createConnection");
const connection = mysql.createConnection({
/* my connection data */
});
console.log("Doing query");
const promise = new Promise( (resolve, reject) => {
connection.query(sql, values, (err, result, values) => {
console.log("Inside query callback");
if(!err) {
console.log("Query ok!");
console.log(result);
connection.end();
resolve(message);
} else {
console.log("Error executing query: " + err.message);
reject(err);
}
});
});
console.log ("Returning...");
return promise;
}
You are using an async handler, so your function probably completes before your connect() callback has a chance to execute.
To overcome the issue, you can return a Promise, as shown in the AWS docs.
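As a hedged sketch of the mysql2 alternative mentioned in the SOLUTION above, the promise API removes the need for the manual Promise wrapper entirely; the connection data remains a placeholder:
const mysql = require('mysql2/promise');

exports.helloFromLambdaHandler = async (event, context) => {
  const connection = await mysql.createConnection({
    /* my connection data */
  });
  try {
    // execute() resolves to [rows, fields]
    const [rows] = await connection.execute("SELECT 1+? AS sum", [1]);
    console.log("Query ok!", rows);
    return 'Hello from Lambda!';
  } finally {
    // close the connection whether the query succeeded or failed
    await connection.end();
  }
};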

NPM - MYSQL - Passing query results to another variable

I was looking for some help with a database package that I'm building for a larger application. The application will keep certain information in a database through the mysql package on npm. The problem I'm facing is that when I try to pass the results variable to a variable that I've created, I always get undefined. However, if I place a log statement directly after that assignment, the results variable shows up properly populated. I've done a fair amount of research and believe this has something to do with promises and/or the callback function that's part of the query. I was wondering if someone could examine my code and let me know the best course of action. I've spent a couple of hours researching this online and trying various solutions, but nothing has worked.
const mysql = require("mysql");
var sqlResults;
var pool = mysql.createPool({
host: "example.com",
user: "exampleUser",
password: "123456Password",
database: "TestDB"
});
module.exports = {
...
databaseSelect: function(table, fields, conditionalStmt) {
pool.getConnection(function(err, connection) {
if (err)
throw err;
console.log("Connected to the example DB!");
var sql = "SELECT " + fields + " FROM " + table + " " + conditionalStmt;
connection.query(sql, function(error, results, fields) {
console.log("Successfully retrieved records from " + table + "\n\t" + sql);
sqlResults = results;
connection.release();
sqlResults = results;
console.log(results);
if (error)
throw error;
});
});
console.log(sqlResults);
return sqlResults;
}
}
Here's a sample of the output that I'm receiving:
I am ready!
undefined
undefined
Connected to the example DB!
Successfully retrieved records from User_Level_Info
SELECT HashID, Level, Experience FROM User_Level_Info WHERE HashID = 'e578059cabc6f937f0219127384126143e272acbac52c331345d573e0f085d21'
[ RowDataPacket {
HashID: 'e578059cabc6f937f0219127384126143e272acbac52c331345d573e0f085d21',
Level: 1,
Experience: 0 } ]
Convert it into a Promise, so the caller can wait until it resolves or rejects.
databaseSelect: function(table, fields, conditionalStmt) {
  return new Promise(function(resolve, reject) {
    pool.getConnection(function(err, connection) {
      if (err)
        return reject(err);
      console.log("Connected to the example DB!");
      var sql = "SELECT " + fields + " FROM " + table + " " + conditionalStmt;
      connection.query(sql, function(error, results, fields) {
        connection.release();
        if (error)
          return reject(error);
        console.log("Successfully retrieved records from " + table + "\n\t" + sql);
        // resolve inside the query callback, once the results actually exist
        resolve(results);
      });
    });
  });
}
//call your function
databaseSelect(params)
  .then(function(rows) {
    console.log(rows);
  })
  .catch(function(err) {
    console.log(err);
  });

Save Query result into Variable Alexa Skills Json

I needed a DB for an Alexa app, so I set one up and it INSERTs nicely, but when I try to SELECT and save the result to a variable, the value saved is [object Object] instead of the wanted value. I know it could be an async problem or a parsing problem, but I just can't fix the code. Some help would be cool.
canHandle(handlerInput) {
return Alexa.getRequestType(handlerInput.requestEnvelope) === 'IntentRequest'
&& Alexa.getIntentName(handlerInput.requestEnvelope) === 'buscaIntent';
},
handle(handlerInput) {
const mysql = require('mysql');
const connection = mysql.createConnection
({
host: 'remotemysql.com',
user: 'RBb34534sd',
password: 'xxxxxxxxx',
database: 'RBsdfewrg'
});
var stat = connection.query('SELECT `spe` FROM `prueba` WHERE `nombre` LIKE "raichu" limit 1', function (err, result, fields) {
if (err) throw err;
console.log(result);
return result[0];
});
connection.end();
return handlerInput.responseBuilder
.speak("Busc " + stat)
.reprompt("reprompt buscar")
.getResponse();
}
};
The issue is that you're not waiting for your database query to complete before sending your response to the Alexa service. Requests in node.js are non-blocking, meaning you either need to nest the request with a callback, or leverage Promises / async-await patterns so that the SQL query is processed before the function is fully executed.
You can read more on converting the built-in library for SQL connections to support Promises here, or use a library like this that already has a wrapper in place.
In either scenario, the end result would be refactored to something like this:
canHandle(handlerInput) {
return Alexa.getRequestType(handlerInput.requestEnvelope) === 'IntentRequest'
&& Alexa.getIntentName(handlerInput.requestEnvelope) === 'buscaIntent';
},
async handle(handlerInput) {
const mysql = require('mysql2/promise');
const connection = await mysql.createConnection
({
host: 'remotemysql.com',
user: 'RBb34534sd',
password: 'xxxxxxxxx',
database: 'RBsdfewrg'
});
const [rows] = await connection.execute('SELECT `spe` FROM `prueba` WHERE `nombre` LIKE "raichu" LIMIT 1');
console.log(rows);
// execute() resolves to [rows, fields]; the wanted value is a column on the first row
const stat = rows.length ? rows[0].spe : undefined;
await connection.end();
return handlerInput.responseBuilder
.speak("Busc " + stat)
.reprompt("reprompt buscar")
.getResponse();
}
Another article describing async calls for Alexa requests here.
I think the query is returning an object, and you can't put an object directly into speech. Check what's inside the object, and if there is a field you want, access it with stat.YourField.

Why is my AWS Lambda node.js mysql query not returning?

I'm trying to write some external data into some local tables. We'll be looping through an array, writing most of the data in each array element to the main table and the rest to related tables, replacing all the data each time.
I've stripped the code down to the bare bones to show the problem I'm having. The DELETE runs fine, but the INSERT runs only once, and doesn't even return.
I have a screenshot of the output at https://imgur.com/a/zA6Hz8g .
In it, you can see that the code for the DELETE runs fine (ComQueryPacket sent, OkPacket returned) but when it gets to the INSERT, the ComQueryPacket is sent but nothing is returned. And then the code just falls through.
This results in the first row writing successfully, but no subsequent rows get written.
I've tried changing the connection to use pools, but that didn't help either.
Any ideas?
var mysql = require('mysql');
var promise = require('promise');
const con = mysql.createConnection({
<connectionInfo>,
debug: true
});
function connectToDB() {
return new promise((resolve, reject) => {
console.log("IN connectToDB");
con.connect(function(err) {
if (err) {
console.log("ERROR: Could not connect -- " + err);
reject;
}
console.log("Connected!");
resolve();
});
});
}
function deleteExistingMainRow() {
return new promise((resolve, reject) => {
var query = "DELETE FROM my_table";
con.query(query, [],
function(err, result) {
if (err) {
console.log("ERROR in deleteExistingMainRow: " + err);
reject;
}
else {
console.log("DEBUG: Successful delete of main row");
resolve();
}
});
});
}
function writeMainRow(data_row) {
return new promise((resolve, reject) => {
console.log("IN writeMainRow");
var query = 'INSERT INTO my_table SET id = ?';
con.query(query, [data_row.id],
function(err, result) {
console.log("YES we tried to query");
if (err) {
console.log("ERROR in writeMainRow: " + err);
reject(err);
}
else {
console.log("DEBUG: Successful write of main row");
resolve();
}
});
});
}
exports.handler = function(event, context) {
connectToDB().then(function(script) {
deleteExistingMainRow().then(function(script) {
var data = [{ "id": 1 }, { "id": 2 }, { "id": 3 }];
data.forEach(data_row => {
writeMainRow(data_row).then(function(script) {
console.log("DEBUG: Main row written in forEach");
},
function(err) {
if (err) { console.log("ERR"); } process.exit(0);
}());
});
console.log("DEBUG: Hey we're exiting now");
con.commit;
con.end(function(err) {
console.log("Error on con end: " + err);
});
context.done(null, "DONE");
process.exit(0);
});
});
};
Just a few months ago AWS made the Node.js 8.10 runtime available in Lambda.
This means you can use async/await and Promises, so we can rearrange the code to something like this:
exports.handler = async (event, context) => {
const dbConnection = await connectToDB();
await deleteExistingMainRow();
const data = [{ "id": 1 }, { "id": 2 }, { "id": 3 }];
// use here for...of loop to keep working with async/await behaviour
for(const data_row of data){
await writeMainRow(data_row);
}
}
Also, you can rewrite your code to use native Promises or async/await functions.
And of course, wrap the logic in try/catch blocks; I've skipped them for simplicity.
The reason your code is not behaving as you expect is the asynchronous nature of Node.js.
Your forEach loop starts several asynchronous queries that are going to INSERT the data into your database.
As soon as these queries are started, the rest of the code executes, starting with console.log("DEBUG: Hey we're exiting now");
So the commit happens before all the INSERT calls are done and, more importantly, you're calling process.exit() in your code. This terminates the runtime even before the INSERTs can finish.
Call callback() instead as per https://docs.aws.amazon.com/lambda/latest/dg/nodejs-prog-model-handler.html
Handling your multiple asynchronous writes can be done differently. First, as grynets commented before me, I would strongly suggest rewriting your code using async/await to make the calls easier to read.
Then, you have to understand that each call to writeMainRow() returns its own Promise, and your code must wait for ALL promises to complete before calling commit() and callback().
Promise.all(...) will do that for you. See the doc at https://developer.mozilla.org/en-US/docs/Web/JavaScript/Reference/Global_Objects/Promise/all
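A hedged sketch combining that advice with the async handler from the earlier answer; connectToDB, deleteExistingMainRow and writeMainRow are the question's own functions. Note that Promise.all runs the inserts concurrently; use the for...of version above if they must run one at a time:
exports.handler = async (event, context) => {
  await connectToDB();
  await deleteExistingMainRow();
  const data = [{ "id": 1 }, { "id": 2 }, { "id": 3 }];
  // start all inserts, then wait for every one of them to finish
  await Promise.all(data.map((data_row) => writeMainRow(data_row)));
  // only now is it safe to close the connection and return
  await new Promise((resolve) => con.end(resolve));
  return "DONE";
};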
Try using
INSERT INTO table_name(id) VALUES (?);
I know both your query and the one above work the same; just give it a try.
Also make sure your for loop is actually sending values to writeMainRow(): it won't show an error if you pass an empty value, and make sure you are not passing the same values on every iteration. I also think you have to pass writeMainRow(data_row.id) rather than writeMainRow(data_row).
Hope this helps.
One more suggestion: if you are updating multiple rows, the mysql node library has features like transactions. Using those will be more effective, and you can roll back the result if you hit an error. Another option is to write stored procedures, in which case your MySQL server will bear the computation.
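A minimal sketch of that transaction idea with the mysql library, whose connection API provides beginTransaction, commit and rollback; the INSERT here is just the question's placeholder query:
con.beginTransaction(function (err) {
  if (err) { throw err; }
  con.query('INSERT INTO my_table SET id = ?', [1], function (err) {
    if (err) {
      // undo everything done since beginTransaction, then surface the error
      return con.rollback(function () { throw err; });
    }
    con.commit(function (err) {
      if (err) {
        return con.rollback(function () { throw err; });
      }
      console.log('Transaction committed');
    });
  });
});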

Use promise to process MySQL return value in node.js

I have a Python background and am currently migrating to Node.js. I have problems adjusting to Node.js due to its asynchronous nature.
For example, I am trying to return a value from a MySQL function.
function getLastRecord(name)
{
var connection = getMySQL_connection();
var query_str =
"SELECT name, " +
"FROM records " +
"WHERE (name = ?) " +
"LIMIT 1 ";
var query_var = [name];
var query = connection.query(query_str, query_var, function (err, rows, fields) {
//if (err) throw err;
if (err) {
//throw err;
console.log(err);
logger.info(err);
}
else {
//console.log(rows);
return rows;
}
}); //var query = connection.query(query_str, function (err, rows, fields) {
}
var rows = getLastRecord('name_record');
console.log(rows);
After some reading up, I realized the above code cannot work as is, and I need to return a promise due to Node.js's asynchronous nature. I cannot write Node.js code like Python. How do I convert getLastRecord() to return a promise, and how do I handle the returned value?
In fact, what I want to do is something like this;
if (getLastRecord() > 20)
{
console.log("action");
}
How can this be done in node.js in a readable way?
I would like to see how promises can be implemented in this case using bluebird.
This is gonna be a little scattered, forgive me.
First, assuming this code uses the mysql driver API correctly, here's one way you could wrap it to work with a native promise:
function getLastRecord(name)
{
return new Promise(function(resolve, reject) {
// The Promise constructor should catch any errors thrown on
// this tick. Alternately, try/catch and reject(err) on catch.
var connection = getMySQL_connection();
var query_str =
"SELECT name, " +
"FROM records " +
"WHERE (name = ?) " +
"LIMIT 1 ";
var query_var = [name];
connection.query(query_str, query_var, function (err, rows, fields) {
// Call reject on error states,
// call resolve with results
if (err) {
return reject(err);
}
resolve(rows);
});
});
}
getLastRecord('name_record').then(function(rows) {
// now you have your rows, you can see if there are <20 of them
}).catch((err) => setImmediate(() => { throw err; })); // Throw async to escape the promise chain
So one thing: You still have callbacks. Callbacks are just functions that you hand to something to call at some point in the future with arguments of its choosing. So the function arguments in xs.map(fn), the (err, result) functions seen in node and the promise result and error handlers are all callbacks. This is somewhat confused by people referring to a specific kind of callback as "callbacks," the ones of (err, result) used in node core in what's called "continuation-passing style", sometimes called "nodebacks" by people that don't really like them.
For now, at least (async/await is coming eventually), you're pretty much stuck with callbacks, regardless of whether you adopt promises or not.
Also, I'll note that promises aren't immediately, obviously helpful here, as you still have a callback. Promises only really shine when you combine them with Promise.all and promise accumulators a la Array.prototype.reduce. But they do shine sometimes, and they are worth learning.
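For instance, a small sketch of Promise.all with the wrapper above; the second record name is made up, just to show two concurrent queries resolving together:
Promise.all([
  getLastRecord('name_record'),
  getLastRecord('other_record') // hypothetical second name
]).then(function (results) {
  var firstRows = results[0];
  var secondRows = results[1];
  console.log(firstRows.length, secondRows.length);
}).catch((err) => setImmediate(() => { throw err; })); // same async rethrow trick as above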
I have modified your code to use Q (npm module) promises.
I assumed the getLastRecord() function you specified in the above snippet works correctly.
You can refer to the Q documentation to get hold of the Q module.
var q = require('q');
function getLastRecord(name)
{
var deferred = q.defer(); // Use Q
var connection = getMySQL_connection();
var query_str =
"SELECT name, " +
"FROM records " +
"WHERE (name = ?) " +
"LIMIT 1 ";
var query_var = [name];
var query = connection.query(query_str, query_var, function (err, rows, fields) {
//if (err) throw err;
if (err) {
//throw err;
deferred.reject(err);
}
else {
//console.log(rows);
deferred.resolve(rows);
}
}); //var query = connection.query(query_str, function (err, rows, fields) {
return deferred.promise;
}
// Call the method like this
getLastRecord('name_record')
.then(function(rows){
// This function get called, when success
console.log(rows);
},function(error){
// This function get called, when error
console.log(error);
});
I am new to Node.js and promises. I searched for a while for something that would meet my needs, and this is what I ended up using after combining several examples I found. I wanted the ability to acquire a connection per query and release it right after the query finishes (querySql), or to get a connection from the pool and use it within a Promise.using scope, releasing it whenever I like (getSqlConnection).
Using this method you can chain several queries one after another without nesting them.
db.js
var mysql = require('mysql');
var Promise = require("bluebird");
Promise.promisifyAll(mysql);
Promise.promisifyAll(require("mysql/lib/Connection").prototype);
Promise.promisifyAll(require("mysql/lib/Pool").prototype);
var pool = mysql.createPool({
host: 'my_aws_host',
port: '3306',
user: 'my_user',
password: 'my_password',
database: 'db_name'
});
function getSqlConnection() {
return pool.getConnectionAsync().disposer(function (connection) {
console.log("Releasing connection back to pool")
connection.release();
});
}
function querySql (query, params) {
return Promise.using(getSqlConnection(), function (connection) {
console.log("Got connection from pool");
if (typeof params !== 'undefined'){
return connection.queryAsync(query, params);
} else {
return connection.queryAsync(query);
}
});
};
module.exports = {
getSqlConnection : getSqlConnection,
querySql : querySql
};
usage_route.js
var express = require('express');
var router = express.Router();
var dateFormat = require('dateformat');
var db = require('../my_modules/db');
var getSqlConnection = db.getSqlConnection;
var querySql = db.querySql;
var Promise = require("bluebird");
function retrieveUser(token) {
var userQuery = "select id, email from users where token = ?";
return querySql(userQuery, [token])
.then(function(rows){
if (rows.length == 0) {
return Promise.reject("did not find user");
}
var user = rows[0];
return user;
});
}
router.post('/', function (req, res, next) {
Promise.resolve().then(function () {
return retrieveUser(req.body.token);
})
.then(function (user){
const email = user.email;
res.status(200).json({ "code": 0, "message": "success", "email": email});
})
.catch(function (err) {
console.error("got error: " + err);
if (err instanceof Error) {
res.status(400).send("General error");
} else {
res.status(200).json({ "code": 1000, "message": err });
}
});
});
module.exports = router;
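A short usage sketch of getSqlConnection with Promise.using, for when two dependent queries should share one pooled connection; the orders table and its columns are made up for illustration:
var Promise = require("bluebird");
var db = require('../my_modules/db');

function getUserWithOrders(token) {
  return Promise.using(db.getSqlConnection(), function (connection) {
    // both queries run on the same connection; the disposer releases it afterwards
    return connection.queryAsync("select id, email from users where token = ?", [token])
      .then(function (users) {
        return connection.queryAsync("select * from orders where user_id = ?", [users[0].id]);
      });
  });
}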
I am still a bit new to Node, so maybe I missed something; let me know how it works out. Instead of you triggering async behaviour, Node just forces it on you, so you have to think ahead and plan for it.
const mysql = require('mysql');
const db = mysql.createConnection({
host: 'localhost',
user: 'user', password: 'password',
database: 'database',
});
db.connect((err) => {
// you should probably add reject instead of throwing error
// reject(new Error());
if(err){throw err;}
console.log('Mysql: Connected');
});
db.promise = (sql) => {
return new Promise((resolve, reject) => {
db.query(sql, (err, result) => {
if(err){reject(new Error());}
else{resolve(result);}
});
});
};
Here I am using the mysql module as normal, but I created a new function ahead of time to handle the promise, by adding it to the db const (you will see this as "connection" in a lot of Node examples).
Now lets call a mysql query using the promise.
db.promise("SELECT * FROM users WHERE username='john doe' LIMIT 1;")
.then((result)=>{
console.log(result);
}).catch((err)=>{
console.log(err);
});
What I have found this useful for is when you need to do a second query based on the first query.
db.promise("SELECT * FROM users WHERE username='john doe' LIMIT 1;")
.then((result)=>{
console.log(result);
var sql = "SELECT * FROM friends WHERE username='";
sql = result[0];
sql = "';"
return db.promise(sql);
}).then((result)=>{
console.log(result);
}).catch((err)=>{
console.log(err);
});
You should really use mysql placeholder values instead of building the string yourself, but this should at least give you an example of using promises with the mysql module.
Also with above you can still continue to use the db.query the normal way anytime within these promises, they just work like normal.
Hope this helps with the triangle of death.
You don't need to use promises; you can use a callback function, something like this:
function getLastRecord(name, next)
{
var connection = getMySQL_connection();
var query_str =
"SELECT name, " +
"FROM records " +
"LIMIT 1 ";
var query_var = [name];
var query = connection.query(query_str, query_var, function (err, rows, fields) {
//if (err) throw err;
if (err) {
//throw err;
console.log(err);
logger.info(err);
next(err);
}
else {
//console.log(rows);
next(null, rows);
}
}); //var query = connection.query(query_str, function (err, rows, fields) {
}
getLastRecord('name_record', function(err, data) {
if(err) {
// handle the error
} else {
// handle your data
}
});
Using the promise-mysql package, the logic is to chain promises using then(function(response){ your code }) and catch(function(response){ your code }) to catch errors from the "then" blocks preceding the catch block.
Following this logic, you pass query results along as objects or arrays using return at the end of each block; the return hands the query results to the next block, where the result appears as the function argument (here it is test1). Using this logic you can chain several MySQL queries together with whatever code you need to manipulate the results.
The connection object is created as a global because objects and variables created inside each block are local to that block. Don't forget that you can chain more "then" blocks.
var config = {
host : 'host',
user : 'user',
password : 'pass',
database : 'database',
};
var mysql = require('promise-mysql');
var connection;
let thename =""; // which can also be an argument if you embed this code in a function
mysql.createConnection(config
).then(function(conn){
connection = conn;
let test = connection.query('select name from records WHERE name=? LIMIT 1',[thename]);
return test;
}).then(function(test1){
console.log("test1"+JSON.stringify(test1)); // result of previous block
var result = connection.query('select * from users'); // A second query if you want
connection.end();
connection = {};
return result;
}).catch(function(error){
if (connection && connection.end) connection.end();
//logs out the error from the previous block (if there is any issue add a second catch behind this one)
console.log(error);
});
To answer your initial question: How can this be done in node.js in a readable way?
There is a library called co, which gives you the possibility to write async code in a synchronous workflow. Just have a look and npm install co.
The problem you often face with that approach is that you do not get a Promise back from all the libraries you would like to use. So you either have to wrap it yourself (see the answer from @Joshua Holbrook) or look for a wrapper (for example: npm install mysql-promise).
(Btw: it's on the roadmap for ES7 to have native support for this type of workflow with the keywords async/await, but it's not yet in Node: see the Node feature list.)
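A hedged sketch of the co approach, assuming getLastRecord() has already been wrapped to return a promise (as in the bluebird version just below):
var co = require('co');

co(function* () {
  // yield pauses until the promise resolves, so this reads top to bottom
  var rows = yield getLastRecord('name_record');
  if (rows.length > 20) { // assuming the intent is a row count
    console.log("action");
  }
}).catch(function (e) {
  console.log("Catch handler " + e);
});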
This can be achieved quite simply, for example with bluebird, as you asked:
var Promise = require('bluebird');
function getLastRecord(name)
{
return new Promise(function(resolve, reject){
var connection = getMySQL_connection();
var query_str =
"SELECT name, " +
"FROM records " +
"WHERE (name = ?) " +
"LIMIT 1 ";
var query_var = [name];
var query = connection.query(query_str, query_var, function (err, rows, fields) {
//if (err) throw err;
if (err) {
//throw err;
console.log(err);
logger.info(err);
reject(err);
}
else {
resolve(rows);
//console.log(rows);
}
}); //var query = connection.query(query_str, function (err, rows, fields) {
});
}
getLastRecord('name_record')
.then(function(rows){
if (rows > 20) {
console.log("action");
}
})
.error(function(e){console.log("Error handler " + e)})
.catch(function(e){console.log("Catch handler " + e)});
This may be helpful for others, extending @Dillon Burnett's answer.
Using async/await and params:
db.promise = (sql, params) => {
return new Promise((resolve, reject) => {
db.query(sql,params, (err, result) => {
if(err){reject(new Error());}
else{resolve(result);}
});
});
};
module.exports = db;
async function connection(username){
const result = await db.promise("SELECT * FROM users WHERE username=?", [username]);
return result;
}
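A short usage sketch of the function above (the username value is just an example):
connection('john doe')
  .then((rows) => console.log(rows))
  .catch((err) => console.log(err));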