connecting to MySQL async using nodeJs and 'Q' - mysql

I'm working on a project that uses a Postgres DB with Node.js and the 'Q' promise library. I want to replace the Postgres DB with a MySQL database, but since I'm a total newbie to Node.js I have no idea how to go about it. It would be great if you could share an example of connecting to MySQL and, if possible, a simple query using Node.js and 'Q'.

Take a look at this answer. It seems to answer your question.
Taken exactly from the answer:
var mysql = require('mysql');
var Q = require('q'); // note: the npm package name is lowercase 'q'

var connection = mysql.createConnection({
    host     : 'localhost',
    user     : 'root',
    password : ''
});

connection.connect();

function doQuery1() {
    var defered = Q.defer();
    connection.query('SELECT 1 AS solution', defered.makeNodeResolver());
    return defered.promise;
}

function doQuery2() {
    var defered = Q.defer();
    connection.query('SELECT 2 AS solution', defered.makeNodeResolver());
    return defered.promise;
}

Q.all([doQuery1(), doQuery2()]).then(function(results) {
    // each result is [rows, fields]; `res` is assumed to be an Express response in scope
    res.send(JSON.stringify(results[0][0][0].solution + results[1][0][0].solution));
});

connection.end(); // end() waits for queued queries to finish before closing
However, if you don't need to make multiple calls at once, you can skip the use of Q.all and just do something like:
return doQuery1().then(function(res) {
    console.log(res);
    connection.end();
});

Thanks, this is how I ended up doing it:
var connection = mysql.createConnection({ host: 'host', user: 'user', password: 'pass', database: 'db' });
var connect = Q.denodeify(connection.connect.bind(connection));

connect().then(function(values) {
    console.log('...connected to database');
    var query = Q.denodeify(connection.query.bind(connection));
});
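To round that off, here is a minimal sketch of actually running a query with the denodeified helpers above (the 'SELECT 1 AS solution' query is only a placeholder; with Q.denodeify the promise resolves with an array because the mysql callback passes both rows and fields):
var query = Q.denodeify(connection.query.bind(connection));

connect()
    .then(function() {
        console.log('...connected to database');
        return query('SELECT 1 AS solution');
    })
    .then(function(results) {
        var rows = results[0];          // results is [rows, fields]
        console.log(rows[0].solution);  // 1
    })
    .catch(function(err) {
        console.error(err);
    })
    .fin(function() {
        connection.end();
    });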

Related

When database is changed value is not updating in socket.io

I have written socket.io and Node.js code to fetch a value from the database and push it to the client without a refresh, using setInterval. It works fine, but I don't want to use setInterval, because my database sometimes changes after hours, sometimes after minutes and sometimes after milliseconds. I only want the value to update automatically whenever it changes in the database, that's it. I am kind of stuck on this.
var express = require('express');
var app = express();
var server = require('http').createServer(app);
var io = require('socket.io').listen(server);
var mysql = require('mysql');

users = [];
connections = [];
disconnection = [];

var connection = mysql.createConnection({
    host: 'localhost',
    user: 'root',
    password: '',
    database: 'test'
});

connection.connect(function(error) {
    if (!!error) {
        console.log('Error in connection');
    } else {
        console.log('Database Connected');
    }
});

server.listen(process.env.PORT || 3000);
console.log('Server Running...');

app.get('/', function(req, res) {
    res.sendFile(__dirname + '/index.html');
});

io.sockets.on('connection', function(socket) {
    connections.push(socket);
    console.log('Connected: %s socket connected', connections.length);
    setInterval(function() {
        connection.query('select value from piechart', function(error, rows, fields) {
            if (rows.length > 0) {
                io.sockets.emit('new message', {msg: rows});
                //io.sockets.emit('new message', {msg: 'Change.'});
                //console.log('Value is fetched from database');
                //console.log(rows);
            } else {
                console.log('no rows returned'); // alert() does not exist in Node
            }
            //connection.release();
        });
    }, 3000);
});
You should take the interval out of the socket scope and make it global: one interval loop that fetches the value and, if it changed since the last fetch, emits it to all connected socket clients.
You say you would like to avoid an interval, but in the end you are going to need one, as sketched below.
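A minimal sketch of that idea, reusing the connection and io objects from the question (the 3-second period is kept as an assumption):
var lastValue = null;

// One global interval, outside the 'connection' socket handler.
setInterval(function() {
    connection.query('select value from piechart', function(error, rows) {
        if (error) {
            return console.log(error);
        }
        var current = JSON.stringify(rows);
        if (current !== lastValue) {              // only emit when the value actually changed
            lastValue = current;
            io.sockets.emit('new message', {msg: rows});
        }
    });
}, 3000);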
You can check out mysql-events
A Node JS NPM package that watches a MySQL database and runs callbacks
on matched events.
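A rough sketch of how mysql-events is typically wired up; the exact API differs between versions of the package, so treat the shape below as an assumption, and note that it requires the MySQL binary log to be enabled in row format:
var MySQLEvents = require('mysql-events');

var dsn = { host: 'localhost', user: 'root', password: '' };
var myEvents = MySQLEvents(dsn);

// Watch the value column of test.piechart and emit on every change.
myEvents.add('test.piechart.value', function(oldRow, newRow, event) {
    io.sockets.emit('new message', {msg: newRow});
});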
Another way around it would be to find all the processes that update the value and have them inform your Node.js process directly. But this can be hard if some of those components are out of your control (for example, if you cannot add code to another process that updates the DB).

Unable to Update MySQL (AWS RDS) table from Lambda though I am able to connect and INSERT

Below is my Lambda function. A request comes in from the API (API Gateway). Even when I try to pass the values directly to update the table, it is not updating.
I am able to INSERT data into the same table through the API, so I am just wondering what's wrong here.
Appreciate any help. Thanks in advance.
var mysql = require('mysql');
var config = require('./config.json');
var sql, response;

exports.handler = function(event, context) {
    var connection = mysql.createConnection({
        host     : config.dbhost,
        user     : config.dbuser,
        password : config.dbpassword,
        database : config.dbname
    });
    exports.handler = function(event, context, callback) {
        //prevent timeout from waiting event loop
        context.callbackWaitsForEmptyEventLoop = false;
        var variable1 = event.variable1;
        var variable2 = event.variable2;
        var id = event.id;
        connection.query('UPDATE LocationData SET latitude = ?, longitude = ? WHERE userId = ?', [variable1, variable2, id], function(error, results, fields) {
            response = {};
            response['id'] = results.id;
            response['variable1'] = results.variable1;
            response['variable2'] = results.variable2;
            context.succeed(response);
        });
    };
};
You are missing the call to the connect function (and the handler should not be nested inside another handler). A working example would look like this:
var mysql = require('mysql');
var config = require('./config.json');

exports.handler = function(event, context, callback) {
    // prevent the function from waiting for the open connection on the event loop
    context.callbackWaitsForEmptyEventLoop = false;

    var connection = mysql.createConnection({
        host     : config.dbhost,
        user     : config.dbuser,
        password : config.dbpassword,
        database : config.dbname
    });
    connection.connect(); // <--- MISSING THIS!

    var variable1 = event.variable1;
    var variable2 = event.variable2;
    var id = event.id;
    var sql = 'UPDATE LocationData SET latitude=?,longitude=? WHERE userId=?';
    connection.query(sql, [variable1, variable2, id], function(error, results, fields) {
        // note: for an UPDATE, `results` is an OK packet (affectedRows etc.), so echo the inputs back
        context.succeed({
            id: id,
            variable1: variable1,
            variable2: variable2
        });
    });
};
Also, as general advice, you should always check the error argument in the callback in case something goes wrong, and react to it accordingly.
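A minimal sketch of that error check, using the same query and variables as the example above (whether you report via callback or context is up to your handler signature):
connection.query(sql, [variable1, variable2, id], function(error, results) {
    if (error) {
        console.error(error);
        callback(error);   // surface the failure to Lambda instead of succeeding silently
        return;
    }
    callback(null, { id: id, variable1: variable1, variable2: variable2 });
});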

Why is query execution time so high for Mysql Native driver for node.js? Any alternatives?

Why does the same query take around 300 ms longer than its actual execution time when using the MySQL native driver for Node.js, with or without the createPool option?
Please refer to the highlighted section in the attached screenshot.
Also, for the native driver execution time, please see the second attached screenshot.
Codebase for the Node.js MySQL native driver:
db.js
var mysql = require('mysql');
var config = require('./config'); // credentials config; the path and shape are assumed from the usage below

var connectionpool = mysql.createPool({
    connectionLimit: 100, // important
    host: 'localhost',
    user: config.development.username,
    password: config.development.password,
    database: config.development.database,
    multipleStatements: true
});

exports.getConnection = function(callback) {
    connectionpool.getConnection(callback);
};
emp.js
var connections = require('../config/db');

var pre_query = new Date().getTime();
var sqlstatements = "select * from employees where recordstate<>'raw' and DATE(createdAt) " +
    "between ('1982-03-24') and ('2017-04-23') order by employeesID desc limit 20 offset 0;" +
    "select COUNT(*) as count from employees where recordstate<>'raw' and " +
    "DATE(createdAt) between ('1982-03-24') and ('2017-04-23') order by employeesID desc";

connections.getConnection(function(err, c) {
    console.log(sqlstatements);
    c.query(sqlstatements, function(err, projects) {
        console.log(err);
        if (!err) {
            c.release();
            var red = new Object();
            red.rows = JSON.parse(JSON.stringify(projects[0]));
            red.count = JSON.parse(JSON.stringify(projects[1]))[0].count;
            var post_query = new Date().getTime();
            // calculate the duration in seconds
            var duration = (post_query - pre_query) / 1000;
            console.log(duration);
            // console.log(red);
            res.json(red); // `res` is assumed to be the Express response of the enclosing route
        }
    });
});
Your measurement in JS includes the connection setup and all the processing of the results. The times reported in MySQL Workbench (and the MySQL terminal client) are only what the server reports (running the query and transmitting the result). The connection setup alone probably accounts for most of the 300 ms. Try moving the pre_query initialisation to the line right before running the actual query, and end the time measurement directly after it returns (before the console.log(err) call). That gives a result comparable to what other client tools report.
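For illustration, a sketch of the question's emp.js with the timestamps moved as suggested, so that only the query round-trip is measured:
connections.getConnection(function(err, c) {
    if (err) return console.log(err);
    var pre_query = new Date().getTime();          // start right before the query
    c.query(sqlstatements, function(err, projects) {
        var post_query = new Date().getTime();     // stop as soon as the callback fires
        console.log('query time (s):', (post_query - pre_query) / 1000);
        if (err) return console.log(err);
        c.release();
        // ...process projects as before
    });
});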

Do I need to connect to the database on every HTTP request in NodeJS?

I am using LocomotiveJS (an MVC framework based on ExpressJS) to develop my first simple API. I am still in the learning phase, and I am using MySQL as my database.
My question is: do I need to initiate a connection to MySQL every time there is a controller request?
Here's my code:
SongsController.show = function() {
    //this.title = 'Locomotive';
    console.log("nice imbasss");
    var contacts = SongsModel.foo("GOOD");
    var dbConnection = DBUtilities.connectMysql();
    var contactsArr = [];
    dbConnection.query('select * from contacts', function(err, rows, fields) {
        //console.log(err);
        console.log(rows);
        //console.log(fields);
        //contactsArr = rows;
    });
    DBUtilities.endMysql(dbConnection);
};
As you can see, connectMysql() is called every time songs/show is called. Am I doing this right?
You should connect to MySQL every time you fire a query and close the connection afterwards, because otherwise a third party might exploit the open connection (e.g. via MySQL injection).
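A minimal sketch of the flow described above, reusing the helper names from the question (DBUtilities is assumed to wrap mysql.createConnection and connection.end):
SongsController.show = function() {
    var dbConnection = DBUtilities.connectMysql();        // fresh connection for this request
    dbConnection.query('select * from contacts', function(err, rows) {
        if (err) {
            console.log(err);
        } else {
            console.log(rows);
        }
        DBUtilities.endMysql(dbConnection);               // close once the query has finished
    });
};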

Use NodeJS to run an SQL file in MySQL

I am using the mysql plugin for Node.js and it is fantastic at doing everything I need so far.
However, I have come across a stumbling block. I have created a MySQL provider that exports a mysql pool:
var mysql = require('mysql');
var mysqlPool = mysql.createPool({
    host     : '127.0.0.1',
    user     : 'root',
    password : ''
});

mysqlPool.getConnection(function(err, connection) {
    connection.query("INSERT INTO ....
I can select, create, insert, etc all fine, but I've come across a task where I would like to run a small SQL string with about 10 different commands together. I've thought about doing one of the following:
Execute a SQL file against a database using mysql
Run a query and enable multipleStatements
I have written some code to execute mysql as a child process, but I would really love to avoid doing this:
var cp = require("child_process");
var cmdLine = "mysql --user=autobuild --password=something newdb < load.sql";
cp.exec(cmdLine, function(error,stdout,stderr) {
console.log(error,stdout,stderr);
});
The problem with option two is that I would rather not enable multipleStatements for every query, just for this one particular case. I have thought about creating a separate connection for it, but I'm wondering what other ways this could be done.
TL;DR?
Using NodeJS and MySQL how can I execute the following into a database:
CREATE TABLE pet (name VARCHAR(20), owner VARCHAR(20) );
CREATE TABLE sofa (name VARCHAR(20), owner VARCHAR(20) );
CREATE TABLE table (name VARCHAR(20), owner VARCHAR(20) );
Thanks so much to anyone who shares their ideas.
You can use the connection option called multipleStatements:
// Works with the pool too.
var connection = mysql.createConnection({multipleStatements: true});
Then you can pass multiple statements in a single query, like this:
connection.query('CREATE 1; CREATE 2; SELECT 3;', function(err, results) {
    if (err) throw err;
    // `results` is an array with one element for every statement in the query:
    console.log(results[0]); // [create1]
    console.log(results[1]); // [create2]
    console.log(results[2]); // [select3]
});
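A sketch of applying that option to the original goal of running a small .sql file in one go (the file name and credentials are placeholders, and the file is assumed to contain semicolon-separated statements):
var fs = require('fs');
var mysql = require('mysql');

var connection = mysql.createConnection({
    host: '127.0.0.1',
    user: 'root',
    password: '',
    multipleStatements: true
});

fs.readFile('load.sql', 'utf8', function(err, sql) {
    if (err) throw err;
    connection.query(sql, function(err, results) {
        if (err) throw err;
        console.log('Ran ' + results.length + ' statements'); // one result set per statement
        connection.end();
    });
});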
Here is a big-.sql-file-friendly way to programmatically execute multiple queries against MySQL without using the multipleStatements property and a massive buffer. Please note this is not the most efficient way to load data into MySQL.
var mysql = require('mysql');
var fs = require('fs');
var readline = require('readline');

var myCon = mysql.createConnection({
    host: 'localhost',
    port: '3306',
    database: '',
    user: '',
    password: ''
});

var rl = readline.createInterface({
    input: fs.createReadStream('./myFile.sql'),
    terminal: false
});

rl.on('line', function(chunk) {
    myCon.query(chunk.toString('ascii'), function(err, sets, fields) {
        if (err) console.log(err);
    });
});

rl.on('close', function() {
    console.log("finished");
    myCon.end();
});
Looks like there is a module for this purpose: execsql
This will do the trick:
var express = require("express");
var bodyParser = require("body-parser");
var mysql = require('mysql');
var fs = require('fs');
var app = express();
app.use(bodyParser.urlencoded({ extended: false }));
app.use(bodyParser.json());
app.use(express.static(__dirname));
var defaultConnection = mysql.createConnection({
host : 'localhost',
user : 'root',
password : '',
database: 'utpDatabase'
});
function dbCall_file (endpoint, operation, settings, filename){
app.post(endpoint, function(request, response){
var data = request.body;
var path = 'path/to/queries/' + filename
var connection = (settings == 'default') ? defaultConnection : settings;
var callback = function(arg){
var query = arg.replace(/{{[ ]{0,2}([a-zA-Z0-9\.\_\-]*)[ ]{0,2}}}/g, function(str, mch){ return data[mch]});
connection.query(query, function(err, rows){
if (!err){
var toClient = (operation == 'select') ? rows : {success: true};
response.send(toClient);
} else {
console.log(err);
response.send({error: err, query: query});
}
});
};
fs.readFile(path, 'utf8', function(err, data){
if (!err){
callback(data);
} else {
callback(err);
}
});
});
};
Then, in your .sql files, wrap your Node variables in double curlies. For example, if you want to query first names using the Node variable data.firstName from your POST call:
SELECT * FROM users WHERE name={{ firstName }}
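A hypothetical call to the helper above, with the endpoint and file name made up for illustration:
// registers POST /api/users/find, which runs path/to/queries/findUserByFirstName.sql
dbCall_file('/api/users/find', 'select', 'default', 'findUserByFirstName.sql');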