NodeJS - For loop - Wait for a callback to continue - mysql

I am trying to query, for each id in an array, some data from the MySQL database. My for loop looks like the following:
for (var i = 0; i < res.length; i++) {
    var steamid = res[i].steamid;
    db.query('SELECT `name` FROM `users` WHERE `steamid`=?', [steamid], function(err, rows) {
        var player = {name: rows[i].name};
        team.push(player);
    });
}
But the problem is that the for loop continues before the query has finished, and by the time the callback runs, var i has already been increased by one or two.
Is there a way to check whether the query has finished?

Just use a closure so that your i refers to the correct index.
function finished() {...} // called when you have fetched all your data

var nbDone = 0;
res.forEach(function(elem, i) {
    var steamid = elem.steamid;
    db.query('SELECT `name` FROM `users` WHERE `steamid`=?', [steamid], function(err, rows) {
        var player = {name: rows[0].name}; // each query returns a single row for this steamid
        team.push(player);
        nbDone++;
        if (nbDone == res.length) {
            finished(); // you know all of your queries have completed
        }
    });
});
You may want to have a look at promises or the async library in order to handle your asynchronous flows.
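For example, here is a minimal sketch of the same loop rewritten with native promises (assuming db.query follows the usual error-first callback convention, and that team and finished are defined as above):

function queryName(steamid) {
    return new Promise(function(resolve, reject) {
        db.query('SELECT `name` FROM `users` WHERE `steamid`=?', [steamid], function(err, rows) {
            if (err) return reject(err);
            resolve({name: rows[0].name});
        });
    });
}

Promise.all(res.map(function(elem) { return queryName(elem.steamid); }))
    .then(function(players) {
        team = team.concat(players);
        finished(); // all queries have completed here
    })
    .catch(function(err) { console.error(err); });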

Related

How can I execute multiple queries (UPDATE) in Node using Sequelize?

I need to execute multiple queries in Node using Sequelize.
I tried executing them with a for loop, but it didn't work.
Can someone help me?
exports.update = async (req, res) => {
    for (let i = 0; i < req.body.length; i++) {
        const id = req.body[i].id;
        const permissao = req.body[i].permissao;
        const sql =
            `UPDATE tetsiste_usuarios.usuarioGraficos SET permissao = ${permissao}
             WHERE id = ${id} AND permissao <> ${permissao};`;
        sequelize.query(sql, { type: Sequelize.QueryTypes.UPDATE })
            .then(data => res.json(data));
    }
}
You need to await the call to sequelize.query, but that said... you are doing this all wrong. The idea behind using an ORM like Sequelize is that it abstracts the SQL and provides protection against things like SQL injection attacks and malformed queries, which your code is susceptible to. If I mess with the request and pass in 1; DROP TABLE usuarioGraficos; SELECT 1 FROM dual for permissao, it will drop your database table. That is bad. You're also calling res.json() on every loop iteration, which will also result in an error.
The proper way to do this is to use the Model.create() function for inserts or Model.update() for updates.
Loop update without the SQL injection vulnerability:
const Sequelize = require('sequelize');
const models = require('./models'); // your model definitions

// inside an async handler such as exports.update above
// an array to store our results
const updates = [];
// loop over each entry from the body array
for (let i = 0; i < req.body.length; i++) {
    // this ID
    const id = req.body[i].id;
    // this permission
    const permissao = req.body[i].permissao;
    // update the permission where the ID matches and the permission does not
    const update = await models.UsuarioGraficos.update({ permissao }, {
        where: {
            id,
            permissao: {
                [Sequelize.Op.ne]: permissao,
            },
        },
    });
    // add the result
    updates.push(update);
}
// send all the updates outside the loop
res.json(updates);
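If the updates are independent of each other, you could also fire them concurrently instead of awaiting each one in turn. A brief sketch, under the same model assumptions as above:

// run all updates at once and wait for the whole batch
const updates = await Promise.all(req.body.map(({ id, permissao }) =>
    models.UsuarioGraficos.update({ permissao }, {
        where: { id, permissao: { [Sequelize.Op.ne]: permissao } },
    })
));
res.json(updates);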

Call SQL query synchronously

Currently working on a Node REST API project where I want to fetch data for a list of items. For example: I have a list of post_ids ([1, 2, 3, ...]) for a particular tag (mobile), and for each post_id I want to retrieve the post title and description from a MySQL database. But the SQL query call is asynchronous.
How do I control the flow so that the results for each post_id are combined into one response?
My db calling code is here:
var express = require('express');
var app = express();
var bodyParser = require('body-parser'); // call body-parser
var addData = require('./dbhandler/addData'); // call database handler to insert data
var getData = require('./dbhandler/getData');

// route function to get feeds by tags
router.route('/postfeedsbytags/:tag')
    // get all new article feeds filtered by tag
    .get(function(req, res) {
        var success;
        console.log(req.params.tag);
        var json_results = [];
        getData.getPostFeedsByTag(req.params.tag, function(error, results, fields) {
            if (!error) {
                for (var i = 0; i < results.length; i++) {
                    getData.getPostFeedsByPostId(results[i]['post_id'], function(error, results, fields) {
                        if (!error) {
                            success = 1;
                            json_results.push(results[0]);
                            res.json({"success": success, "datasets": json_results});
                        } else {
                            success = 0;
                            console.log('Error while performing query. ' + error);
                            res.json({"success": success});
                        }
                    });
                }
                // res.json({"success" : success, "datasets" : results});
            } else {
                success = 0;
                console.log('Error while performing query. ' + error);
                res.json({"success": success});
            }
        });
    });
I think you can use the IN operator in the query to get all the posts in a single query and then iterate over the result.
If you don't want to use the IN operator, then use the async library for flow control; you can use the async.map function from it. A sketch of the IN-operator approach is shown below.
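Here is a minimal sketch of the single-query approach. The post table and column names are guesses, since they are not shown in the question, and it assumes you have access to the underlying mysql connection:

getData.getPostFeedsByTag(req.params.tag, function(error, results) {
    if (error) return res.json({"success": 0});
    // collect every post_id returned for the tag
    var postIds = results.map(function(row) { return row.post_id; });
    // one query for all posts; node-mysql expands IN (?) from an array
    var sql = 'SELECT `post_id`, `title`, `description` FROM `post` WHERE `post_id` IN (?)';
    connection.query(sql, [postIds], function(err, rows) {
        if (err) return res.json({"success": 0});
        res.json({"success": 1, "datasets": rows});
    });
});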

Can't insert element from array in mysql database with nodejs

I actually have a problem saving some data from an array in a MySQL database with Node.js.
This is my code:
for (var i = 0; i < data.data.length; i++) {
    var imageObject = data.data[i];
    var url = imageObject.images.standard_resolution.url;
    var id = imageObject.id;

    var sql = 'SELECT COUNT(*) AS imageIDCount FROM images WHERE id = ?';
    connection.query(sql, [id], function(err, rows, fields) {
        if (err) throw err;
        console.log(rows[0].imageIDCount);
        if (rows[0].imageIDCount == 0) {
            console.log(id + " doesn't exist"); // ### the ID at this point is always the last from that array
            //insertImage(id, url);
        } else {
            // console.log("ID exists");
        }
    });
}
This code runs when I get a response from a REST API using the request framework.
So my problem is that when I get the result of the count query and there is no element with the specific id, I always get the same id. I think that's because I use the same variable id there, but how can I fix it? I hope somebody can help me.
In this case, you're a victim of Node's asynchronous event loop. You're executing a synchronous for-loop and defining id:
for (var i = 0; i < data.data.length; i++) {
    var id = imageObject.id;
}
This works in normal JavaScript if you try to do something with id, but because the database module you're using runs asynchronously, that entire loop will have already completed before your first db query completes, effectively clobbering the value of id.
You'll need to re-write your function to behave asynchronously instead, or use something like node-async to help.
Here's a quick example of how that might look. Note that I didn't write your insertImage function for you; you'll need to rewrite that to support a callback as well.
async.each(data.data, function(imageObject, callback) {
    var sql = 'SELECT COUNT(*) AS imageIDCount FROM images WHERE id = ?';
    connection.query(sql, [imageObject.id], function(err, rows, fields) {
        if (err) return callback(err);
        if (rows[0].imageIDCount == 0) {
            console.log(imageObject.id + " doesn't exist");
            insertImage(imageObject.id, imageObject.images.standard_resolution.url, function(err) {
                callback(err); // fires the callback to async
            });
        } else {
            console.log(imageObject.id + " already exists");
            callback(); // maybe you want an error here too?
        }
    });
}, function(err) {
    // all of your db queries are completed
});
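Since the answer leaves insertImage as an exercise, here is a hypothetical callback-style sketch of it (the images table columns are assumptions):

function insertImage(id, url, callback) {
    // hypothetical schema: an `images` table with `id` and `url` columns
    var sql = 'INSERT INTO images (id, url) VALUES (?, ?)';
    connection.query(sql, [id, url], function(err) {
        callback(err); // propagate any insert error back to async.each
    });
}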

Yahoo-Finance Query Speed

I'm currently working on a project that involves querying yahoo-finance for many different ticker symbols. The bottleneck is acquiring the data from yahoo, so I was wondering if there is a way I might go about speeding this up.
If I used multiple machines to query and then aggregated the data, would that help? I only have one physical machine; how might I go about doing that?
Thanks!
EDIT: Currently, I'm using Node.js, yahoo-finance, and Q's deferred objects to ask Yahoo for historical data. Then I use Q.all() to wait until the promises for all tickers are fulfilled and persist the data.
var data = [];
tickers = ["goog", "aapl", ...];
...
Q.all(_.map(tickers, function(symbol) {
    return getYahooPromise(symbol);
}))
.done(function() { persistData(data); });
getYahooPromise retrieves data for the ticker symbol and pushes it into the data array. Once all promises are resolved, the data is persisted in a MySQL database.
SECOND EDIT:
More code:
var sequentialCalls = [];

for (var i = 0; i < tickers.length / chunkSize; i++) {
    sequentialCalls.push(persistYahooChunk);
}

sequentialCalls.push(function(callback) {
    connection.end();
    callback();
});

async.series(sequentialCalls);

exports.persistYahooChunk = function(callback) {
    console.log("Starting yahoo query");
    var currentTickers = tickers.slice(currentTickerIndex, currentTickerIndex + chunkSize);
    return yahooFinance.historical({
        symbols: currentTickers,
        from: "2015-01-28",
        to: "2015-02-05"
    }).then(function(result) {
        console.log("Query " + currentTickerIndex + "/" + tickers.length + " completed");
        currentTickerIndex += chunkSize;
        // add valid data
        var toPersist = _.map(result, function(quotes, symbol) {
            return [symbol, quotes.length != 0];
        });
        var query = "INSERT INTO `ticker` (`symbol`, `valid`) VALUES ?";
        connection.query(query, [toPersist], function(err, result) {
            if (err) {
                console.log(err);
            }
            //console.log(result);
            callback();
        });
    });
}
The bottleneck is that you are doing one query per ticker.
Depending on the data you need to pull, if you can do a single query that includes all your tickers, it will be much faster.
Here is an example if you need to get all current prices for a list of tickers, with a single query :
http://finance.yahoo.com/webservice/v1/symbols/A,B,C,D,E/quote?format=json
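For illustration, a minimal sketch of calling that endpoint from Node with the request module. The response shape is an assumption here, so inspect the parsed body before mapping it into your INSERT:

var request = require('request'); // assumes the request module is installed

var url = 'http://finance.yahoo.com/webservice/v1/symbols/' +
          tickers.join(',').toUpperCase() + '/quote?format=json';

request(url, function(err, response, body) {
    if (err) return console.error(err);
    var data = JSON.parse(body); // one round trip for every ticker
    // shape of `data` is an assumption: inspect it, then persist the quotes
    // the same way persistYahooChunk does
    console.log(data);
});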

Using Q to return secondary query in node with express and mysql

New to Node. As I am cycling through a roster of students, I need to check and see whether a teacher has requested them for tutoring.
I realized I can't just do this:
var checkRequest = function(id) {
    var value = '';
    roster.query('SELECT * FROM teacher_request WHERE student_id =' + id, function(err, row) {
        value = row.length;
    });
    return value;
}
After a bit of digging around, promises looked like a great solution, but if I simply return the deferred.promise from the checkRequest function, all I get is an object that says [deferred promise], and I can't access the actual data from it (or have not figured out how yet).
If I follow along with their API and use .then (as illustrated in the getRow function), I am back in the same problem I was in before.
function checkRequest(id) {
    console.log(id);
    var deferred = Q.defer();
    connection.query('SELECT * FROM teacher_request WHERE student_id =' + id, function(err, row) {
        deferred.resolve(row.length);
    });
    return deferred.promise;
}

var getRow = function(id) {
    checkRequest(id).then(function(val) {
        console.log(val); // works great
        return val; // back to the same problem
    });
}
The roster needs to be able to be pulled from an external API which is why I am not bundling the request check with the original roster query.
Thanks in advance
From the stuff you posted, I assume you have not really understood the concept of promises. They allow you to queue up callbacks that get executed when the asynchronous operation has finished (by succeeding or failing).
So instead of somehow getting the results back into your synchronous workflow, you should convert that workflow to work asynchronously as well. Here is a small example for your current problem:
// your students' ids in here
var studentsArray = [ 1, 2, 5, 6, 9 ];

for (var i = 0; i < studentsArray.length; i++) {
    checkRequest(studentsArray[i]) // pass the id, not the loop index
        .then(function(count) {
            console.log(count); // the row count resolved by checkRequest
            // any other code related to a specific student in here
        });
}
Or, as another option, if you need all students' data at the same time:
// your students' ids in here
var studentsArray = [ 1, 2, 5, 6, 9 ];

// collect all promises
var reqs = [];
for (var i = 0; i < studentsArray.length; i++) {
    reqs.push(checkRequest(studentsArray[i])); // pass the id, not the loop index
}

Q.all(reqs)
    .then(function(results) {
        // code in here
        // results[i] is the value resolved for studentsArray[i]
    });
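As a small usage sketch under the same assumptions (checkRequest resolves the row count for one student id), you can tie the results back to their ids like this:

Q.all(studentsArray.map(function(id) { return checkRequest(id); }))
    .then(function(counts) {
        studentsArray.forEach(function(id, i) {
            console.log('student ' + id + ' has ' + counts[i] + ' pending requests');
        });
    });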