I need help with this, I really can't crack this one.
I have a web site that uses Node.js to run MySQL queries and display the results as tables, each on its own page. To do so, I created four objects, each containing a name, a title and the query. When I write this code out four times (from queryPage[0] to queryPage[3]), it works great. But I want to put it into a for loop so I don't need four repetitions of the same lines, and then it doesn't work: the pages don't load... Any idea what is going on?
connection.query(queryPage[0].query, function(err, rows) {
var arr = [];
for (var i in rows) {
arr[i] = rows[i];
}
app.get('/' + queryPage[0].name, function(req, res) {
res.render('pages/' + queryPage[0].name, {
title: queryPage[0].title,
data: arr
});
});
});
I think you need something more like this
var queryPage = [ ... ];
// This route catches all requests
app.get('/:name', function (req, res, next) {
// :name = req.params.name
var page;
for(var i = 0, l = queryPage.length; i < l; i++)
if(queryPage[i].name === req.params.name) {
page = queryPage[i];
break;
}
// Check the page exists
if(!page) return res.status(404).send('Not found');
// if(!page) return next();
connection.query(page.query, function(err, rows) {
    if(err) return next(err); // Surface query errors instead of silently ignoring them
res.render('pages/' + page.name , {
title: page.title,
data: rows
});
});
});
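For completeness, the queryPage entries only need the three fields the route reads. A hypothetical array might look like this (the names, titles and SQL below are placeholders, not real data):

var queryPage = [
    { name: 'customers', title: 'Customers', query: 'SELECT * FROM customers' },
    { name: 'orders',    title: 'Orders',    query: 'SELECT * FROM orders' },
    { name: 'products',  title: 'Products',  query: 'SELECT * FROM products' },
    { name: 'invoices',  title: 'Invoices',  query: 'SELECT * FROM invoices' }
];
// A request to /orders then matches the second entry and runs its query on demand.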
To set the scene for the code: the database stores Documents, and each Document may have Images associated with it.
I have been trying to write a route that queries the database for each Document that has Images related to it, stores that data as JSON, and returns it to the AJAX request once complete so the data can be viewed on the page. The closest I have got so far is the attempt below (see code).
router.post('/advanced_image_search', userAuthenticated, function(req, res, next) {
async.waterfall([
// First function is to get each document which has an image related
function getDocuments(callback){
connection.query(`SELECT DISTINCT(Document.document_id), Document.doc_name, Document.doc_version_no, Document.doc_date_added
FROM Document WHERE doc_type = 'image'`, function(err, results) {
if (err) {
callback(err, null);
return;
}
// The Object containing the array where the data from the db needs to be stored
var documents = {
'docs': []
};
// foreach to iterate through each result found from the first db query (getDocuments)
results.forEach(function(result) {
// New object to store each document
var document = {};
document.entry = result;
// This is the array where each image assciated with a document will be stored
document.entry.images = [];
// Push each document to the array (above)
documents.docs.push(document);
var doc_id = result.document_id;
})
// Returning the results as 'documents' to the next function
callback(null, documents);
})
},
function getImages(documents, callback){
// Variable assignement to the array of documents
var doc_array = documents.docs;
// Foreach of the objects within document array
async.forEachOf(doc_array, function(doc, key, callback){
// Foreach object do the following series of functions
async.waterfall([
function handleImages(callback){
// The id of the document to get the images for
var doc_id = doc.entry.document_id;
connection.query(`SELECT * FROM Image, Document WHERE Image.document_id = '${doc_id}' AND Image.document_id = Document.document_id`, function(err, rows) {
if (err) {
callback(err, null);
return;
}
callback(null, rows);
})
},
// Function below to push each image to the document.entry.images array
//
function pushImages(rows, callback){
// If multiple images are found for that document, the loop iterates through each pushing to the images array
for (var j = 0; j < rows.length; j++) {
// Creating new object for each image found so the data can be stored within this object, then pushed into the images array
var image = {
'image_name': rows[j].image_name
};
doc.entry.images.push(image);
}
callback(null, doc_array);
}
], function(err, doc_array){
if (err) {
console.log('Error in second waterfall callback:')
callback(err);
return;
}
console.log(doc.entry);
// callback(null, doc_array);
})
}, function(err, doc_array){
if (err) {
callback(err);
return;
}
callback(null, doc_array);
});
callback(null, doc_array);
}
], function(err, doc_array) {
if (err){
console.log('Error is: '+err);
return;
}
// The response that should return each document with each related image in the JSON
res.send(doc_array);
})
});
At the moment the results returned are:
{
    entry: {
        document_id: 1,
        doc_name: "DocumentNameHere",
        doc_version_no: 1,
        doc_date_added: "2016-10-24",
        images: []
    }
}
As can be seen above, the images array remains empty, even though testing (console.log) shows that the images are being found.
I hope someone is able to assist with this, as I am struggling to pin down the problem in this complex one.
Thanks
There are several async operations going on here and each operation needs a callback. See revised code:
router.post('/advanced_image_search', userAuthenticated, function(req, res, next) {
var getDocuments = function(next) {
// Function for getting documents from DB
var query = `SELECT DISTINCT(Document.document_id), Document.doc_name, Document.doc_version_no, Document.doc_date_added FROM Document WHERE doc_type = 'image'`; // Set the query
connection.query(query, function(err, results) {
// Run the query async
if(err) {
// If err end execution
next(err, null);
return;
}
var documentList = []; // Array for holding docs
for(var i=0; i<results.length; i++) {
// Loop over results, construct the document and push to an array
var documentEntry = results[i];
var documentObject = {};
documentObject.entry = documentEntry;
documentObject.entry.images = [];
documentObject.id = documentEntry.document_id;
documentList.push(documentObject);
}
next(null, documentList); // Pass the document list to the next async operation
});
};
var getImages = function(documents, next) {
    // Function for getting images for each document
    var finalDocs = []; // Blank array for the final documents with images
    documents.forEach(function(doc) {
        // Loop over each document and construct the query
        // (forEach keeps `doc` correctly bound inside the async callback below)
        var query = `SELECT * FROM Image, Document WHERE Image.document_id = '${doc.id}' AND Image.document_id = Document.document_id`;
        connection.query(query, function(err, images) {
            // Execute the query async
            if(err) {
                // Pass the error to the callback
                next(err, null);
                return;
            }
            var processedDoc = processImages(doc, images); // Call a helper function to process all images into the document object
            finalDocs.push(processedDoc); // Push the processed doc
            if(finalDocs.length === documents.length) {
                // Every document has been processed - move on to the final callback
                next(null, finalDocs);
            }
        });
    });
};
var processImages = function(doc, images) {
for (var i=0; i< images.length; i++) {
// Loop over each document image - construct object
var image = {
'image_name': images[i].image_name
};
doc.entry.images.push(image); // Push image into document object
}
return doc; // Return processed doc
};
getDocuments(function(err, docs) {
if(err) {
// Your error handler
}
if(docs) {
getImages(docs, function(err, finalDocs) {
if(err) {
// Your error handler
}
if(finalDocs) {
console.log(finalDocs);
res.status(200).json(finalDocs); // Send response
}
});
}
});
});
First we create a function to get documents. This function accepts a callback as its parameter; we run our query, construct our doc list, and then return the document list by executing that callback.
Next we run a function to get the images for each document. This function accepts our document list and a callback as its parameters. It retrieves images for each document and calls a helper function (synchronous).
Our helper function processes the images into each document and returns the processed document.
We then finish our operations by returning an array of processed documents via the second callback.
Other notes
We could tidy this up by structuring this procedural-style code into a contained object
The nesting of the function executions at the end could be cleaned up further
I've avoided using the async library, as this helps build a better understanding of the callback model
Event emitters could be used to flatten the callbacks (rough sketch below) - see https://nodejs.org/dist/latest-v7.x/docs/api/events.html
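As a rough illustration of that last note (only a sketch: the event names are made up, and it assumes it sits inside the same route handler so res and the query code are in scope):

var EventEmitter = require('events').EventEmitter;
var flow = new EventEmitter();

flow.on('documents', function (documentList) {
    // Second stage: fetch the images for each document here, then hand the result on
    flow.emit('done', documentList);
});

flow.on('done', function (finalDocs) {
    res.status(200).json(finalDocs);
});

flow.on('error', function (err) {
    res.status(500).send(err.message);
});

// The first stage would call flow.emit('documents', documentList)
// (or flow.emit('error', err)) instead of invoking a callback.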
Hope this helps
Dylan
I want to know if it's possible to do multiple res.send/res.json calls in the same method. I have a big problem with this because I want to develop a function in a Web Worker that calls a PUT request every minute, but on the second request I get "can't set headers after they are sent.". I know it's not possible to do this directly, but I want to know if there is some way to make it work.
exports.update = function (req, res) {
var feed = req.feed;
feed.title = req.body.title;
feed.apifeed = req.body.apifeed;
feed.apikey = req.body.apikey;
feed.active = req.body.active;
if(feed.apifeed && feed.apikey && feed.active){
var t = Threads.create();
t.eval(setInterval(
function(){
async.parallel([
function(callback, data){
var url = 'https://api.xively.com/v2/feeds/' + feed.apifeed + '.json?key=' + feed.apikey;
sensordata.getSensorData(url, function(data){
callback(null, data);
});
}
], function(err, data){
feed.content[0].value = data[0][0].value;
feed.content[0].date = new Date();
feed.save(function (err) {
if (err) {
return res.status(400).send({
message: errorHandler.getErrorMessage(err)
});
} else {
res.json(feed);
}
});
});
}, 5000)
);
}
};
Thanks for your support, I await your responses. Greetings!
You cannot respond to a request more than once; that wouldn't make sense.
If you need to stream data, then you will need to use long polling, WebSockets, Server-sent Events, etc.
Also, assuming Threads.create() does what I think it may be doing, spawning an OS thread just to do what you're currently doing is a waste of resources. Nothing in that block of code is CPU-bound.
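If the aim is to keep pushing the feed value to the client every time the interval fires, Server-sent Events are one option. A minimal sketch (the route name and payload here are made up; a real handler would send the saved feed instead):

exports.stream = function (req, res) {
    // Keep the connection open and send an SSE stream instead of calling res.json() repeatedly
    res.writeHead(200, {
        'Content-Type': 'text/event-stream',
        'Cache-Control': 'no-cache',
        'Connection': 'keep-alive'
    });

    var timer = setInterval(function () {
        var payload = JSON.stringify({ value: Math.random(), date: new Date() }); // placeholder data
        res.write('data: ' + payload + '\n\n'); // one SSE message per tick
    }, 60000); // once a minute, as described in the question

    // Stop the interval when the client disconnects
    req.on('close', function () {
        clearInterval(timer);
    });
};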
I have a problem saving some data from an array into a MySQL database with Node.js.
This is my code
for (var i = 0; i < data.data.length; i++) {
var imageObject = data.data[i];
var url = imageObject.images.standard_resolution.url;
var id = imageObject.id;
var sql = 'SELECT COUNT(*) AS imageIDCount FROM images WHERE id = ?'
var ids
connection.query(sql, [id], function(err, rows, fields) {
console.log(rows[0].imageIDCount);
if (err) throw err;
if (rows[0].imageIDCount == 0) {
console.log(id + " doesn't exist"); // ### the ID at this point is always the last from that array
//insertImage(id, url);
} else {
// console.log("ID exists");
}
});
}
This code runs when I get a response from a REST API using the request framework.
My problem is that, at the point where I get the result from the count query and there is no element with that specific id, I always get the same id. I think that's because I use the same variable "id" there, but how can I fix it? I hope somebody can help me.
In this case, you're a victim of Node's asynchronous event loop. You're executing a synchronous for-loop and defining id:
for (var i = 0; i < data.data.length; i++) {
var id = imageObject.id;
}
This works in normal Javascript if you try and do something with id, but because the database module you're using runs asynchronously, that entire loop will have already completed before your first db query completes, effectively clobbering the value of id.
You'll need to re-write your function to behave asynchronously instead, or use something like node-async to help.
Here's a quick example of how that might look. Note that I didn't write your insertImage function for you; you'll need to rewrite that to support a callback as well.
async.each(data.data, function(imageObject, callback) {
var sql = 'SELECT COUNT(*) AS imageIDCount FROM images WHERE id = ?'
connection.query(sql, [imageObject.id], function(err, rows, fields) {
if (err) return callback(err);
if (rows[0].imageIDCount == 0) {
console.log(id + " doesn't exist");
insertImage(imageObject.id, imageObject.images.standard_resolution.url, function(err) {
callback(err); // fires the callback to async
})
} else {
console.log(id + " already exists");
callback(); // maybe you want an error here too?
}
});
}, function(err, results) {
// all of your db queries are completed
});
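Since insertImage now has to accept a callback, a minimal callback-style version might look like this (the column names are a guess based on the SELECT above - adjust them to the real schema):

function insertImage(id, url, callback) {
    // Assumes an images table with id and url columns (assumption, not from the question)
    var sql = 'INSERT INTO images (id, url) VALUES (?, ?)';
    connection.query(sql, [id, url], function(err, result) {
        callback(err); // hand any error straight back to async.each
    });
}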
I'm currently working on a project that involves querying yahoo-finance for many different ticker symbols. The bottleneck is acquiring the data from yahoo, so I was wondering if there is a way I might go about speeding this up.
If I used multiple machines to query and then aggregated the data, would that help? I only have one physical machine; how might I go about doing that?
Thanks!
EDIT: Currently, I'm using Node.js, yahoo-finance, and Q.deferred to ask yahoo for historical data. Then, once all the promises are fulfilled (for each ticker), I'm doing a Q.all() to persist the data.
var data = [];
tickers = ["goog", "aapl", ...];
...
Q.all(_.map(tickers, function(symbol) {
return getYahooPromise(symbol);
}))
.done( function() { persistData(data) });
getYahooPromise retrieves data for the ticker symbol and pushes it into the data array. Once all promises are resolved, the data is persisted in a MySQL database.
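For context, getYahooPromise isn't shown, but based on that description it presumably looks something like the sketch below (the date range and the shape pushed into data are guesses, using Q and the yahoo-finance module's historical() call):

function getYahooPromise(symbol) {
    var deferred = Q.defer();
    yahooFinance.historical({
        symbol: symbol,
        from: '2015-01-28',
        to: '2015-02-05'
    }, function (err, quotes) {
        if (err) return deferred.reject(err);
        data.push({ symbol: symbol, quotes: quotes }); // push into the shared data array
        deferred.resolve(quotes);
    });
    return deferred.promise;
}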
SECOND EDIT:
More code:
var sequentialCalls = [];
for ( var i = 0; i < tickers.length / chunkSize; i++ ) {
sequentialCalls.push( persistYahooChunk );
}
sequentialCalls.push( function(callback) {
connection.end();
callback();
});
async.series( sequentialCalls )
exports.persistYahooChunk = function(callback) {
console.log("Starting yahoo query");
var currentTickers = tickers.slice(currentTickerIndex,currentTickerIndex + chunkSize);
return yahooFinance.historical( {
symbols: currentTickers,
from: "2015-01-28",
to: "2015-02-05"
}).then( function(result) {
console.log("Query " + currentTickerIndex + "/" + tickers.length + "completed");
currentTickerIndex += chunkSize;
//add valid data
var toPersist = _.map(result, function(quotes, symbol) {
return [symbol, quotes.length != 0 ];
});
var query = "INSERT INTO `ticker` (`symbol`, `valid`) VALUES ?";
connection.query(query, [toPersist], function(err, result) {
if (err) {
console.log (err);
}
//console.log(result);
callback();
});
});
}
The bottleneck exists because you are doing one query per ticker.
Depending on the data you need to pull, if you could do a single query that includes all your tickers it would be much faster.
Here is an example if you need to get all current prices for a list of tickers with a single query:
http://finance.yahoo.com/webservice/v1/symbols/A,B,C,D,E/quote?format=json
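As an illustration, fetching that URL for a whole list of tickers could look roughly like this (a sketch using Node's built-in http module; the JSON shape returned by that endpoint is not shown here):

var http = require('http');

function getQuotes(tickers, callback) {
    var url = 'http://finance.yahoo.com/webservice/v1/symbols/' +
              tickers.join(',') + '/quote?format=json';
    http.get(url, function (res) {
        var body = '';
        res.on('data', function (chunk) { body += chunk; });
        res.on('end', function () {
            try {
                callback(null, JSON.parse(body));
            } catch (e) {
                callback(e);
            }
        });
    }).on('error', callback);
}

// getQuotes(["goog", "aapl"], function (err, json) { /* persist json here */ });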
I am using Node, Angular and MySQL. The Node routes return JSON that is processed by Angular; the JSON is produced by first querying the MySQL DB using the node-mysql module.
In the code below I am unable to set the value of CreatedBy, but the value gets logged properly in the terminal. I was facing the same issue in the first query but sorted it in the code below; now I am unable to access the nested query results.
var mysql = require('node-mysql/node_modules/mysql');
var connection = mysql.createConnection({
host : 'localhost',
user : "root",
password: "",
database:'designtaskmanager'
});
connection.connect();
var allDbCalls = function() {
var sendData = {};
var rowData = {};
var temp={};
var _this = this;
this.sendTask = function(callback) {
module.exports.taskData = rowData;
callback['success']();
};
this.getTask = function(callback) {
var strQuery = "select * from task";
connection.query( strQuery, function(err, rows){
if(err)
{
callback['failure']();
throw err;
}
else
{
//rowData = rows;
var tasks=[];
for (var i in rows)
{
var Title = rows[i].task_title;
var TaskDescription=rows[i].task_description;
var TaskCategory=rows[i].task_category;
var TaskID=rows[i].task_id;
var TaskStatus=rows[i].task_status;
var TaskStatusMessage
var CreatedBy;
var TaskCreationDate=rows[i].task_creation_date;
var _MS_PER_DAY = 1000 * 60 * 60 * 24;
var currentdate = new Date();
var ddd=dateDiffInDays(TaskCreationDate,currentdate);
function dateDiffInDays(a, b) {
// Discard the time and time-zone information.
var utc1 = Date.UTC(a.getFullYear(), a.getMonth(), a.getDate());
var utc2 = Date.UTC(b.getFullYear(), b.getMonth(), b.getDate());
return Math.floor((utc2 - utc1) / _MS_PER_DAY);
}
if(TaskStatus==0)
{
TaskStatus="label-info";
TaskStatusMessage="Ongoing since";
}
else if(TaskStatus==1)
{
TaskStatus="label-default";
TaskStatusMessage="Paused since"
}
else if(TaskStatus==2)
{
TaskStatus="label-success";
TaskStatusMessage="Completed in"
}
//USER DETAILS QUERY
var crid=rows[i].task_created_by;
var creatorQuery = "select user_email from users where user_id like ?";
connection.query( creatorQuery,[crid], function(err, createdbyrows){
if(err)
{
callback['failure']();
throw err;
}
else
{
for(var j=0; j< createdbyrows.length;j++)
{
CreatedBy=createdbyrows[0].user_email;
console.log(j);
}
console.log(CreatedBy);
}
});
var taskItem={"TaskID":TaskID,"TaskTitle":Title,"TaskDescription":TaskDescription,"TaskCategory":TaskCategory,"CreatedBy":CreatedBy,"TaskStatus":TaskStatus,"TaskStatusMessage":TaskStatusMessage,"DifferenceInDays":ddd};
tasks.push(taskItem);
}
rowData=tasks;
_this.sendTask(callback);
}
});
}
}
module.exports = function () {
var instance = new allDbCalls();
return instance;
};
The reason that you're seeing it on the console but not in the callback is due to a misunderstanding of asynchronous programming. When you:
for(var i in rows) {}
You are actually queuing up all of those queries at the same time, and then, immediately afterwards, you assign rowData and fire the callback:
rowData=tasks; // remember, none of the nested creator queries have finished yet
_this.sendTask(callback);
So you pretty much call your callback while CreatedBy is still undefined in every task item. Remember, you can't call your final callback until ALL of your nested queries have finished!
To accomplish this, you may want to look at the async library: https://github.com/caolan/async#eachSeries
This will help you accomplish what you really want.
var async = require("async");
async.eachSeries(rows, function(row, cb) {
// Do each query here
// then call cb() when done, which tells the async library
// to "go to the next item in the array"
}, function(err) {
// This will get called when all of the single queries are finished
// Check err, then call your callback
_this.sendTask(callback);
});
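Applied to this case, the body of that eachSeries call would run the creator query for each task row and only build the task item once the email has come back. Roughly (a sketch reusing the variable names from the question; the other task fields from the original loop are omitted for brevity):

var tasks = [];
async.eachSeries(rows, function(row, cb) {
    var creatorQuery = "select user_email from users where user_id like ?";
    connection.query(creatorQuery, [row.task_created_by], function(err, createdbyrows) {
        if (err) return cb(err);
        tasks.push({
            TaskID: row.task_id,
            TaskTitle: row.task_title,
            CreatedBy: createdbyrows.length ? createdbyrows[0].user_email : null
            // ...plus the other fields built in the original loop
        });
        cb();
    });
}, function(err) {
    if (err) return callback['failure']();
    rowData = tasks;
    _this.sendTask(callback);
});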