Multiple async Mongo requests generate messed-up JSON returns

I'm trying to build a JSON response out of multiple requests on my MongoDB.
Since I'm not using DBRef, I have to build the "table joins" by myself, and that's how I ended up in this mess.
This is the code that has been giving me headaches for a couple of days now.
(The Mongo part is done with mongoskin.)
var getUserFeed = function(thelimit, out) {
    userfeed = db.collection("userfeed");
    apparel = db.collection("apparel");
    store = db.collection("stores");

    if (thelimit)
        args = { limit: thelimit, sort: [['date', -1]] };

    userfeed.find({}, args).toArray(function(e, feed) {
        if (e) console.log("error: ", e);

        // gather apparel infos
        var i = 0;
        var ret_feeds = [];
        feed.forEach(function(cur_feed) {
            var outfits = [];
            console.log("beginning with: " + cur_feed.url);
            var resfeed = "";
            resfeed = cur_feed;
            resfeed.url = baseurl + snapurl + resfeed.url + "_small.jpg";
            i = 0;
            cur_feed.apparel_ids.forEach(function(item) {
                /*>>*/ apparel.find({ "_id": item }, { limit: 1 }).toArray(function(e, results) {
                    console.log(">>>>>>>>>>> APPAREL_FIND { i:" + i + "}");
                    if (e) console.log("error: ", e);
                    results = results[0];
                    if (results.apparel_cat == 1)
                        url_subcat = "pants/";
                    else if (results.apparel_cat == 2)
                        url_subcat = "shirts/";
                    else if (results.apparel_cat == 2)
                        url_subcat = "tshirts/";
                    results.thumb = baseurl + outfiturl + url_subcat + results.apparel_id + "/front.jpg";
                    results.size = "M"; ///// TOBE REAL VERY SOON
                    results.gallery = [
                        baseurl + outfiturl + url_subcat + results.apparel_id + "/model.jpg",
                        baseurl + outfiturl + url_subcat + results.apparel_id + "/front.jpg"
                    ];
                    outfits.push(results); // quick and dirty, 2 b refined..
                    i++;
                    if (i >= cur_feed.apparel_ids.length) {
                        // pack it up
                        // resfeed.url = resfeed.url;
                        resfeed.outfits = outfits;
                        resfeed.fav = false;
                        resfeed.bough = false;
                        // retrieve store infos
                        /*>>>*/ store.find({ "_id": resfeed.store_id }, { limit: 1 }).toArray(function(e, resstore) {
                            console.log("\t############# STORE_FIND { i:" + i + "}");
                            if (e) console.log("error: ", e);
                            resfeed.store = resstore[0];
                            resfeed.store.class = "hem";
                            ret_feeds.push(resfeed);
                            if (ret_feeds.length >= feed.length) {
                                console.log("\t\t###################### calling return [ ret_feeds.length = " + ret_feeds.length + " feed.length = " + feed.length);
                                out.send(ret_feeds);
                            }
                        });
                    }
                });
            });
        });
    });
}
This code fails because it returns the JSON before finishing its task, so the next time it tries to return another JSON it crashes miserably, since the headers have already been sent.
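Schematically, the symptom is something like this (an illustration only, not my actual code, assuming an Express-style response object):

app.get('/feed', function (req, out) {
    // the first send() works; any later send() on the same response
    // fails with the "headers have already been sent" error
    setTimeout(function () { out.send({ first: true }); }, 10);
    setTimeout(function () { out.send({ second: true }); }, 20); // crashes here
});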
Now, as you can see, I have 3 collections: userfeed, apparel and stores.
The goal of this function is to retrieve all the items in the userfeed collection, extract the outfits (based on the apparel_ids array that is part of each userfeed entry), and also extract the store infos related in the same way to each userfeed entry.
I know that async.js or an equivalent is the way to go: I've read a gazillion other posts here on SO, but I still can't get my head around it, probably because the whole mechanism behind async.js, or flow control in general, is still out of focus in my mind.
I'm still a noob at node :)
UPDATE
I think I found the right path for understanding here: http://www.sebastianseilund.com/nodejs-async-in-practice
This guy did a terrific job of describing, use case by use case, all the ways to apply async.js to your code.
I'll post the solution as soon as I've worked it out.
UPDATE 2
Thanks to the guide above I managed to put together a working solution; the answer is below.

After much struggling I have finally managed to get a solution.
async.js was the answer, as I (obviously) suspected.
FYI, here's the working code.
If you'd like to point out improvements or anything else, you're more than welcome.
var getUserFeed = function(thelimit, out) {
    userfeed = db.collection("userfeed");
    apparel = db.collection("apparel");
    store = db.collection("stores");

    var args;
    if (thelimit)
        args = { limit: thelimit, sort: [['date', -1]] };

    var outfits = [];
    var feeds = [];

    async.series([
        // userfeed find
        function(callback) {
            userfeed.find({}, args).toArray(function(e, feed) {
                if (e) return callback(e);
                feeds = feed;
                console.log(feeds.length + " retrieved. stepping in");
                callback(null, null);
            });
        },
        // join
        function(callback) {
            async.forEach(feeds, function(thefeed, callback) {
                var i = feeds.indexOf(thefeed);
                async.parallel([
                    // load apparel infos
                    function(callback) {
                        console.log("\t >>> analyzing thefeed id " + thefeed._id);
                        async.forEach(thefeed.apparel_ids, function(apparel_id, callback) {
                            apparel.find({ "_id": apparel_id }, { limit: 1 }).toArray(function(e, results) {
                                if (e) console.log("error: ", e);
                                results = results[0];
                                if (results.apparel_cat == 1)
                                    url_subcat = "pants/";
                                else if (results.apparel_cat == 2)
                                    url_subcat = "shirts/";
                                else if (results.apparel_cat == 2)
                                    url_subcat = "tshirts/";
                                results.thumb = baseurl + outfiturl + url_subcat + results.apparel_id + "/front.jpg";
                                results.size = "M"; ///// TOBE REAL VERY SOON
                                results.gallery = [
                                    baseurl + outfiturl + url_subcat + results.apparel_id + "/model.jpg",
                                    baseurl + outfiturl + url_subcat + results.apparel_id + "/front.jpg"
                                ];
                                console.log("\t\t### pushing data into thefeed_index: " + i);
                                if (!util.isArray(feeds[i].outfits)) feeds[i].outfits = [];
                                feeds[i].outfits.push(results);
                                callback(null, null);
                            });
                        }, callback);
                    },
                    // load store infos
                    function(callback) {
                        store.find({ "_id": thefeed.store_id }, { limit: 1 }).toArray(function(e, resstore) {
                            console.log("\t### STORE_FIND");
                            if (e) console.log("error: ", e);
                            feeds[i].store = resstore[0];
                            feeds[i].store.class = "hem";
                            callback(null, null);
                        });
                    }
                ], callback);
            }, callback);
        }
        // MAIN
    ], function(err, result) {
        console.log("feed retrieval completed. stepping out");
        if (err) return next(err);
        out.send(feeds);
    });
};
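As a possible improvement along those lines, here is a skeleton of the same flow with promises and async/await (a sketch only; it assumes a driver whose find()/findOne() return promises when no callback is given, and it leaves out the thumb/gallery/store.class decoration):

var getUserFeed = async function (thelimit, out) {
    var userfeed = db.collection("userfeed");
    var apparel = db.collection("apparel");
    var store = db.collection("stores");

    var args = thelimit ? { limit: thelimit, sort: [['date', -1]] } : {};
    var feeds = await userfeed.find({}, args).toArray();

    // for each feed entry, load its apparel documents and its store in parallel
    await Promise.all(feeds.map(async function (thefeed) {
        thefeed.outfits = await Promise.all(thefeed.apparel_ids.map(function (id) {
            return apparel.findOne({ _id: id });
        }));
        thefeed.store = await store.findOne({ _id: thefeed.store_id });
    }));

    out.send(feeds);
};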

Related

Extract data from JSON within Lambda so it's not undefined

Looking for advice / a second opinion. I'm trying to pass JSON via HTTP API (API Gateway) to Lambda. I'm receiving the data (see the CloudWatch screenshot), but getting undefined when trying to extract values. The file is being written to S3, but its content is undefined.
I included the Lambda code and a picture of the CloudWatch logs. I'm almost there :) Newbie here...
Logs
Lambda Code
var AWS = require('aws-sdk');
var s3 = new AWS.S3();

exports.handler = async (event, context, callback) => {
    var bucketName = process.env.bucketName;
    var folder = process.env.folder;
    var filename = getFileName();
    console.log("Filename:" + filename);

    var raw = JSON.stringify(event.body);
    console.log("raw after stringify:" + raw);
    var results = JSON.parse(raw);
    console.log("results:" + results);

    let firstname = results.firstName;
    console.log("firstName:" + firstname);
    let lastname = results.lastName;
    console.log("lastName:" + lastname);
    let message = results.Message;
    console.log("Message:" + message);

    var content = message + "," + firstname + "," + lastname;
    console.log("content:" + content);

    var keyName = getKeyName(folder, filename);
    var params = { Bucket: bucketName, Key: keyName, Body: content };
    s3.putObject(params, function (err, data) {
        if (err)
            console.log(err)
        else
            console.log("Successfully saved object to: " + bucketName + "/" + keyName);
    });

    function getKeyName(folder, filename) {
        return folder + '/' + filename;
    }

    function getFileName() {
        var _uuid = uuidv4();
        var _date = Date.now();
        return _uuid + "-" + _date;
    }

    function uuidv4() {
        return 'xxxxxxxx-xxxx-4xxx-yxxx-xxxxxxxxxxxx'.replace(/[xy]/g, function (c) {
            var r = Math.random() * 16 | 0, v = c == 'x' ? r : (r & 0x3 | 0x8);
            return v.toString(16);
        });
    }

    var html = '<html><head><title>Prayer received result</title></head>' +
        '<body><h1>Your Prayer has been received!</h1></body></html>';

    //callback(null, res); - use this when using proxy model
    callback(null, html);
};
Made the following changes.
//var raw = JSON.stringify(event.body);
//console.log("raw after stringify:" + raw);
var results = JSON.parse(event.body);
console.log("results:" + results);
Hope this helps others. As of this post I'm new to Lambda and JSON.

Unexpected Error: write callback called multiple times executing Gulp.js tasks inside a loop

I'm trying to execute some gulp tasks inside a map loop to generate CSS files for each Sass theme I defined.
Finally, I return all the merged tasks with the merge-stream package.
Unfortunately, the callback seems to be sent for each iteration / gulp task of my loop, and I'm getting the following message:
Error: write callback called multiple times
@Task('build')
build() {
    var self = this;
    var tasks = this.themes.map((theme) => {
        this.foldersXml = getAttributes('jntFolderWithExternalProvider');
        this.filesXml = getAttributes('jntFolder');
        this.foldersXml[theme] = [];
        this.foldersXml[theme].push(getAttributes('jntFolderWithExternalProvider'));
        this.filesXml[theme] = [];
        this.filesXml[theme].push(getAttributes('jntFolderWithExternalProvider'));

        fs.readFile(this.themesFolder + '/' + theme + '/' + this.fileName, 'utf8', (err: Error, data: string & Buffer) => {
            if (err) {
                throw new gutil.PluginError({
                    plugin: 'build',
                    message: 'Main file doesn\'t exist for the theme: ' + theme
                });
            } else {
                var vars = data.match(/\$(.*?)\:/g);
                this.requiredVars.map(requiredVar => {
                    if (vars !== null) {
                        if (!vars.contains(requiredVar)) {
                            throw new gutil.PluginError({
                                plugin: 'build',
                                message: 'Required variable ' + requiredVar + ' is not defined'
                            });
                        };
                    }
                });
            }
        });

        return gulp.src(this.templatesFolder + '/' + this.pattern)
            .pipe(header('@import \'' + this.themesFolder + '/' + theme + '/' + this.fileName + '\';'))
            .pipe(sass.sync().on('error', gutil.log))
            .pipe(rename(function (path: any) {
                var file = path.basename + path.extname;
                var folderXml = getAttributes('jntFolderWithExternalProvider') as any;
                folderXml[file] = [ getAttributes('jntCssFile') ];
                self.foldersXml[theme][0][file] = [];
                self.foldersXml[theme][0][file].push(folderXml);
                var fileXml = getAttributes('jntFolderWithExternalProvider') as any;
                fileXml['jcr:content'] = [ getAttributes('jntRessource') ];
                self.filesXml[theme][0][file] = [];
                self.filesXml[theme][0][file].push(fileXml);
                path.dirname += '/' + file;
            }))
            .pipe(gulp.dest(this.themesFolder + '/' + theme));
    });
    return merge(tasks);
}

@SequenceTask()
run() {
    return ['build'];
}
I just want to get rid of this message and be sure the callback is invoked only once, at the end. What would be the best approach?
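For reference, stripped of the XML bookkeeping, the per-theme map + merge-stream pattern I'm describing looks roughly like this (a sketch only, with hypothetical theme names and paths, and plain gulp 4 task registration instead of the decorators above):

const gulp = require('gulp');
const sass = require('gulp-sass')(require('sass'));
const header = require('gulp-header');
const merge = require('merge-stream');

const themes = ['dark', 'light']; // hypothetical theme list

function buildThemes() {
    const tasks = themes.map((theme) =>
        gulp.src('templates/**/*.scss')
            .pipe(header('@import \'themes/' + theme + '/main.scss\';'))
            .pipe(sass().on('error', sass.logError))
            .pipe(gulp.dest('themes/' + theme + '/css'))
    );
    // merge() returns one stream that ends only after every per-theme stream ends,
    // so gulp's completion callback should be signalled exactly once
    return merge(tasks);
}

exports.build = buildThemes;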

Loop through MySQL rows and store results in array

I am trying to store details of affectedRows from a MySQL INSERT query using NodeJS. My mind is melting trying to comprehend callbacks and Promises. Being a one-man dev team, I wanted to reach out and ask for the clearest explanation of how a callback can be applied here in a forEach loop.
The goal should be clear from these few lines of code: store data in the affected_rows[] array.
var affected_rows = [];

asset_array.forEach(function(asset) { // Populate the asset table
    var query_string = "INSERT IGNORE INTO " + asset_table + " SET symbol = '" + asset[0] + "', name = '" + asset[1] + "'";
    connection.query(query_string, function(err, rows, fields) {
        if (err) throw err;
        if (rows.affectedRows > 0) {
            data_to_push = [asset_table, asset[0], asset[1]];
            affected_rows.push(data_to_push);
        }
    });
});

console.log(affected_rows); // [] for obvious async reasons
One option would be to process the asset_array inside a function that takes a callback; as you loop through asset_array, check whether the current index matches the asset_array length (-1), and if so call the callback.
var affected_rows = [];

function processAssets(cb) {
    var array_len = asset_array.length;
    asset_array.forEach(function(asset, index) {
        var query_string = 'INSERT IGNORE INTO ' + asset_table + ' SET symbol = \'' + asset[0] + '\', name = \'' + asset[1] + '\'';
        connection.query(query_string, function(err, rows, fields) {
            if (err) throw err;
            if (rows.affectedRows > 0) {
                data_to_push = [asset_table, asset[0], asset[1]];
                affected_rows.push(data_to_push);
            }
            if (index === (array_len - 1)) cb();
        });
    });
}

processAssets(function() {
    console.log(affected_rows);
});
I suggest you have a look at async's queue.
You can change your code like this to use it.
// 2nd Step - Perform each task and then call callback() to move to the next task
var q = async.queue(function(task, callback) {
    connection.query(task.query_string, function(err, rows, fields) {
        if (err) throw err;
        if (rows.affectedRows > 0) {
            var data_to_push = [asset_table, task.asset[0], task.asset[1]];
            affected_rows.push(data_to_push);
        }
        callback(); // call next task
    });
}, 2); // here 2 means concurrency, i.e. 2 tasks will run in parallel

// Final Step - drain fires at the end of the queue, which means all tasks have finished processing
q.drain = function() {
    // Do whatever you want after all tasks are finished
};

// 1st Step - create a queue of all tasks that you need to perform
// (each task carries both the query and the asset it refers to)
for (var i = 0; i < asset_array.length; i++) {
    var asset = asset_array[i];
    var query_string = "INSERT IGNORE INTO " + asset_table + " SET symbol = '" + asset[0] + "', name = '" + asset[1] + "'";
    q.push({ query_string: query_string, asset: asset });
}
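For comparison, the same collection step can also be sketched with promises (assuming the callback-style connection.query is wrapped with util.promisify, so that an INSERT resolves with the result object carrying affectedRows):

const util = require('util');
const query = util.promisify(connection.query).bind(connection);

const inserts = asset_array.map(function (asset) {
    const query_string = "INSERT IGNORE INTO " + asset_table +
        " SET symbol = '" + asset[0] + "', name = '" + asset[1] + "'";
    return query(query_string).then(function (rows) {
        if (rows.affectedRows > 0) {
            affected_rows.push([asset_table, asset[0], asset[1]]);
        }
    });
});

Promise.all(inserts).then(function () {
    console.log(affected_rows); // populated once every INSERT has finished
});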

Getting response with NodeJS request module

I just started using the twitch kraken api and I have a few questions.
Whenever I attempt to get a JSON object there is no response. I am attempting to run this function through Amazon AWS Lambda, and don't have access to a console.
In the code below my callback function will always print out "SUCCESS got streamers ERROR". I am pretty certain right now the "ERROR" comes from my initial setting of result.
How come result does not get changed into the proper JSON?
I have used Postman and it returns the proper thing with the query, params and headers:
function getJSON(callback) {
    var result = "ERROR";
    request.get(url(games[0]), function(error, response, body) {
        console.log("requested for url: " + url(games[0]));
        var d = JSON.parse(body);
        result = d.streams[0]; //.channel.display_name;
        // for(var i = 0; i < limit; i++){
        //     streamers.push(d.streams[i].channel.display_name)
        // }
        streamers.push(result);
    });
    if (streamers.length < 0) {
        callback("ERROR");
    } else {
        callback("SUCCESS got streamers " + result);
    }
}

function url(game) {
    return {
        url: "https://api.twitch.tv/kraken/streams/", //twitchlimit,
        qs: {
            'game': 'overwatch',
            'limit': 2
        },
        headers: {
            'Client-ID': clientID,
            'Accept': 'application/json',
            'Accept-Charset': 'utf-8',
        }
    };
}
I think your streamers check
if (streamers.length < 0) {
    callback("ERROR");
} else {
    callback("SUCCESS got streamers " + result);
}
should be moved inside the request callback, because currently the code isn't waiting for the request to finish; it just carries on, so the value of result never changes. Also, the array length can never be less than 0, so it will always fall through to the else branch and say "SUCCESS got streamers ERROR".
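In other words, something along these lines (just a sketch of that suggestion, keeping the original names):

function getJSON(callback) {
    request.get(url(games[0]), function (error, response, body) {
        if (error) return callback("ERROR");
        var d = JSON.parse(body);
        var result = d.streams[0];
        streamers.push(result);
        // this now runs only after the request has finished
        callback("SUCCESS got streamers " + result);
    });
}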
Thank you guys for the suggestions. I did have a few oversights and attempted to fix them.
I have implemented your suggestions and it seems to have worked a bit. I ended up putting the JSON.parse into a try/catch block and moved the if/else statements inside the request callback in the getJSON method. However, now I don't get any output.
This is how I am invoking the getJSON method:
function handleGameResponse(intent, session, callback) {
    // gets the game
    var game = intent.slots.game.value;
    if (!games.includes(game)) {
        var speechOutput = "You asked for: " + intent.slots.game.value;
        //var speechOutput = "You asked for: " + games[game] + " That game is currently not an option. These are your current options: " + arrayToString(games)
        var repromptText = "Please ask one from the current options.";
        var header = "Invalid Game";
    } else {
        getJSON(function(data) {
            if (data !== "ERROR") {
                var speechOutput = data; //capitalizeFirst(game) + " top three streamers are: " + arrayToString(streamers) + '.';
                var repromptText = "Do you want to hear more about games?";
                var header = capitalizeFirst(game);
            } else {
                var speechOutput = "I'm sorry, something went wrong and I could not get the streamers.";
            }
            //speechOutput = data;
        });
        //speechOutput = games[0] + " games[0], game= " + game; //this executes so the getJSON isn't executing
    }
    var shouldEndSession = false;
    callback(session.attributes, buildSpeechletResponse(header, speechOutput, repromptText, shouldEndSession));
}
Does the above execute the same way? As in, do shouldEndSession and the callback execute before getJSON has had time to give a response?
For ref, this is the getJSON method now:
function getJSON(callback) {
    var result = "ERROR";
    request.get(url(games[0]), function(error, response, body) {
        try {
            var d = JSON.parse(body);
        } catch (err) {
            callback("Sorry, something seems to have malfunctioned while getting the streamers");
        }
        result = d.streams[0].channel.display_name;
        // for(var i = 0; i < limit; i++){
        //     streamers.push(d.streams[i].channel.display_name)
        // }
        streamers.push(result);
        if (streamers.length <= 0) {
            callback("ERROR");
        } else {
            callback("SUCCESS got streamers " + result);
        }
    });
}

NodeJS creating JSON using all JSONs uploaded by user

I am trying to make a JSON file using all the JSON files in a directory. Every time a user uploads a new JSON, a new combined JSON should be generated. I want the new JSON to have a custom structure, hence I can't use any libraries. I have the following code:
router.post('/upload', function(req, res) {
    var sampleFile;
    var bbbid = req.body.bbbid;
    DDLFile = req.files.DDLFile;
    j++;
    DDLFile.mv('/uploads/' + bbbid + '/device' + j + '.json', function (err) {
        if (err) {
            res.status(500).send(err);
        }
        else {
            res.redirect("fileuploaded");
        }
    });

    var myfiles = [];
    var fs = require('fs');
    var arrayOfFiles = fs.readdirSync('/uploads/' + bbbid);
    arrayOfFiles.forEach(function (file) {
        myfiles.push(file);
        console.log(myfiles);
    });
    console.log('No of Files:', myfiles.length);
    var files = myfiles.length;
    console.log('Files:', files);
    console.log('J', j);

    var cddl = "{ BBBID:" + bbbid;
    if (files == 0) {
        cddl = cddl + '}';
        console.log('Entered if loop');
    }
    else {
        var i = 0;
        /*var obj;
        fs.readFile('/uploads/' + bbbid + '/device' + j + '.json', 'utf8', function (err, data) {
            if (err) throw err;
            obj = JSON.parse(data);
        });*/
        for (i = 0; i < files; i++) {
            console.log('Entered For loop');
            console.log('Count:', count);
            console.log('Sensor:', sensor);
            try {
                var obj = fs.readFileSync('/uploads/' + bbbid + '/device' + count + '.json', 'utf8');
            }
            catch (err) {
                console.log(err);
            }
            console.log('everything good');
            var obj1 = JSON.parse(obj);
            console.log('hi');
            //JSON.stringify(obj);
            var ddl = require('/uploads/' + bbbid + '/device' + count + '.json');
            console.log('o');
            cddl = cddl + ", {" + obj1.DDL.Sensor.Description.Verbose_Description + ":" + JSON.stringify(ddl) + "}";
            JSON.stringify(cddl);
            console.log(cddl);
            count++;
            sensor++;
            console.log('Count:', count);
            console.log('Sensor:', sensor);
        }
        cddl = cddl + '}';
        JSON.stringify(cddl);
        console.log(cddl);
    }
});
I want to generate a new cddl every time a new file is uploaded. I'm having a lot of problems. Help please!
I see two problems. First, instead of this:
try {
    var obj = fs.readFileSync('/uploads/' + bbbid + '/device' + count + '.json', 'utf8');
}
catch (err) {
    console.log(err);
}
console.log('everything good');
var obj1 = JSON.parse(obj);
you can write (fix the path if necessary):
var obj1 = require('./uploads/' + bbbid + '/device' + count + '.json')
Second, when you call:
JSON.stringify(cddl);
you're not saving the result anywhere, so you should save it where you need it:
var a = JSON.stringify(cddl);
And when everything is set, don't forget to write it back to a file using fs.writeFileSync or the async fs.writeFile.
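For example (a sketch only, with a hypothetical output path):

var a = JSON.stringify(cddl);
fs.writeFileSync('/uploads/' + bbbid + '/combined.json', a, 'utf8'); // hypothetical output file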