Analyzing JSON using the Watson API in Node.js

I would like to analyze a JSON file I create dynamically with Watson's Tone Analyzer: read the file, then analyze it.
How can I make the tone_analyzer.tone method read the file? Thank you.
app.get('/results', function(req, res) {
  // This is the json file I want to analyze
  fs.readFile('./output.json', null, cb);
  function cb() {
    tone_analyzer.tone({
      // How can I pass the file here?
      text: ''
    },
    function(err, tone) {
      if (err)
        console.log(err);
      else
        console.log(JSON.stringify(tone, null, 2));
    });
    console.log('Finished reading file.')
  }
  res.render('results');
})

Your callback is missing a couple of arguments, (error, data) (see the Node fs documentation for more info). data is the contents of your file and goes where you are currently sending the empty text string.
Try something like this:
app.get('/results', function(req, res) {
  // This is the json file I want to analyze
  fs.readFile('./output.json', 'utf8', cb);
  function cb(error, data) {
    if (error) throw error;
    tone_analyzer.tone({
      // data holds the contents of the file read above
      text: data
    },
    function(err, tone) {
      if (err)
        console.log(err);
      else
        console.log(JSON.stringify(tone, null, 2));
    });
    console.log('Finished reading file.');
  }
  res.render('results');
});

Thanks to user Aldo Sanchez for his tip. I parsed the input as JSON first, since fs was returning it as buffer data. I also made it search for a specific value in the key/value pairs and return that content instead of the whole string, so it can be fed directly to Watson's Tone Analyzer.
var data = fs.readFileSync('./output.json', null);
JSON.parse(data, function(key, value) {
  if (key == "message") {
    cb(value);
  }
  // a reviver must return the value, otherwise the key is dropped
  return value;
});
function cb(value) {
  tone_analyzer.tone({
    text: value
  },
  function(err, tone) {
    if (err)
      console.log(err);
    else
      console.log(tone);
  });
}
console.log('Finished reading file.');
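If the shape of output.json is known up front, a plain property lookup is simpler than a reviver. A minimal sketch, assuming the file has a top-level "message" key:
var fs = require('fs');

// parse once, then pull out the one field to analyze
var message = JSON.parse(fs.readFileSync('./output.json', 'utf8')).message;

tone_analyzer.tone({
  text: message
},
function(err, tone) {
  if (err)
    console.log(err);
  else
    console.log(JSON.stringify(tone, null, 2));
});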

Related

Converting csv to json object returns undefined

I've found several examples on S/O and otherwise, but they don't seem to be helping me.
I'm creating a private module in Node that takes in a CSV and converts it into a JSON object. It properly outputs the correct value to the command line, but the object itself is undefined.
exports.csvToJSON = function(file, callback) {
  converter.fromFile(file, function(err, result) {
    if (err) {
      console.log('error: ', error)
    } else {
      return result;
      callback(err, result);
    }
  });
  console.log(callback)
}
I'm currently using the csvtojson module and have tried other similar packages. Articles I've referenced:
Function Returns undefined object
Why does this callback return undefined?
Why is function return value undefined when returned in a loop? (although not entirely relevant as this is not a loop function)
Callback returning undefined
I'm unsure if I'm just not understanding the callback correctly, but even if I console.log(result.type), it comes back undefined with or without the callback. I've also tried defining the callback like so:
exports.csvToJSON = function(file, callback) {
  csvtojson().fromFile(file, function(err, result) {
    if (err) {
      console.log('error: ', error)
    }
    return result;
    callback(result);
  });
}
Here's an example of the console output:
Mirandas-MacBook-Pro:zendesktool mirandashort$ node ./controllers/update.js
[ { ticket:
{ ids: '3280805',
requester_email: 'miranda#barkbox.com',
status: 'pending',
comment: [Object],
subject: 'sup dog',
custom_fields: [Object],
add_tags: 'update_via_csv, dogs_are_cool, sup' } } ] undefined
Right now, since my other functions are dependent on this working, I'm only calling it in the file with exports.csvToJSON('update2.csv'), where update2.csv is an actual file. Eventually this will be called inside another function where I'll be using async, but I need this to work first. Additionally, that output seems to be linked to console.log(err) when called by the second code block example, and I'm not too sure why.
Or if there's a way to do this altogether without csvtojson, that's fine too. The only requirement be that a file in csv format can be returned as an object array.
Got it. I just used waterfall to put the two individual modules together:
exports.csvToJSON = function(file, callback) {
  csvtojson().fromFile(file, function(err, result) {
    if (err) {
      // propagate the error so the waterfall's final callback fires
      callback(err);
    } else {
      callback(null, result[0]);
    }
  });
}
exports.zdUpdateMany = function(data, callback) {
  credentials.config.tickets.updateMany(3280805, data, function(err, result) {
    if (err) {
      callback(err);
    } else {
      callback(null, result);
    }
  });
}
// function to update tickets
exports.processUpdate = function(file, callback) {
  async.waterfall([
    async.apply(exports.csvToJSON, file),
    exports.zdUpdateMany
  ], function(err, result) {
    if (err) {
      console.log(err);
    } else {
      console.log(result);
    }
    callback(err, result);
  });
}
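For illustration, a hypothetical call site (the file name and the final callback are examples, not part of the original module):
var update = require('./controllers/update');

// csvToJSON feeds its parsed row into zdUpdateMany; the final
// callback receives the update result or the first error
update.processUpdate('update2.csv', function(err, result) {
  if (err) console.log(err);
  else console.log('Tickets updated:', result);
});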

Render JSON data onto ejs view with Expressjs

I am trying to get 2 values from a JSON file on to the webpage.
obj["fruit"] and obj["thyroid"]. I use ejs as the template view engine and expressjs.
The method below says "fruit" and "thyroid" are undefined. The console.log works, though.
app.post('/top', function (req, res) {
  var obj;
  fs.readFile('./object.json', 'utf8', function (err, data) {
    if (err) throw err;
    obj = JSON.parse(data);
    console.log(obj["fruit"]);
    console.log(obj["thyroid"]);
  });
  res.render(
    'disp.ejs',
    {
      food: obj["fruit"],
      name: obj["thyroid"]
    }); // render
});
fs.readFile(path[, options], callback) is the asynchronous way to read a file. The way your code is set up, Node will start reading the file and then immediately call res.render before the file data has finished being read.
If you put the res.render inside the callback it will only be called when the file is finished reading and the data variable has what you need.
for example:
app.post('/top', function (req, res) {
  fs.readFile('./object.json', 'utf8', function (err, data) {
    if (err) throw err;
    var obj = JSON.parse(data);
    console.log(obj["fruit"]);
    console.log(obj["thyroid"]);
    res.render(
      'disp.ejs',
      {
        food: obj["fruit"],
        name: obj["thyroid"]
      }); // render
  });
});
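On newer Node versions the same fix reads more naturally with fs.promises and async/await. An equivalent sketch, not part of the original answer:
const fs = require('fs').promises;

app.post('/top', async function (req, res) {
  try {
    // render only runs once the file has been fully read and parsed
    const obj = JSON.parse(await fs.readFile('./object.json', 'utf8'));
    res.render('disp.ejs', {
      food: obj['fruit'],
      name: obj['thyroid']
    });
  } catch (err) {
    res.status(500).send(err.message);
  }
});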

NodeJS access variable from mongodb & write to JSON file

I need to write a field from my MongoDB database to a JSON file using Node.js. How do I go about it? I haven't found anything helpful.
Try this:
var exportDocuments = function (db, callback) {
  // Get the documents collection
  var collection = db.collection('documents');
  // Find some documents
  collection.find({}).toArray(function (err, docs) {
    if (err) return callback(err);
    require('fs').writeFile('yourFileName.json', JSON.stringify(docs), function (error) {
      if (error) return callback(error);
      callback();
    });
  });
}
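For context, a minimal sketch of wiring this up with the 2.x mongodb driver, whose connect callback hands back a db object (the URL and database name are placeholders):
var MongoClient = require('mongodb').MongoClient;

MongoClient.connect('mongodb://localhost:27017/mydb', function (err, db) {
  if (err) throw err;
  exportDocuments(db, function (error) {
    if (error) console.log(error);
    else console.log('Wrote documents to yourFileName.json');
    db.close();
  });
});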

NodeJS get request. JSON.parse: unexpected token

I'm writing a function in Node.js that hits a URL and retrieves its JSON, but I'm getting an error in JSON.parse: unexpected token.
In JSON validators the string passes the test when I copy it from the browser and paste it into the text field, but when I paste the URL for the parser to fetch the JSON it shows me an invalid message.
I guess it is something to do with the encoding of the response, but I can't figure out what it is. Here is my function with an example URL.
function getJsonFromUrl(url, callback) {
  url = 'http://steamcommunity.com/id/coveirao/inventory/json/730/2/';
  http.get(url, function (res) {
    // explicitly treat incoming data as utf8 (avoids issues with multi-byte chars)
    res.setEncoding('utf8');
    // incrementally capture the incoming response body
    var body = '';
    res.on('data', function (d) {
      body += d;
    });
    // do whatever we want with the response once it's done
    res.on('end', function () {
      console.log(body);
      try {
        var parsed = JSON.parse(body);
      } catch (err) {
        console.error('Unable to parse response as JSON', err);
        return callback(err, null);
      }
      // pass the relevant data back to the callback
      console.log(parsed);
      callback(null, parsed);
    });
  }).on('error', function (err) {
    // handle errors with the request itself
    console.error('Error with the request:', err.message);
    callback(err, null);
  });
}
Can you help me, please?
Thanks in advance for any help.
Concatenating the response as a string can run into encoding issues, e.g. when a chunk boundary splits a multi-byte UTF-8 character and each chunk is converted to a string separately. I'd therefore advise concatenating as buffers first:
// note: this requires dropping res.setEncoding('utf8') so chunks arrive as Buffers
var body = new Buffer(0);
res.on('data', function (d) {
  body = Buffer.concat([ body, d ]);
});
Of course, it might help to explicitly convert the buffer to a string yourself rather than relying on JSON.parse() doing it implicitly. This can be essential when dealing with an unusual encoding.
res.on('end', function () {
  try {
    var parsed = JSON.parse(body.toString("utf8"));
  } catch (err) {
    console.error('Unable to parse response as JSON', err);
    return callback(err, null);
  }
  ...
Aside from that, the content delivered by the given URL seems to be perfectly valid JSON.
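For completeness, both pieces combined into one function might look like this (an untested sketch; the callback contract is unchanged from the question):
function getJsonFromUrl(url, callback) {
  http.get(url, function (res) {
    // collect raw Buffer chunks; with no setEncoding, multi-byte
    // characters can never be split by premature string conversion
    var chunks = [];
    res.on('data', function (d) {
      chunks.push(d);
    });
    res.on('end', function () {
      var parsed;
      try {
        parsed = JSON.parse(Buffer.concat(chunks).toString('utf8'));
      } catch (err) {
        return callback(err, null);
      }
      callback(null, parsed);
    });
  }).on('error', function (err) {
    callback(err, null);
  });
}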

How to return a complex JSON response with Node.js?

Using Node.js and Express, I'd like to return one or multiple objects (an array) as JSON. In the code below I output one JSON object at a time. It works, but it isn't exactly what I want: the response produced isn't valid JSON, since it is a series of separate objects.
I am well aware that I could simply add all the objects to an array and return that array in res.end. However, I am afraid this could become heavy to process and memory intensive.
What is the proper way to achieve this with Node.js? Is query.each the right method to call?
app.get('/users/:email/messages/unread', function(req, res, next) {
  var query = MessageInfo
    .find({ $and: [ { 'email': req.params.email }, { 'hasBeenRead': false } ] });
  res.writeHead(200, { 'Content-Type': 'application/json' });
  query.each(function(err, msg) {
    if (msg) {
      res.write(JSON.stringify({ msgId: msg.fileName }));
    } else {
      res.end();
    }
  });
});
On Express 3 you can directly use res.json({foo: bar}):
res.json({ msgId: msg.fileName })
See the documentation
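Applied to the route from the question, that could look like the following sketch (assuming MessageInfo is a Mongoose model):
app.get('/users/:email/messages/unread', function (req, res) {
  MessageInfo
    .find({ email: req.params.email, hasBeenRead: false })
    .exec(function (err, results) {
      if (err) return res.status(500).json({ error: err.message });
      // res.json sets the Content-Type header and serializes in one step
      res.json(results.map(function (msg) { return { msgId: msg.fileName }; }));
    });
});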
I don't know if this is really any different, but rather than iterate over the query cursor, you could do something like this:
query.exec(function (err, results) {
  if (err) res.writeHead(500, err.message);
  else if (!results.length) res.writeHead(404);
  else {
    res.writeHead(200, { 'Content-Type': 'application/json' });
    res.write(JSON.stringify(results.map(function (msg) { return { msgId: msg.fileName }; })));
  }
  res.end();
});
[Edit] After reviewing the Mongoose documentation, it looks like you can send each query result as a separate chunk; the web server uses chunked transfer encoding by default so all you have to do is wrap an array around the items to make it a valid JSON object.
Roughly (untested):
app.get('/users/:email/messages/unread', function(req, res, next) {
  var firstItem = true, query = MessageInfo.find(/*...*/);
  res.writeHead(200, { 'Content-Type': 'application/json' });
  query.each(function(err, msg) {
    if (msg) {
      // Start the JSON array or separate the next element.
      res.write(firstItem ? (firstItem = false, '[') : ',');
      res.write(JSON.stringify({ msgId: msg.fileName }));
    } else {
      // End the JSON array and response ('[]' when there were no results).
      res.end(firstItem ? '[]' : ']');
    }
  });
});
Alternatively, as you mention, you can simply send the array contents as-is. In this case the response body will be buffered and sent immediately, which may consume a large amount of additional memory (above what is required to store the results themselves) for large result sets. For example:
// ...
var query = MessageInfo.find(/*...*/);
query.exec(function (err, results) {
  res.writeHead(200, { 'Content-Type': 'application/json' });
  res.end(JSON.stringify(results.map(function (x) { return { msgId: x.fileName }; })));
});