NodeJS get request. JSON.parse: unexpected token - json

I'm writing a function in Node.js that hits a URL and retrieves its JSON, but I'm getting an error from JSON.parse: unexpected token.
In JSON validators the string passes the test when I copy it from the browser and paste it into the text field, but when I paste the URL for the validator to fetch the JSON itself, it shows me an invalid message.
I guess it is something to do with the encoding of the response, but I can't figure out what it is. Here is my function with an example URL.
function getJsonFromUrl(url, callback) {
    url = 'http://steamcommunity.com/id/coveirao/inventory/json/730/2/';

    http.get(url, function (res) {
        // explicitly treat incoming data as utf8 (avoids issues with multi-byte chars)
        res.setEncoding('utf8');

        // incrementally capture the incoming response body
        var body = '';
        res.on('data', function (d) {
            body += d;
        });

        // do whatever we want with the response once it's done
        res.on('end', function () {
            console.log(body);
            try {
                var parsed = JSON.parse(body);
            } catch (err) {
                console.error('Unable to parse response as JSON', err);
                return callback(err, null);
            }

            // pass the relevant data back to the callback
            console.log(parsed);
            callback(null, parsed);
        });
    }).on('error', function (err) {
        // handle errors with the request itself
        console.error('Error with the request:', err.message);
        callback(err, null);
    });
}
Can you help me, please?
Thanks in advance for any help.

Concatenating the response as a string might cause encoding issues, e.g. if a chunk's buffer begins or ends with a partial multi-byte UTF-8 character and each chunk is converted to a string on its own. Thus I'd advise concatenating as a buffer first:
var body = Buffer.alloc(0); // Buffer.alloc() replaces the deprecated new Buffer()
// note: don't call res.setEncoding() if you want the chunks delivered as Buffers
res.on('data', function (d) {
    body = Buffer.concat([ body, d ]);
});
Of course it might help to explicitly convert the buffer to a string yourself rather than relying on JSON.parse() doing it implicitly. This might be essential when an unusual encoding is involved.
res.on('end', function () {
    try {
        var parsed = JSON.parse(body.toString("utf8"));
    } catch (err) {
        console.error('Unable to parse response as JSON', err);
        return callback(err, null);
    }
    ...
Aside from that, the content delivered by the given URL seems to be perfectly valid JSON.
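For completeness, here is a minimal sketch of the whole function using buffer concatenation, with the callback signature taken from the question; treat it as illustrative rather than a drop-in replacement:
var http = require('http');

function getJsonFromUrl(url, callback) {
    http.get(url, function (res) {
        // collect raw Buffer chunks; no res.setEncoding() here on purpose
        var chunks = [];
        res.on('data', function (d) {
            chunks.push(d);
        });
        res.on('end', function () {
            // decode the complete body once, then parse it
            var body = Buffer.concat(chunks).toString('utf8');
            try {
                var parsed = JSON.parse(body);
            } catch (err) {
                return callback(err, null);
            }
            callback(null, parsed);
        });
    }).on('error', function (err) {
        callback(err, null);
    });
}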

Related

How to send Post request for each iteration of an array in Node/Express?

I have some raw JSON that I'm trying to send to my back-end MySQL server. I'm currently looping through the specific array in the JSON that I need and sending data from each of the children in the array via a POST request, but I am getting "Cannot set headers after they are sent to the client".
app.post('/reddit-import', function (req, res) {
    console.log("Route /reddit-import POST");
    let data = req.body.data.children
    data.forEach(child => {
        let sql1 = `CALL insert_user('${child.data.author}',
                                     '${child.data.author_fullname}');`
        connection.query(sql1,
            data,
            function (errQuery, result) {
                if (errQuery) {
                    console.log(errQuery);
                    res.json({status: "Error", err: errQuery});
                    res.end();
                } else {
                    console.log("Insert ID: ", result.insertId);
                    res.json({status: result.insertId, err: ""});
                    res.end();
                }
            }
        );
    });
});
When I send the POST request, my backend gets 2 rows of data before it hits me with the error message...any ideas?
You seem to be ending your outer response in the data.forEach with a res.end(), which I’m assuming is used to indicate the end of the outer HTTP request to the client. Did you perhaps mean to use “result” there instead?
Try this if you need to keep track of the insert IDs:
app.post('/reddit-import', function(req, res) {
    console.log("Route /reddit-import POST");
    let data = req.body.data.children
    const insertIds = data.map(child => {
        return new Promise((resolve, reject) => {
            const sql = `CALL insert_user('${child.data.author}', '${child.data.author_fullname}')`;
            connection.query(sql, (err, result) => {
                if (err) {
                    console.log(err);
                    return reject(err);
                }
                console.log("Insert ID: ", result.insertId);
                return resolve(result.insertId);
            });
        });
    });
    return Promise.all(insertIds)
        .then(ids => {
            return res.json({
                insertIds: ids
            });
        })
        .catch(err => {
            return res.status(500).json({
                message: 'got query error'
            });
        });
});
What this basically does is keep track of the insert ID from each query. We need Promises because the query() function is asynchronous: it runs independently, and there's no way to collect its data outside of its function(err, result) callback.

Now we have an array of Promises that will resolve to the insert IDs, and what's left is to send a response saying the request succeeded. We can't simply do res.json(insertIds), because insertIds is an array of Promises and we still need to extract the values. We can extract all the values at once with Promise.all(insertIds).then(ids => ...). If you wish to send a response informing the client that the request succeeded, do so in this then callback.

Lastly and most importantly, we handle errors in the Promise chain's .catch() block. This is where you want to send a response informing the client that there were errors.
Some things that could still be improved in this solution are implementing rollbacks in case of errors and, of course, validating the parameters. Unfortunately I have to leave those to the OP to implement.
Also, keep in mind you should send one and only one response per request.
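If your Node version supports async/await, the same idea reads a bit more directly. This is only a sketch under the same assumptions as above (an existing connection from the mysql driver and an insert_user procedure); rollbacks and validation are still left out:
const util = require('util');
// promisify the callback-based connection.query() so it can be awaited
const query = util.promisify(connection.query).bind(connection);

app.post('/reddit-import', async (req, res) => {
    try {
        const children = req.body.data.children;
        const ids = [];
        for (const child of children) {
            const sql = `CALL insert_user('${child.data.author}', '${child.data.author_fullname}')`;
            const result = await query(sql);
            ids.push(result.insertId);
        }
        // exactly one response, sent after every insert has finished
        res.json({ insertIds: ids });
    } catch (err) {
        res.status(500).json({ message: 'got query error' });
    }
});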

node.js returning contents of a file using request module

I was learning how to read json data (for the walkscore.com api) in Node.js and I found the request module. I know how to make it perform an action within the "request" block. However, I am calling it within a function and I want the function to return the json data. How would I be able to simply access the "body" variable inside the function, but outside the request block?
var request = require("request")
function loadWalkScore(lat, lon, address, name, state) {
var address = encodeURI(address)
var url = "http://api.walkscore.com/score?format=json&address=" + address
url += "&lat=" + lat + "&lon=" + lon + "&wsapikey=" + wsapikey
request(url, { json: true }, (err, res, body) => {
if (err) { return console.log(err); }
// somehow make loadWalkScore() function return <body>
});
}
You would want loadWalkScore to accept a callback argument and call it within your request callback. When writing Node code, your functions rarely return the requested data directly: they either accept (and eventually invoke) a callback that you provide, or return Promises (which eventually resolve with the requested data). This is how you work with async operations like an HTTP request.
Possibly something like:
function loadWalkScore(lat, lon, address, name, state, cb) {
    ....
    request(url, { json: true }, (err, res, body) => {
        if (err) {
            console.log(err);
            cb(err);
        } else {
            cb(null, body);
        }
    });
}
Then call it like:
loadWalkScore(...args..., function (err, body) {
    console.log('body:', body);
})
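As mentioned above, the other common pattern is to return a Promise instead of taking a callback. A minimal sketch, reusing the URL building from the question (wsapikey is assumed to be defined elsewhere):
function loadWalkScore(lat, lon, address, name, state) {
    var url = "http://api.walkscore.com/score?format=json&address=" + encodeURI(address)
        + "&lat=" + lat + "&lon=" + lon + "&wsapikey=" + wsapikey;
    return new Promise(function (resolve, reject) {
        request(url, { json: true }, (err, res, body) => {
            if (err) return reject(err);
            resolve(body);
        });
    });
}

// usage: the body is only available once the Promise resolves
loadWalkScore(lat, lon, address, name, state)
    .then(body => console.log('body:', body))
    .catch(err => console.log(err));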

Analyzing json using Watson API Nodejs

I would like to analyze a JSON file I dynamically create with Watson's tone analyzer. I would like it to read the file, then analyze it.
How can I make the tone_analyzer.tone method read the file? Thank you.
app.get('/results', function(req, res) {
    // This is the json file I want to analyze
    fs.readFile('./output.json', null, cb);
    function cb() {
        tone_analyzer.tone({
            // How can I pass the file here?
            text: ''
        },
        function(err, tone) {
            if (err)
                console.log(err);
            else
                console.log(JSON.stringify(tone, null, 2));
        });
        console.log('Finished reading file.')
    }
    res.render('results');
})
Your callback is missing a couple of arguments, (error, data) (see the Node fs documentation for more info). data is the content of your file and is what you would pass in as the text.
Try something like this:
app.get('/results', function(req, res) {
    // This is the json file I want to analyze
    fs.readFile('./output.json', 'utf8', cb);
    function cb(error, data) {
        if (error) throw error;
        tone_analyzer.tone({
            // the file contents are passed here
            text: data
        },
        function(err, tone) {
            if (err)
                console.log(err);
            else
                console.log(JSON.stringify(tone, null, 2));
        });
        console.log('Finished reading file.')
    }
    res.render('results');
})
Thanks to user Aldo Sanchez for his tip. I converted the input to JSON first, since fs was returning it as buffer data. Also, I made it search for the specific value in the key/value pair and return that content instead of the whole string, so it can be fed directly into Watson's tone analyzer.
// read the file synchronously; the result is a Buffer, which JSON.parse accepts
var data = fs.readFileSync('./output.json', null);

// the reviver function is called for every key/value pair during parsing
JSON.parse(data, function(key, value) {
    if (key == "message") {
        cb(value);
    }
    function cb(value, err) {
        if (err) throw err;
        tone_analyzer.tone({
            text: value
        },
        function(err, tone) {
            if (err)
                console.log(err);
            else
                console.log(tone);
        });
    }
    console.log('Finished reading file.')
});
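If the reviver trick feels opaque, a more conventional equivalent is to parse the file once and read the field directly. A minimal sketch, assuming output.json has a top-level "message" property:
fs.readFile('./output.json', 'utf8', function (err, raw) {
    if (err) throw err;
    var doc = JSON.parse(raw); // parse the whole document once
    tone_analyzer.tone({ text: doc.message }, function (err, tone) {
        if (err)
            console.log(err);
        else
            console.log(JSON.stringify(tone, null, 2));
    });
});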

I'm not using Express, but I'm trying extract POST data then to send a json as response, with no luck

I'm still very new to node.js so please bear with me.
I'm trying to extract POST data and then send JSON as the response, but I don't seem to be able to extract the data from the POST request, and even worse, I can't find the syntax for sending JSON for people who are NOT using Express. It keeps telling me res.json is not a function.
EDIT: I found out the problem for the JSON part, I was being dumb. I finally remembered what I was told: JSON is sent as a string.
var http = require('http');
var qs = require("querystring");

server = http.createServer(function (req, res) {
    try {
        var body = "";
        var post = qs.parse("");
        if (req.method == "POST") {
            res.writeHeader(200, {"Content-Type": "application/json"});
            req.on("data", function (data) {
                body += data;
                console.log(data); //It gives something like <Buffer xx xx xx ...>
                if (body.length > 1e6)
                    req.connection.destroy();
            });
            req.on("end", function () {
                post = qs.parse(body);
                console.log(post.test); //It gives "undefined"
            });
            res.end(JSON.stringify({ a: 1 }));
        }
    } catch(err) {
        console.dir(err);
        res.writeHeader(200, {"Content-Type": "text/plain"});
        res.end("Hi hi");
    }
});
server.listen(8000);
console.log("http start #8000");
Any help? Thanks in advance.
The snippet below handles converting the data to a string (i.e. converting the buffer to a string):
req.on('data', function(chunk) {
    // in your server the incoming body arrives on the request object
    var textChunk = chunk.toString('utf8');
    // console.log(textChunk); // will give you a stream of text from data
});
You could store textChunk outside the on('data') handler and then use it when required (say, to send the relevant data back to the user).
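Putting that together with the question's code, a minimal sketch that only responds after the whole body has arrived (the test field is the one the question logs):
var http = require('http');
var qs = require('querystring');

http.createServer(function (req, res) {
    if (req.method === 'POST') {
        var body = '';
        req.on('data', function (chunk) {
            body += chunk.toString('utf8'); // accumulate the decoded chunks
            if (body.length > 1e6) req.connection.destroy();
        });
        req.on('end', function () {
            var post = qs.parse(body); // the parsed form data is only available here
            console.log(post.test);
            res.writeHead(200, { 'Content-Type': 'application/json' });
            res.end(JSON.stringify({ a: 1 })); // respond only after the body has been read
        });
    } else {
        res.writeHead(200, { 'Content-Type': 'text/plain' });
        res.end('Hi hi');
    }
}).listen(8000);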

How to return a complex JSON response with Node.js?

Using Node.js and Express, I'd like to return one or multiple objects (an array) using JSON. In the code below I output one JSON object at a time. It works, but this isn't exactly what I want: the response produced isn't valid JSON, since it contains many separate objects.
I am well aware that I could simply add all objects to an array and return that specific array in res.end. However I am afraid this could become heavy to process and memory intensive.
What is the proper way to achieve this with Node.js? Is query.each the right method to call?
app.get('/users/:email/messages/unread', function(req, res, next) {
    var query = MessageInfo
        .find({ $and: [ { 'email': req.params.email }, { 'hasBeenRead': false } ] });
    res.writeHead(200, { 'Content-Type': 'application/json' });
    query.each(function(err, msg) {
        if (msg) {
            res.write(JSON.stringify({ msgId: msg.fileName }));
        } else {
            res.end();
        }
    });
});
On Express 3 you can use res.json({foo: bar}) directly:
res.json({ msgId: msg.fileName })
See the documentation
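For the question's route, once the results are available as an array, the whole response can be a single res.json call. A rough sketch assuming the same MessageInfo model:
app.get('/users/:email/messages/unread', function (req, res, next) {
    MessageInfo
        .find({ email: req.params.email, hasBeenRead: false })
        .exec(function (err, msgs) {
            if (err) return next(err);
            // res.json sets the Content-Type header and serializes the array in one step
            res.json(msgs.map(function (m) { return { msgId: m.fileName }; }));
        });
});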
I don't know if this is really any different, but rather than iterate over the query cursor, you could do something like this:
query.exec(function (err, results) {
    if (err) res.writeHead(500, err.message)
    else if (!results.length) res.writeHead(404);
    else {
        res.writeHead(200, { 'Content-Type': 'application/json' });
        res.write(JSON.stringify(results.map(function (msg) { return { msgId: msg.fileName }; })));
    }
    res.end();
});
[Edit] After reviewing the Mongoose documentation, it looks like you can send each query result as a separate chunk; the web server uses chunked transfer encoding by default, so all you have to do is wrap an array around the items to make the whole response valid JSON.
Roughly (untested):
app.get('/users/:email/messages/unread', function(req, res, next) {
    var firstItem = true, query = MessageInfo.find(/*...*/);
    res.writeHead(200, {'Content-Type': 'application/json'});
    query.each(function (err, msg) {
        if (msg) {
            // Start the JSON array or separate the next element.
            res.write(firstItem ? (firstItem = false, '[') : ',');
            res.write(JSON.stringify({ msgId: msg.fileName }));
        } else {
            res.end(']'); // End the JSON array and response once the cursor is exhausted.
        }
    });
});
Alternatively, as you mention, you can simply send the array contents as-is. In this case the response body will be buffered and sent immediately, which may consume a large amount of additional memory (above what is required to store the results themselves) for large result sets. For example:
// ...
var query = MessageInfo.find(/*...*/);
query.exec(function (err, results) {
    res.writeHead(200, {'Content-Type': 'application/json'});
    res.end(JSON.stringify(results.map(function (x) { return x.fileName; })));
});