xml2js: put parser into a function - json

I got this code on my NodeJS server:
function GetXML() {
fs.readFile('../slideshow.xml.old', function(err, data) {
parser.parseString(data, function (err, result) {
var json = JSON.stringify(result);
console.log(json);
return json;
});
});
}
The console.log() is working well but this is not:
.get('/', function(req, res) {
res.end(GetXML());
});
It returns undefined, which is quite logical because the functions are nested (I think?). But I don't know how to make GetXML() return a value.

It's returning undefined because you're trying to execute an asynchronous task synchronously. You have to pass a callback to your GetXML() function, like:
function GetXML(cb) {
fs.readFile('../slideshow.xml.old', function(err, data) {
parser.parseString(data, function (err, result) {
var json = JSON.stringify(result);
cb(json);
});
});
}
and call it properly in your .get function:
.get('/', function(req, res) {
GetXML(function (json) {
res.end(json);
});
});
You should take a look at this article that explains how callbacks work in node.js.
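If you prefer promises over callbacks, here is a minimal sketch of the same flow, assuming Node 8+ (for util.promisify) and that parser is an xml2js Parser instance:
var fs = require('fs');
var util = require('util');
var xml2js = require('xml2js');

var readFile = util.promisify(fs.readFile);
var parser = new xml2js.Parser();
var parseXml = util.promisify(parser.parseString.bind(parser));

// returns a promise that resolves with the JSON string
function GetXML() {
  return readFile('../slideshow.xml.old')
    .then(function (data) { return parseXml(data); })
    .then(function (result) { return JSON.stringify(result); });
}

// usage in the route:
// .get('/', function (req, res) {
//   GetXML().then(function (json) { res.end(json); });
// });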

"GetXML" is not returning a value. It can be change to:
function GetXML() {
return fs.readFile('../slideshow.xml.old', function(err, data) {
parser.parseString(data, function (err, result) {
var json = JSON.stringify(result);
console.log(json);
return json;
});
});
}

Related

Can't get data by id using NodeJS and MySQL

I am new to NodeJS and I want to get some information by id. Here is my code. The controller:
router.get('/machine', function (req, res) {
Machine.getmachine(req.body, function (err, row) {
if (err) {
res.status(400).json(err);
}
else {
res.json(row);
}
});
});
The SQL part:
getmachine: function (Machine, callback) {
return db.query('SELECT * from machine WHERE id=?', [Machine.id], callback);
},
I tried to test it with Postman and I only got {} as a result.
Please, can you tell me why I don't get what I want?
Try something like this. Instead of reading the body data you should read it from the query string; or, if you are passing the id as a route parameter, you should use req.params.id to get it.
router.get('/machine', function (req, res) {
Machine.getmachine(req.query.id, function (err, row) {
if (err) {
res.status(400).json(err);
}
else {
res.json(row);
}
});
});
Edit
According to the URL you provided in the comment, you can try something like this:
router.get('/machine:id', function (req, res) {
Machine.getmachine(req.params.id, function (err, row) {
if (err) {
res.status(400).json(err);
}
else {
res.json(row);
}
});
});
If you managed to get the id to the back-end, then the issue must be within your SQL query. Try something like this:
getmachine: function (Id, callback) {
return db.query('SELECT * from machine WHERE id=?', Id, callback);
}
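For what it's worth, this is roughly how the first variant would be hit from Postman or the browser (42 is just a hypothetical id; adjust host and port to your setup):
// GET http://localhost:3000/machine?id=42
// Express fills req.query from the query string, so inside the handler
// req.query.id === '42', which is then passed straight to getmachine.
With the /machine:id style route, the id arrives as part of the path instead and is read from req.params.id.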

Integrate mysql in waterfall async

I need to save to the db inside an async waterfall series.
I've tried to integrate these two functions after the clean function:
function connectDb(next) {
pool.getConnection(function(err, connection) {
if (err) console.log(err);
conn = connection;
}, next);
},
function saveDb(next) {
let sql = "UPDATE media SET media_url = ? WHERE media_url = ?";
conn.query(sql, [dstKey, srcKey], function (error, results, fields) {
if (error) {
conn.release();
console.log(error);
}else{
console.log("media db updated");
}
}, next)
}
The problem is that these two functions block the code execution. How can I integrate them into the waterfall below? I've tried to wrap the functions in a promise but that is also not working.
async.waterfall([
function download(next) {
s3.getObject({
//param
},
next);
},
function transform(response, next) {
resizeMedia(response.Body).then((file) => { next(); }).catch((err) => { reject(err); });
},
function upload(next) {
var fileData = fs.createReadStream('/tmp/'+dstKey);
if (isVideo ) { var ContentType = 'video/mp4' }
if (isAudio ) { var ContentType = 'audio/mp3' }
s3.putObject({
//param
},
next);
},
function clean(next) {
// Stream the transformed image to a different S3 bucket.
fs.unlinkSync('/tmp/'+dstKey);
s3.deleteObject({
//param
},
next);
}
], function (err) {
if (err) {
console.error('Error');
callback(null, "Error");
return;
} else {
console.log('Success');
callback(null, "Done");
return;
}
callback(null, "Done");
return;
}
);
The purpose of async.waterfall is to hold the waterfall at each step until that step's callback is called.
P.S. Usually you should not create a new db connection each time. The connection (or pool) should be created once when the application starts and reused whenever you need it.
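For illustration, a minimal sketch of that approach with the mysql package (the connection settings are placeholders):
var mysql = require('mysql');

// create the pool once, when the application starts
var pool = mysql.createPool({
  host: 'localhost',     // placeholder settings
  user: 'app',
  password: 'secret',
  database: 'media_db'
});

// inside the waterfall you can then drop connectDb entirely:
function saveDb(next) {
  var sql = "UPDATE media SET media_url = ? WHERE media_url = ?";
  // pool.query acquires a connection, runs the query and releases it for you
  pool.query(sql, [dstKey, srcKey], next);
}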
I highly recommend using knex.js instead; it returns promises by default, and if you want to use it inside async.waterfall (and wait for the result) you can call .asCallback.
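For example, a rough sketch of the saveDb step with knex, assuming a knex instance configured for the same database and the media table from the question:
function saveDb(next) {
  knex('media')
    .where('media_url', srcKey)
    .update({ media_url: dstKey })
    .asCallback(next); // invokes next(err, result) so the waterfall can continue
}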
I've found the problem; if someone falls into the same issue, here is my solution:
If a waterfall function passes a result to its callback, that result is automatically added as the first argument of the next function. In my code the mistake was simple (after a night's sleep): s3.deleteObject and s3.putObject pass a response, and that response needs to be accepted as the first argument with the callback as the last. As you say, I had used only the callback (next) as the argument, and this broke my code.
[...]
function upload(next) {
s3.putObject({
//param
},
next);
},
function clean(response, next) { // response in arguments
s3.deleteObject({
//param
},
next);
}
[...]

Calling a function which calls Async mysql

I want to call a function that executes a query using the npm mysql .query function. The problem is that .query is asynchronous, so I get a return value of undefined, and only after that does mysql.query finish executing.
I've tried to use promises but I couldn't synchronize the return value with the mysql.query result.
I don't want to use sync-mysql.
I want it to be in a wrapper function as shown.
function mysql_select(query)
{
var json_result
mysql_connnection.query(query, function (err, result)
{
if (err) throw err
json_result = JSON.stringify(result)
})
return json_result
}
For example I want to call this function like this:
console.log(mysql_select("SELECT * FROM table"))
and not get the undefined result.
I have checked the query; it returns the data correctly, but only after the function has already returned json_result.
You might want to have a look into Promises:
https://developer.mozilla.org/en-US/docs/Web/JavaScript/Reference/Global_Objects/Promise
function mysql_select(query)
{
return new Promise(function(resolve, reject) {
mysql_connnection.query(query, function (err, result)
{
if (err) {
reject(err);
return;
}
resolve(JSON.stringify(result));
})
})
}
;(async function() {
console.log(await mysql_select('SELECT * FROM table'));
})();
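Since the awaited promise rejects when the query fails, in practice you may want to wrap the call in try/catch; a small sketch:
;(async function() {
  try {
    console.log(await mysql_select('SELECT * FROM table'));
  } catch (err) {
    console.error('query failed:', err);
  }
})();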

nodejs-async: return mysql query result in for loop

I have three operations to do one after another:
1. Fetch some rows from the db
2. Run another mysql query in a for loop to get some data and store it in a variable
3. Display the data
For that I am using the async waterfall method.
async.waterfall([
function(callback){
//first sql and pass the result to second function
collection.getAllCollections(function (status,error,result) {
callback(null,result);
});
},
//running another query in loop with result with previous
function(result, callback){
for(var i=0;i<result.length;i++){
collection.getImages(result[i].id,function (status,error,user) {
//append the query result to old result
result[i]['users'] = user;
});
}
callback(null,result);
}
], function (err, result) {
console.log("result",result);
});
But the problem is that the final result does not contain the user results, because the second query (the query in the for loop) is asynchronous.
You realised the problem at hand. Your callback basically has to wait for all the queries started in the for loop to finish.
For example like this:
async.waterfall([
function(next){
//first sql and pass the result to second function
collection.getAllCollections(function (status,error,result) {
next(null, result);
});
},
function(result, next){
var calls = [];
//putting every call in an array
result.forEach(function(resultObject){
calls.push(function(callback) {
collection.getImages(resultObject.id, function (status, error, user) {
resultObject['users'] = user;
callback(null, resultObject);
});
});
});
//performing calls async parallel to each other
async.parallel(calls, function(err, results) {
//executed as soon as all calls are finished
if (err) {
next(err, null);
} else {
next(null, results);
}
});
}
], function (err, result) {
console.log("result",result);
});
Documentation: http://caolan.github.io/async/docs.html#parallel
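As an alternative sketch with native promises (no async helpers), assuming collection.getImages keeps the (status, error, user) callback signature from the question:
function attachUsers(result) {
  return Promise.all(result.map(function (resultObject) {
    return new Promise(function (resolve, reject) {
      collection.getImages(resultObject.id, function (status, error, user) {
        if (error) { return reject(error); }
        resultObject['users'] = user;
        resolve(resultObject);
      });
    });
  }));
}

// attachUsers(result).then(function (results) { console.log("result", results); });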

Using Lodash to loop through https.get when parsing XML to JSON in Node

I'm trying to parse XML to JSON in Node. I'm using xml2js. I'd like to incorporate Lodash to loop through each number in an array and use the corresponding url to convert the XML to JSON. When I use the code below, I get a Non-whitespace before first tag error. Any idea what I'm doing wrong?
const no = [78787878,78787879, 787878780];
_.forEach(no, https.get('https://tsdrapi.uspto.gov/ts/cd/casestatus/'+no+'/info.xml', function (res) {
res.on('data', function (chunk) {
response_data += chunk;
});
res.on('end', function () {
parser.parseString(response_data, function (err, result) {
if (err) {
console.log('Got error: ' + err.message);
} else {
console.log(util.inspect(result, false, null));
}
});
});
res.on('error', function (err) {
console.log('Got error: ' + err.message);
});
}));
Honestly the forEach helper in Lodash is kind of silly here; forEach is a prototype method of any Array instance anyway. One problem is that these functional helpers are not designed to handle async flow control. Also note that in your snippet https.get(...) is executed immediately (with the whole no array concatenated into the URL) and its return value is what gets passed to forEach, rather than a function.
While there are a dozen ways to handle flow control, the easiest would probably be to use the caolan/async module's map() method.
Your code would look something like:
var no = [78787878,78787879, 787878780];
async.map(no, function(number, cb) {
https.get('https://tsdrapi.uspto.gov/ts/cd/casestatus/'+number+'/info.xml', function (res) {
var response_data = '';
res.on('data', function (chunk) {
response_data += chunk;
});
res.on('end', function () {
parser.parseString(response_data, function (err, result) {
if (err) {
cb(err);
} else {
cb(null, result);
}
});
});
res.on('error', cb);
})
}, function(err, results) {
if(err) {
console.log("Error occured: ", err);
}
else {
console.log("Results(array): ", results);
}
});
The difference here is that async maps each item of the array through a function with a callback. This way you can gather the response from each request into an array and fire the final callback when every request has responded. If one of them errors out, the process stops and fires the final callback, where the error is logged (or you can write logic to handle it another way).